hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
7e3c148481dc5590346b302c02684885ab710d45
48,147
py
Python
ui.py
wusuopu/lc-pydns
4caa6efdcce08d05648f436dc8c845f820ebac6b
[ "Python-2.0" ]
1
2015-09-24T13:11:23.000Z
2015-09-24T13:11:23.000Z
ui.py
wusuopu/lc-pydns
4caa6efdcce08d05648f436dc8c845f820ebac6b
[ "Python-2.0" ]
null
null
null
ui.py
wusuopu/lc-pydns
4caa6efdcce08d05648f436dc8c845f820ebac6b
[ "Python-2.0" ]
null
null
null
#!/usr/bin/env python #-*- coding:utf-8 -*- ## # Copyright (C) # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation. # 本程序是免费软件,基于GPL许可发布。 # ## # @文件名(file): ui.py # @作者(author): 龙昌锦(LongChangjin) # @博客(blog): http://www.xefan.com # @邮箱(mail): admin@xefan.com # @QQ: 346202141 # @ICQ: wosuopu@gmail.com # @时间(date): 2012-03-08 # login = """ <?xml version="1.0" encoding="UTF-8"?> <interface> <requires lib="gtk+" version="2.24"/> <!-- interface-naming-policy project-wide --> <object class="GtkWindow" id="window_login"> <property name="width_request">300</property> <property name="height_request">150</property> <property name="can_focus">False</property> <property name="title" translatable="yes">LC-pydns v1.0</property> <property name="resizable">False</property> <property name="window_position">center</property> <property name="icon">logo.ico</property> <property name="opacity">0.90000000000000002</property> <signal name="destroy" handler="main_quit" swapped="no"/> <child> <object class="GtkFixed" id="fixed_login"> <property name="visible">True</property> <property name="can_focus">False</property> <child> <object class="GtkLabel" id="label_mail"> <property name="width_request">100</property> <property name="height_request">25</property> <property name="visible">True</property> <property name="can_focus">False</property> <property name="label" translatable="yes">邮箱:</property> </object> <packing> <property name="x">15</property> <property name="y">25</property> </packing> </child> <child> <object class="GtkLabel" id="label_pswd"> <property name="width_request">100</property> <property name="height_request">25</property> <property name="visible">True</property> <property name="can_focus">False</property> <property name="label" translatable="yes">密码:</property> </object> <packing> <property name="x">15</property> <property name="y">70</property> 
</packing> </child> <child> <object class="GtkEntry" id="entry_mail"> <property name="width_request">150</property> <property name="height_request">25</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="invisible_char">●</property> <property name="primary_icon_activatable">False</property> <property name="secondary_icon_activatable">False</property> <property name="primary_icon_sensitive">True</property> <property name="secondary_icon_sensitive">True</property> </object> <packing> <property name="x">120</property> <property name="y">25</property> </packing> </child> <child> <object class="GtkEntry" id="entry_pswd"> <property name="width_request">150</property> <property name="height_request">25</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="visibility">False</property> <property name="invisible_char">●</property> <property name="primary_icon_activatable">False</property> <property name="secondary_icon_activatable">False</property> <property name="primary_icon_sensitive">True</property> <property name="secondary_icon_sensitive">True</property> </object> <packing> <property name="x">120</property> <property name="y">70</property> </packing> </child> <child> <object class="GtkButton" id="button_login"> <property name="label" translatable="yes">登陆</property> <property name="use_action_appearance">False</property> <property name="width_request">80</property> <property name="height_request">25</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="receives_default">True</property> <accelerator key="Return" signal="clicked"/> <signal name="clicked" handler="on_button_login_clicked" swapped="no"/> </object> <packing> <property name="x">113</property> <property name="y">110</property> </packing> </child> </object> </child> </object> </interface> """ ui = """ <?xml version="1.0" encoding="UTF-8"?> 
<interface> <requires lib="gtk+" version="2.24"/> <!-- interface-naming-policy project-wide --> <object class="GtkListStore" id="liststore_domain"> <columns> <!-- column-name id --> <column type="gchararray"/> <!-- column-name 标星 --> <column type="gboolean"/> <!-- column-name 域名 --> <column type="gchararray"/> <!-- column-name 状态 --> <column type="gchararray"/> <!-- column-name 备注 --> <column type="gchararray"/> </columns> </object> <object class="GtkListStore" id="liststore_linetype"> <columns> <!-- column-name 线路类型 --> <column type="gchararray"/> </columns> <data> <row> <col id="0" translatable="yes">默认</col> </row> <row> <col id="0" translatable="yes">电信</col> </row> <row> <col id="0" translatable="yes">联通</col> </row> <row> <col id="0" translatable="yes">教育网</col> </row> <row> <col id="0" translatable="yes">移动</col> </row> <row> <col id="0" translatable="yes">铁通</col> </row> <row> <col id="0" translatable="yes">国内</col> </row> <row> <col id="0" translatable="yes">国外</col> </row> <row> <col id="0" translatable="yes">搜索引擎</col> </row> <row> <col id="0" translatable="yes">百度</col> </row> <row> <col id="0" translatable="yes">Google</col> </row> <row> <col id="0" translatable="yes">有道</col> </row> <row> <col id="0" translatable="yes">必应</col> </row> <row> <col id="0" translatable="yes">搜搜</col> </row> <row> <col id="0" translatable="yes">搜狗</col> </row> </data> </object> <object class="GtkListStore" id="liststore_record"> <columns> <!-- column-name rid --> <column type="gchararray"/> <!-- column-name 主机 --> <column type="gchararray"/> <!-- column-name 记录类型 --> <column type="gchararray"/> <!-- column-name 线路类型 --> <column type="gchararray"/> <!-- column-name 记录值 --> <column type="gchararray"/> <!-- column-name MX优先级 --> <column type="gchararray"/> <!-- column-name TTL --> <column type="gchararray"/> <!-- column-name 状态 --> <column type="gchararray"/> <!-- column-name 备注 --> <column type="gchararray"/> </columns> </object> <object class="GtkListStore" 
id="liststore_recordtype"> <columns> <!-- column-name 记录类型 --> <column type="gchararray"/> </columns> <data> <row> <col id="0" translatable="yes">A</col> </row> <row> <col id="0" translatable="yes">CNAME</col> </row> <row> <col id="0" translatable="yes">MX</col> </row> <row> <col id="0" translatable="yes">TXT</col> </row> <row> <col id="0" translatable="yes">NS</col> </row> <row> <col id="0" translatable="yes">AAAA</col> </row> <row> <col id="0" translatable="yes">SRV</col> </row> <row> <col id="0" translatable="yes">URL</col> </row> </data> </object> <object class="GtkWindow" id="Mainwindow"> <property name="can_focus">False</property> <property name="title" translatable="yes">LC-pydns v1.0</property> <property name="window_position">center</property> <property name="icon">logo.ico</property> <property name="opacity">0.90000000000000002</property> <property name="mnemonics_visible">False</property> <signal name="delete-event" handler="main_quit" swapped="no"/> <child> <object class="GtkVBox" id="vbox1"> <property name="visible">True</property> <property name="can_focus">False</property> <child> <object class="GtkVBox" id="vbox2"> <property name="visible">True</property> <property name="can_focus">False</property> <child> <object class="GtkLabel" id="label_info"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="xalign">0.0099999997764825821</property> <property name="label" translatable="yes">&lt;b&gt;个人信息:&lt;/b&gt;</property> <property name="use_markup">True</property> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="position">0</property> </packing> </child> <child> <object class="GtkHBox" id="hbox1"> <property name="visible">True</property> <property name="can_focus">False</property> <child> <object class="GtkLabel" id="label_email0"> <property name="visible">True</property> <property name="can_focus">False</property> <property 
name="xalign">0.0099999997764825821</property> <property name="label" translatable="yes">&lt;b&gt;邮箱:&lt;/b&gt;</property> <property name="use_markup">True</property> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="padding">5</property> <property name="position">0</property> </packing> </child> <child> <object class="GtkLabel" id="label_info_email"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="xalign">0.10000000149011612</property> <property name="label" translatable="yes">mail</property> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="padding">5</property> <property name="position">1</property> </packing> </child> <child> <object class="GtkLabel" id="label1"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="xalign">0.10000000149011612</property> <property name="label" translatable="yes">&lt;b&gt;邮箱认证:&lt;/b&gt;</property> <property name="use_markup">True</property> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="padding">5</property> <property name="position">2</property> </packing> </child> <child> <object class="GtkLabel" id="label_info_mailverified"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="xalign">0.10000000149011612</property> <property name="label" translatable="yes">mail_verified</property> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="padding">5</property> <property name="position">3</property> </packing> </child> <child> <object class="GtkLabel" id="label2"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="xalign">0.10000000149011612</property> <property name="label" 
translatable="yes">&lt;b&gt;账号状态:&lt;/b&gt;</property> <property name="use_markup">True</property> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="padding">5</property> <property name="position">4</property> </packing> </child> <child> <object class="GtkLabel" id="label_info_status"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="xalign">0.10000000149011612</property> <property name="label" translatable="yes">status</property> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="padding">5</property> <property name="position">5</property> </packing> </child> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="position">1</property> </packing> </child> <child> <object class="GtkHSeparator" id="hseparator1"> <property name="visible">True</property> <property name="can_focus">False</property> </object> <packing> <property name="expand">False</property> <property name="fill">True</property> <property name="padding">5</property> <property name="position">2</property> </packing> </child> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="position">0</property> </packing> </child> <child> <object class="GtkHPaned" id="hpaned"> <property name="visible">True</property> <property name="can_focus">True</property> <child> <object class="GtkViewport" id="viewport1"> <property name="visible">True</property> <property name="can_focus">False</property> <child> <object class="GtkVBox" id="vbox3"> <property name="visible">True</property> <property name="can_focus">False</property> <child> <object class="GtkHBox" id="hbox2"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="spacing">5</property> <child> <object 
class="GtkLabel" id="label3"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="label" translatable="yes">&lt;b&gt;我的域名&lt;/b&gt;</property> <property name="use_markup">True</property> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="padding">15</property> <property name="position">0</property> </packing> </child> <child> <object class="GtkButton" id="button_dpause"> <property name="label" translatable="yes">暂停/启用</property> <property name="use_action_appearance">False</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="receives_default">True</property> <signal name="clicked" handler="on_button_dpause_clicked" swapped="no"/> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="position">1</property> </packing> </child> <child> <object class="GtkButton" id="button_dfresh"> <property name="label" translatable="yes">刷新</property> <property name="use_action_appearance">False</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="receives_default">True</property> <signal name="clicked" handler="on_button_dfresh_clicked" swapped="no"/> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="position">2</property> </packing> </child> <child> <object class="GtkButton" id="button_ddel"> <property name="label" translatable="yes">删除</property> <property name="use_action_appearance">False</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="receives_default">True</property> <signal name="clicked" handler="on_button_ddel_clicked" swapped="no"/> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property 
name="position">3</property> </packing> </child> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="position">0</property> </packing> </child> <child> <object class="GtkScrolledWindow" id="scrolledwindow1"> <property name="visible">True</property> <property name="can_focus">True</property> <property name="hscrollbar_policy">automatic</property> <property name="vscrollbar_policy">automatic</property> <child> <object class="GtkTreeView" id="treeview_domain"> <property name="width_request">220</property> <property name="height_request">400</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="model">liststore_domain</property> <property name="enable_search">False</property> <property name="search_column">0</property> <property name="show_expanders">False</property> <property name="enable_grid_lines">horizontal</property> <signal name="row-activated" handler="on_treeview_domain_row_activated" swapped="no"/> </object> </child> </object> <packing> <property name="expand">True</property> <property name="fill">True</property> <property name="position">1</property> </packing> </child> <child> <object class="GtkHBox" id="hbox4"> <property name="visible">True</property> <property name="can_focus">False</property> <child> <object class="GtkLabel" id="label5"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="label" translatable="yes">输入要解析的域名:</property> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="position">0</property> </packing> </child> <child> <object class="GtkEntry" id="entry1"> <property name="width_request">110</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="invisible_char">●</property> <property name="primary_icon_activatable">False</property> <property 
name="secondary_icon_activatable">False</property> <property name="primary_icon_sensitive">True</property> <property name="secondary_icon_sensitive">True</property> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="position">1</property> </packing> </child> <child> <object class="GtkButton" id="button_dadd"> <property name="label" translatable="yes">添加</property> <property name="use_action_appearance">False</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="receives_default">True</property> <signal name="clicked" handler="on_button_dadd_clicked" object="entry1" swapped="no"/> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="position">2</property> </packing> </child> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="padding">5</property> <property name="position">2</property> </packing> </child> <child> <object class="GtkLabel" id="label4"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="xalign">0</property> <property name="yalign">0</property> <property name="xpad">5</property> <property name="label" translatable="yes">添加域名后再到域名注册的地方将 DNS 修改为: f1g1ns1.dnspod.net f1g1ns2.dnspod.net</property> <property name="selectable">True</property> <attributes> <attribute name="foreground" value="#ffff00000000"/> </attributes> </object> <packing> <property name="expand">True</property> <property name="fill">True</property> <property name="position">3</property> </packing> </child> </object> </child> </object> <packing> <property name="resize">False</property> <property name="shrink">True</property> </packing> </child> <child> <object class="GtkViewport" id="viewport2"> <property name="visible">True</property> <property name="can_focus">False</property> <child> <object 
class="GtkVBox" id="vbox4"> <property name="visible">True</property> <property name="can_focus">False</property> <child> <object class="GtkHBox" id="hbox3"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="spacing">5</property> <child> <object class="GtkLabel" id="label_rdomain"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="label" translatable="yes">&lt;请先选择域名&gt;</property> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="padding">15</property> <property name="position">0</property> </packing> </child> <child> <object class="GtkButton" id="button_rpause"> <property name="label" translatable="yes">暂停/启用</property> <property name="use_action_appearance">False</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="receives_default">True</property> <signal name="clicked" handler="on_button_rpause_clicked" swapped="no"/> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="position">1</property> </packing> </child> <child> <object class="GtkButton" id="button_rdel"> <property name="label" translatable="yes">删除</property> <property name="use_action_appearance">False</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="receives_default">True</property> <signal name="clicked" handler="on_button_rdel_clicked" swapped="no"/> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="position">2</property> </packing> </child> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="position">0</property> </packing> </child> <child> <object class="GtkScrolledWindow" id="scrolledwindow2"> <property 
name="visible">True</property> <property name="can_focus">True</property> <property name="hscrollbar_policy">automatic</property> <property name="vscrollbar_policy">automatic</property> <child> <object class="GtkTreeView" id="treeview_record"> <property name="width_request">450</property> <property name="height_request">400</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="model">liststore_record</property> <property name="enable_search">False</property> <property name="search_column">0</property> <property name="show_expanders">False</property> <property name="enable_grid_lines">both</property> </object> </child> </object> <packing> <property name="expand">True</property> <property name="fill">True</property> <property name="position">1</property> </packing> </child> <child> <object class="GtkTable" id="table1"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="n_rows">3</property> <property name="n_columns">6</property> <child> <object class="GtkLabel" id="label7"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="label" translatable="yes">主机</property> </object> <packing> <property name="top_attach">1</property> <property name="bottom_attach">2</property> </packing> </child> <child> <object class="GtkLabel" id="label8"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="label" translatable="yes">记录类型</property> </object> <packing> <property name="left_attach">1</property> <property name="right_attach">2</property> <property name="top_attach">1</property> <property name="bottom_attach">2</property> </packing> </child> <child> <object class="GtkLabel" id="label9"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="label" translatable="yes">线路类型</property> </object> <packing> <property 
name="left_attach">2</property> <property name="right_attach">3</property> <property name="top_attach">1</property> <property name="bottom_attach">2</property> </packing> </child> <child> <object class="GtkLabel" id="label10"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="label" translatable="yes">记录值</property> </object> <packing> <property name="left_attach">3</property> <property name="right_attach">4</property> <property name="top_attach">1</property> <property name="bottom_attach">2</property> </packing> </child> <child> <object class="GtkLabel" id="label11"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="label" translatable="yes">MX</property> </object> <packing> <property name="left_attach">4</property> <property name="right_attach">5</property> <property name="top_attach">1</property> <property name="bottom_attach">2</property> </packing> </child> <child> <object class="GtkLabel" id="label12"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="label" translatable="yes">TTL</property> </object> <packing> <property name="left_attach">5</property> <property name="right_attach">6</property> <property name="top_attach">1</property> <property name="bottom_attach">2</property> </packing> </child> <child> <object class="GtkEntry" id="entry_host"> <property name="width_request">50</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="invisible_char">●</property> <property name="primary_icon_activatable">False</property> <property name="secondary_icon_activatable">False</property> <property name="primary_icon_sensitive">True</property> <property name="secondary_icon_sensitive">True</property> </object> <packing> <property name="top_attach">2</property> <property name="bottom_attach">3</property> <property name="x_options"></property> </packing> </child> 
<child> <object class="GtkEntry" id="entry_value"> <property name="width_request">50</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="invisible_char">●</property> <property name="invisible_char_set">True</property> <property name="primary_icon_activatable">False</property> <property name="secondary_icon_activatable">False</property> <property name="primary_icon_sensitive">True</property> <property name="secondary_icon_sensitive">True</property> </object> <packing> <property name="left_attach">3</property> <property name="right_attach">4</property> <property name="top_attach">2</property> <property name="bottom_attach">3</property> <property name="x_options"></property> </packing> </child> <child> <object class="GtkEntry" id="entry_mx"> <property name="width_request">50</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="invisible_char">●</property> <property name="text" translatable="yes">5</property> <property name="invisible_char_set">True</property> <property name="primary_icon_activatable">False</property> <property name="secondary_icon_activatable">False</property> <property name="primary_icon_sensitive">True</property> <property name="secondary_icon_sensitive">True</property> </object> <packing> <property name="left_attach">4</property> <property name="right_attach">5</property> <property name="top_attach">2</property> <property name="bottom_attach">3</property> <property name="x_options"></property> </packing> </child> <child> <object class="GtkEntry" id="entry_ttl"> <property name="width_request">50</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="invisible_char">●</property> <property name="text" translatable="yes">600</property> <property name="invisible_char_set">True</property> <property name="primary_icon_activatable">False</property> <property 
name="secondary_icon_activatable">False</property> <property name="primary_icon_sensitive">True</property> <property name="secondary_icon_sensitive">True</property> </object> <packing> <property name="left_attach">5</property> <property name="right_attach">6</property> <property name="top_attach">2</property> <property name="bottom_attach">3</property> <property name="x_options"></property> </packing> </child> <child> <object class="GtkLabel" id="label6"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="xalign">0</property> <property name="label" translatable="yes">添加记录:</property> <attributes> <attribute name="weight" value="heavy"/> </attributes> </object> </child> <child> <object class="GtkButton" id="button_radd"> <property name="label" translatable="yes">添加</property> <property name="use_action_appearance">False</property> <property name="visible">True</property> <property name="can_focus">True</property> <property name="receives_default">True</property> <signal name="clicked" handler="on_button_radd_clicked" swapped="no"/> </object> <packing> <property name="left_attach">4</property> <property name="right_attach">5</property> <property name="x_options"></property> <property name="y_options"></property> </packing> </child> <child> <object class="GtkComboBox" id="combobox_rtype"> <property name="width_request">80</property> <property name="visible">True</property> <property name="can_focus">False</property> <property name="model">liststore_recordtype</property> <property name="has_entry">True</property> <property name="entry_text_column">0</property> </object> <packing> <property name="left_attach">1</property> <property name="right_attach">2</property> <property name="top_attach">2</property> <property name="bottom_attach">3</property> <property name="x_options"></property> </packing> </child> <child> <object class="GtkComboBox" id="combobox_ltype"> <property name="width_request">80</property> <property 
name="visible">True</property> <property name="can_focus">False</property> <property name="model">liststore_linetype</property> <property name="has_entry">True</property> <property name="entry_text_column">0</property> </object> <packing> <property name="left_attach">2</property> <property name="right_attach">3</property> <property name="top_attach">2</property> <property name="bottom_attach">3</property> <property name="x_options"></property> </packing> </child> <child> <placeholder/> </child> <child> <placeholder/> </child> <child> <placeholder/> </child> <child> <placeholder/> </child> </object> <packing> <property name="expand">True</property> <property name="fill">True</property> <property name="position">2</property> </packing> </child> </object> </child> </object> <packing> <property name="resize">True</property> <property name="shrink">True</property> </packing> </child> </object> <packing> <property name="expand">True</property> <property name="fill">True</property> <property name="position">1</property> </packing> </child> <child> <object class="GtkHSeparator" id="hseparator2"> <property name="visible">True</property> <property name="can_focus">False</property> </object> <packing> <property name="expand">False</property> <property name="fill">True</property> <property name="padding">5</property> <property name="position">2</property> </packing> </child> <child> <object class="GtkStatusbar" id="statusbar"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="spacing">5</property> <child> <object class="GtkLabel" id="label_status"> <property name="visible">True</property> <property name="can_focus">False</property> <property name="label" translatable="yes">作者:&lt;b&gt;龙昌&lt;/b&gt; 博客:&lt;b&gt;http://www.xefan.com&lt;/b&gt;</property> <property name="use_markup">True</property> <property name="selectable">True</property> </object> <packing> <property name="expand">False</property> <property 
name="fill">False</property> <property name="position">0</property> </packing> </child> </object> <packing> <property name="expand">False</property> <property name="fill">False</property> <property name="position">3</property> </packing> </child> </object> </child> </object> </interface> """
49.079511
136
0.461129
3,873
48,147
5.648593
0.08882
0.24848
0.312657
0.123417
0.919505
0.903917
0.885862
0.877863
0.836084
0.831238
0
0.016194
0.412591
48,147
980
137
49.129592
0.757089
0.00864
0
0.82881
0
0.006263
0.998973
0.343164
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
1
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
10
7e6102e87077415a03fb97771b8d77955e90be73
189
py
Python
CAGG-Molecular-Discovery/objective_func.py
csjtx1021/CAGG
67fde2f1488ee6e2ff137e87860b5243c5b5fe7c
[ "MIT" ]
7
2020-09-05T01:50:06.000Z
2021-09-29T13:33:35.000Z
CAGG-Molecular-Discovery/objective_func.py
csjtx1021/CAND
67fde2f1488ee6e2ff137e87860b5243c5b5fe7c
[ "MIT" ]
null
null
null
CAGG-Molecular-Discovery/objective_func.py
csjtx1021/CAND
67fde2f1488ee6e2ff137e87860b5243c5b5fe7c
[ "MIT" ]
1
2021-12-07T03:16:24.000Z
2021-12-07T03:16:24.000Z
#!/usr/bin/env/python import check_property #real evaluation function def evaluate_point(input,target=None): return check_property.check_property(input["smiles"], target=target)
21
72
0.772487
25
189
5.68
0.72
0.274648
0
0
0
0
0
0
0
0
0
0
0.116402
189
8
73
23.625
0.850299
0.232804
0
0
0
0
0.041958
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
7
7e70b971824a8608a311d77b923989c756d4c624
190
py
Python
profit/dataset/__init__.py
ayushkarnawat/profit
f3c4d601078b52513af6832c3faf75ddafc59ac5
[ "MIT" ]
null
null
null
profit/dataset/__init__.py
ayushkarnawat/profit
f3c4d601078b52513af6832c3faf75ddafc59ac5
[ "MIT" ]
1
2021-09-15T13:13:12.000Z
2021-09-15T13:13:12.000Z
profit/dataset/__init__.py
ayushkarnawat/profit
f3c4d601078b52513af6832c3faf75ddafc59ac5
[ "MIT" ]
null
null
null
from profit.dataset import parsers from profit.dataset import preprocessing from profit.dataset import preprocessors from profit.dataset import splitters from profit.dataset import generator
38
40
0.873684
25
190
6.64
0.36
0.301205
0.512048
0.692771
0
0
0
0
0
0
0
0
0.1
190
5
41
38
0.97076
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
7e792f65d1e9163646fabd276dc8c3603a62b4cd
11,735
py
Python
tests/t_multi.py
carlosefr/python-kyototycoon-ng
3d1b5d974b50ea44ed95bee0c08bf00d93ac4834
[ "BSD-3-Clause" ]
null
null
null
tests/t_multi.py
carlosefr/python-kyototycoon-ng
3d1b5d974b50ea44ed95bee0c08bf00d93ac4834
[ "BSD-3-Clause" ]
null
null
null
tests/t_multi.py
carlosefr/python-kyototycoon-ng
3d1b5d974b50ea44ed95bee0c08bf00d93ac4834
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python # # Copyright 2011, Toru Maesaka # # Redistribution and use of this source code is licensed under # the BSD license. See COPYING file for license description. # # Kyoto Tycoon should be started like this: # $ ktserver one.kch two.kch import config import time import unittest from kyototycoon import KyotoTycoon, KyotoTycoonException DB_1 = 0 DB_2 = 1 DB_INVALID = 3 class UnitTest(unittest.TestCase): def setUp(self): self.kt_handle_http = KyotoTycoon(binary=False) self.kt_handle_http.open(port=11978) self.kt_handle_bin = KyotoTycoon(binary=True) self.kt_handle_bin.open(port=11978) self.LARGE_KEY_LEN = 8000 def tearDown(self): self.kt_handle_http.close() self.kt_handle_bin.close() def clear_all(self): self.assertTrue(self.kt_handle_http.clear(db=DB_1)) self.assertTrue(self.kt_handle_http.clear(db=DB_2)) return True def test_status(self): status = self.kt_handle_http.status(DB_1) assert status is not None status = self.kt_handle_http.status(DB_2) assert status is not None self.assertRaises(KyotoTycoonException, self.kt_handle_http.status, DB_INVALID) self.assertRaises(KyotoTycoonException, self.kt_handle_http.status, 'non_existent') def test_set_get(self): self.assertTrue(self.clear_all()) self.assertTrue(self.kt_handle_http.set('ice', 'cream', db=DB_2)) self.assertRaises(KyotoTycoonException, self.kt_handle_http.set, 'palo', 'alto', db=DB_INVALID) self.assertEqual(self.kt_handle_http.get('ice'), None) self.assertEqual(self.kt_handle_http.get('ice', db=DB_1), None) self.assertRaises(KyotoTycoonException, self.kt_handle_http.get, 'ice', db=DB_INVALID) self.assertEqual(self.kt_handle_http.get('ice', db=DB_2), 'cream') self.assertEqual(self.kt_handle_http.count(db=DB_1), 0) self.assertEqual(self.kt_handle_http.count(db=DB_2), 1) self.assertTrue(self.kt_handle_http.set('frozen', 'yoghurt', db=DB_1)) self.assertEqual(self.kt_handle_http.count(db=DB_1), 1) self.assertEqual(self.kt_handle_http.get('frozen'), 'yoghurt') 
self.assertEqual(self.kt_handle_http.get('frozen', db=DB_1), 'yoghurt') self.assertEqual(self.kt_handle_http.get('frozen', db=DB_2), None) self.assertRaises(KyotoTycoonException, self.kt_handle_http.get, 'frozen', db=DB_INVALID) self.assertTrue(self.kt_handle_http.clear(db=DB_1)) self.assertEqual(self.kt_handle_http.count(db=DB_1), 0) self.assertEqual(self.kt_handle_http.count(db=DB_2), 1) def test_get_multi(self): self.assertTrue(self.clear_all()) self.assertTrue(self.kt_handle_http.set('a', 'xxxx', db=DB_1)) self.assertTrue(self.kt_handle_http.set_bulk({'b': 'yyyy', 'c': 'zzzz'}, db=DB_1)) self.assertTrue(self.kt_handle_http.set('a1', 'xxxx', db=DB_2)) self.assertTrue(self.kt_handle_http.set_bulk({'b1': 'yyyy', 'c1': 'zzzz'}, db=DB_2)) d = self.kt_handle_http.get_bulk(['a', 'b', 'c'], db=DB_1) self.assertEqual(len(d), 3) self.assertEqual(d['a'], 'xxxx') self.assertEqual(d['b'], 'yyyy') self.assertEqual(d['c'], 'zzzz') d = self.kt_handle_http.get_bulk(['a', 'b', 'c'], db=DB_2) self.assertEqual(len(d), 0) d = self.kt_handle_http.get_bulk(['a1', 'b1', 'c1'], db=DB_2) self.assertEqual(len(d), 3) self.assertEqual(d['a1'], 'xxxx') self.assertEqual(d['b1'], 'yyyy') self.assertEqual(d['c1'], 'zzzz') d = self.kt_handle_http.get_bulk(['a1', 'b1', 'c1'], db=DB_1) self.assertEqual(len(d), 0) def test_add(self): self.assertTrue(self.clear_all()) # Should not conflict due to different databases. self.assertTrue(self.kt_handle_http.add('key1', 'val1', db=DB_1)) self.assertTrue(self.kt_handle_http.add('key1', 'val1', db=DB_2)) # Now they should. 
self.assertRaises(KyotoTycoonException, self.kt_handle_http.add, 'key1', 'val1', db=DB_1) self.assertRaises(KyotoTycoonException, self.kt_handle_http.add, 'key1', 'val1', db=DB_2) self.assertRaises(KyotoTycoonException, self.kt_handle_http.add, 'key1', 'val1', db=DB_INVALID) def test_check(self): self.assertTrue(self.kt_handle_http.set('check1', 'abc', db=DB_2)) self.assertTrue(self.kt_handle_http.check('check1', db=DB_2)) self.assertFalse(self.kt_handle_http.check('check1', db=DB_1)) def test_seize(self): self.assertTrue(self.kt_handle_http.set('seize1', 'abc', db=DB_2)) self.assertEqual(self.kt_handle_http.get('seize1', db=DB_2), 'abc') self.assertEqual(self.kt_handle_http.get('seize1', db=DB_1), None) self.assertEqual(self.kt_handle_http.seize('seize1', db=DB_2), 'abc') self.assertEqual(self.kt_handle_http.get('seize1', db=DB_2), None) def test_replace(self): self.assertTrue(self.clear_all()) self.assertTrue(self.kt_handle_http.add('key1', 'val1', db=DB_1)) self.assertFalse(self.kt_handle_http.replace('key1', 'val2', db=DB_2)) self.assertTrue(self.kt_handle_http.replace('key1', 'val2', db=DB_1)) self.assertFalse(self.kt_handle_http.replace('key1', 'val2', db=DB_INVALID)) self.assertTrue(self.kt_handle_http.add('key2', 'aaa')) self.assertTrue(self.kt_handle_http.replace('key2', 'bbb')) self.assertTrue(self.kt_handle_http.replace('key1', 'zzz')) self.assertEqual(self.kt_handle_http.get('key2'), 'bbb') self.assertEqual(self.kt_handle_http.get('key1'), 'zzz') def test_cas(self): self.assertTrue(self.clear_all()) self.assertTrue(self.kt_handle_http.set('key', 'xxx')) self.assertEqual(self.kt_handle_http.get('key', db=DB_2), None) self.assertRaises(KyotoTycoonException, self.kt_handle_http.cas, 'key', old_val='xxx', new_val='yyy', db=DB_2) self.assertEqual(self.kt_handle_http.get('key', db=DB_1), 'xxx') self.assertTrue(self.kt_handle_http.cas('key', old_val='xxx', new_val='yyy', db=DB_1)) self.assertTrue(self.kt_handle_http.cas('key', new_val='xxx', db=DB_2)) def 
test_remove(self): self.assertTrue(self.clear_all()) self.assertTrue(self.kt_handle_http.add('key', 'value', db=DB_1)) self.assertTrue(self.kt_handle_http.add('key', 'value', db=DB_2)) self.assertTrue(self.kt_handle_http.remove('key', db=DB_1)) self.assertEqual(self.kt_handle_http.get('key', db=DB_2), 'value') assert self.kt_handle_http.get('key', db=DB_1) is None def test_vacuum(self): self.assertTrue(self.kt_handle_http.vacuum()) self.assertTrue(self.kt_handle_http.vacuum(db=DB_1)) self.assertTrue(self.kt_handle_http.vacuum(db=DB_2)) self.assertRaises(KyotoTycoonException, self.kt_handle_http.vacuum, db=DB_INVALID) def test_append(self): self.assertTrue(self.clear_all()) self.assertTrue(self.kt_handle_http.set('key', 'xxx', db=DB_1)) self.assertTrue(self.kt_handle_http.set('key', 'xxx', db=DB_2)) self.assertTrue(self.kt_handle_http.append('key', 'xxx', db=DB_1)) self.assertEqual(self.kt_handle_http.get('key', db=DB_1), 'xxxxxx') self.assertEqual(self.kt_handle_http.get('key', db=DB_2), 'xxx') def test_increment(self): self.assertTrue(self.clear_all()) self.assertEqual(self.kt_handle_http.increment('key', 0, db=DB_1), 0) self.assertEqual(self.kt_handle_http.increment('key', 0, db=DB_2), 0) self.assertEqual(self.kt_handle_http.increment('key', 100, db=DB_1), 100) self.assertEqual(self.kt_handle_http.increment('key', 200, db=DB_2), 200) self.assertEqual(self.kt_handle_http.increment('key', 100, db=DB_1), 200) self.assertEqual(self.kt_handle_http.increment('key', 200, db=DB_2), 400) self.assertEqual(self.kt_handle_http.get_int('key', db=DB_1), 200) self.assertEqual(self.kt_handle_http.get_int('key', db=DB_2), 400) def test_match_prefix(self): self.assertTrue(self.clear_all()) self.assertTrue(self.kt_handle_http.set('abcdef', 'val', db=DB_1)) self.assertTrue(self.kt_handle_http.set('fedcba', 'val', db=DB_2)) list = self.kt_handle_http.match_prefix('abc', db=DB_1) self.assertEqual(len(list), 1) self.assertEqual(list[0], 'abcdef') list = 
self.kt_handle_http.match_prefix('abc', db=DB_2) self.assertEqual(len(list), 0) list = self.kt_handle_http.match_prefix('fed', db=DB_1) self.assertEqual(len(list), 0) list = self.kt_handle_http.match_prefix('fed', db=DB_2) self.assertEqual(len(list), 1) self.assertEqual(list[0], 'fedcba') def test_set_get_bin(self): self.assertTrue(self.clear_all()) self.assertTrue(self.kt_handle_bin.set('ice', 'cream', db=DB_2)) self.assertFalse(self.kt_handle_bin.set('palo', 'alto', db=DB_INVALID)) self.assertEqual(self.kt_handle_bin.get('ice'), None) self.assertEqual(self.kt_handle_bin.get('ice', db=DB_1), None) self.assertFalse(self.kt_handle_bin.get('ice', db=DB_INVALID)) self.assertEqual(self.kt_handle_bin.get('ice', db=DB_2), 'cream') self.assertEqual(self.kt_handle_http.count(db=DB_1), 0) self.assertEqual(self.kt_handle_http.count(db=DB_2), 1) self.assertTrue(self.kt_handle_bin.set('frozen', 'yoghurt', db=DB_1)) self.assertEqual(self.kt_handle_http.count(db=DB_1), 1) self.assertEqual(self.kt_handle_bin.get('frozen'), 'yoghurt') self.assertEqual(self.kt_handle_bin.get('frozen', db=DB_1), 'yoghurt') self.assertEqual(self.kt_handle_bin.get('frozen', db=DB_2), None) self.assertFalse(self.kt_handle_bin.get('frozen', db=DB_INVALID), None) self.assertTrue(self.kt_handle_http.clear(db=DB_1)) self.assertEqual(self.kt_handle_http.count(db=DB_1), 0) self.assertEqual(self.kt_handle_http.count(db=DB_2), 1) def test_get_multi_bin(self): self.assertTrue(self.clear_all()) self.assertTrue(self.kt_handle_bin.set('a', 'xxxx', db=DB_1)) self.assertTrue(self.kt_handle_bin.set_bulk({'b': 'yyyy', 'c': 'zzzz'}, db=DB_1)) self.assertTrue(self.kt_handle_bin.set('a1', 'xxxx', db=DB_2)) self.assertTrue(self.kt_handle_bin.set_bulk({'b1': 'yyyy', 'c1': 'zzzz'}, db=DB_2)) d = self.kt_handle_bin.get_bulk(['a', 'b', 'c'], db=DB_1, atomic=False) self.assertEqual(len(d), 3) self.assertEqual(d['a'], 'xxxx') self.assertEqual(d['b'], 'yyyy') self.assertEqual(d['c'], 'zzzz') d = self.kt_handle_bin.get_bulk(['a', 
'b', 'c'], db=DB_2, atomic=False) self.assertEqual(len(d), 0) d = self.kt_handle_bin.get_bulk(['a1', 'b1', 'c1'], db=DB_2, atomic=False) self.assertEqual(len(d), 3) self.assertEqual(d['a1'], 'xxxx') self.assertEqual(d['b1'], 'yyyy') self.assertEqual(d['c1'], 'zzzz') d = self.kt_handle_bin.get_bulk(['a1', 'b1', 'c1'], db=DB_1, atomic=False) self.assertEqual(len(d), 0) def test_remove_bin(self): self.assertTrue(self.clear_all()) self.assertTrue(self.kt_handle_bin.set('key', 'value', db=DB_1)) self.assertTrue(self.kt_handle_bin.set('key', 'value', db=DB_2)) self.assertTrue(self.kt_handle_bin.remove('key', db=DB_1)) self.assertEqual(self.kt_handle_bin.get('key', db=DB_2), 'value') assert self.kt_handle_bin.get('key', db=DB_1) is None if __name__ == '__main__': unittest.main()
45.839844
118
0.664082
1,783
11,735
4.132361
0.084689
0.100163
0.200326
0.208469
0.875679
0.859799
0.853827
0.805239
0.737921
0.661102
0
0.023861
0.175032
11,735
255
119
46.019608
0.737217
0.025991
0
0.273196
0
0
0.06314
0
0
0
0
0
0.737113
1
0.097938
false
0
0.020619
0
0.128866
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
9
7e7f377f9258b4e6efedc0d5406fd637c53d30b4
139,936
py
Python
dlkit/services/learning.py
UOC/dlkit
a9d265db67e81b9e0f405457464e762e2c03f769
[ "MIT" ]
2
2018-02-23T12:16:11.000Z
2020-10-08T17:54:24.000Z
dlkit/services/learning.py
UOC/dlkit
a9d265db67e81b9e0f405457464e762e2c03f769
[ "MIT" ]
87
2017-04-21T18:57:15.000Z
2021-12-13T19:43:57.000Z
dlkit/services/learning.py
UOC/dlkit
a9d265db67e81b9e0f405457464e762e2c03f769
[ "MIT" ]
1
2018-03-01T16:44:25.000Z
2018-03-01T16:44:25.000Z
"""DLKit Services implementations of learning service.""" # pylint: disable=no-init # osid specification includes some 'marker' interfaces. # pylint: disable=too-many-ancestors # number of ancestors defined in spec. # pylint: disable=too-few-public-methods,too-many-public-methods # number of methods defined in spec. Worse yet, these are aggregates. # pylint: disable=invalid-name # method and class names defined in spec. # pylint: disable=no-self-use,unused-argument # to catch unimplemented methods. # pylint: disable=super-init-not-called # it just isn't. from . import osid from .osid_errors import Unimplemented, IllegalState, InvalidArgument from dlkit.abstract_osid.learning import objects as abc_learning_objects from dlkit.manager_impls.learning import managers as learning_managers DEFAULT = 0 COMPARATIVE = 0 PLENARY = 1 FEDERATED = 0 ISOLATED = 1 ANY_STATUS = 0 ACTIVE = 1 UNSEQUESTERED = 0 SEQUESTERED = 1 AUTOMATIC = 0 MANDATORY = 1 DISABLED = -1 class LearningProfile(osid.OsidProfile, learning_managers.LearningProfile): """LearningProfile convenience adapter including related Session methods.""" def __init__(self): self._provider_manager = None def supports_objective_lookup(self): """Pass through to provider supports_objective_lookup""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_lookup() def supports_objective_query(self): """Pass through to provider supports_objective_query""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_query() def supports_objective_admin(self): """Pass through to provider supports_objective_admin""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_admin() def supports_objective_hierarchy(self): """Pass through to provider 
supports_objective_hierarchy""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_hierarchy() def supports_objective_hierarchy_design(self): """Pass through to provider supports_objective_hierarchy_design""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_hierarchy_design() def supports_objective_sequencing(self): """Pass through to provider supports_objective_sequencing""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_sequencing() def supports_objective_objective_bank(self): """Pass through to provider supports_objective_objective_bank""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_objective_bank() def supports_objective_objective_bank_assignment(self): """Pass through to provider supports_objective_objective_bank_assignment""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_objective_bank_assignment() def supports_objective_requisite(self): """Pass through to provider supports_objective_requisite""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_requisite() def supports_objective_requisite_assignment(self): """Pass through to provider supports_objective_requisite_assignment""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_requisite_assignment() def supports_activity_lookup(self): """Pass through to provider supports_activity_lookup""" # Implemented from kitosid template 
for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_activity_lookup() def supports_activity_query(self): """Pass through to provider supports_activity_query""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_activity_query() def supports_activity_admin(self): """Pass through to provider supports_activity_admin""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_activity_admin() def supports_activity_objective_bank(self): """Pass through to provider supports_activity_objective_bank""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_activity_objective_bank() def supports_activity_objective_bank_assignment(self): """Pass through to provider supports_activity_objective_bank_assignment""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_activity_objective_bank_assignment() def supports_proficiency_lookup(self): """Pass through to provider supports_proficiency_lookup""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_proficiency_lookup() def supports_proficiency_query(self): """Pass through to provider supports_proficiency_query""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_proficiency_query() def supports_proficiency_admin(self): """Pass through to provider supports_proficiency_admin""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_proficiency_admin() def 
supports_proficiency_objective_bank_assignment(self): """Pass through to provider supports_proficiency_objective_bank_assignment""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_proficiency_objective_bank_assignment() def supports_objective_bank_lookup(self): """Pass through to provider supports_objective_bank_lookup""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_bank_lookup() def supports_objective_bank_admin(self): """Pass through to provider supports_objective_bank_admin""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_bank_admin() def supports_objective_bank_hierarchy(self): """Pass through to provider supports_objective_bank_hierarchy""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_bank_hierarchy() def supports_objective_bank_hierarchy_design(self): """Pass through to provider supports_objective_bank_hierarchy_design""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.supports_resource_lookup return self._provider_manager.supports_objective_bank_hierarchy_design() def get_objective_record_types(self): """Pass through to provider get_objective_record_types""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.get_resource_record_types return self._provider_manager.get_objective_record_types() objective_record_types = property(fget=get_objective_record_types) def get_objective_search_record_types(self): """Pass through to provider get_objective_search_record_types""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.get_resource_record_types return 
self._provider_manager.get_objective_search_record_types() objective_search_record_types = property(fget=get_objective_search_record_types) def get_activity_record_types(self): """Pass through to provider get_activity_record_types""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.get_resource_record_types return self._provider_manager.get_activity_record_types() activity_record_types = property(fget=get_activity_record_types) def get_activity_search_record_types(self): """Pass through to provider get_activity_search_record_types""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.get_resource_record_types return self._provider_manager.get_activity_search_record_types() activity_search_record_types = property(fget=get_activity_search_record_types) def get_proficiency_record_types(self): """Pass through to provider get_proficiency_record_types""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.get_resource_record_types return self._provider_manager.get_proficiency_record_types() proficiency_record_types = property(fget=get_proficiency_record_types) def get_proficiency_search_record_types(self): """Pass through to provider get_proficiency_search_record_types""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.get_resource_record_types return self._provider_manager.get_proficiency_search_record_types() proficiency_search_record_types = property(fget=get_proficiency_search_record_types) def get_objective_bank_record_types(self): """Pass through to provider get_objective_bank_record_types""" # Implemented from kitosid template for - # osid.resource.ResourceProfile.get_resource_record_types return self._provider_manager.get_objective_bank_record_types() objective_bank_record_types = property(fget=get_objective_bank_record_types) def get_objective_bank_search_record_types(self): """Pass through to provider get_objective_bank_search_record_types""" # Implemented from 
kitosid template for - # osid.resource.ResourceProfile.get_resource_record_types return self._provider_manager.get_objective_bank_search_record_types() objective_bank_search_record_types = property(fget=get_objective_bank_search_record_types) class LearningManager(osid.OsidManager, osid.OsidSession, LearningProfile, learning_managers.LearningManager): """LearningManager convenience adapter including related Session methods.""" def __init__(self, proxy=None): self._runtime = None self._provider_manager = None self._provider_sessions = dict() self._session_management = AUTOMATIC self._objective_bank_view = DEFAULT # This is to initialize self._proxy osid.OsidSession.__init__(self, proxy) self._sub_package_provider_managers = dict() def _set_objective_bank_view(self, session): """Sets the underlying objective_bank view to match current view""" if self._objective_bank_view == COMPARATIVE: try: session.use_comparative_objective_bank_view() except AttributeError: pass else: try: session.use_plenary_objective_bank_view() except AttributeError: pass def _get_provider_session(self, session_name, proxy=None): """Gets the session for the provider""" agent_key = self._get_agent_key(proxy) if session_name in self._provider_sessions[agent_key]: return self._provider_sessions[agent_key][session_name] else: session = self._instantiate_session('get_' + session_name, self._proxy) self._set_objective_bank_view(session) if self._session_management != DISABLED: self._provider_sessions[agent_key][session_name] = session return session def _get_sub_package_provider_manager(self, sub_package_name): if sub_package_name in self._sub_package_provider_managers: return self._sub_package_provider_managers[sub_package_name] config = self._runtime.get_configuration() parameter_id = Id('parameter:{0}ProviderImpl@dlkit_service'.format(sub_package_name)) provider_impl = config.get_value_by_parameter(parameter_id).get_string_value() if self._proxy is None: # need to add version argument sub_package = 
self._runtime.get_manager(sub_package_name.upper(), provider_impl) else: # need to add version argument sub_package = self._runtime.get_proxy_manager(sub_package_name.upper(), provider_impl) self._sub_package_provider_managers[sub_package_name] = sub_package return sub_package def _get_sub_package_provider_session(self, sub_package, session_name, proxy=None): """Gets the session from a sub-package""" agent_key = self._get_agent_key(proxy) if session_name in self._provider_sessions[agent_key]: return self._provider_sessions[agent_key][session_name] else: manager = self._get_sub_package_provider_manager(sub_package) try: session = self._instantiate_session('get_' + session_name + '_for_bank', proxy=self._proxy, manager=manager) except AttributeError: session = self._instantiate_session('get_' + session_name, proxy=self._proxy, manager=manager) self._set_bank_view(session) if self._session_management != DISABLED: self._provider_sessions[agent_key][session_name] = session return session def _instantiate_session(self, method_name, proxy=None, *args, **kwargs): """Instantiates a provider session""" if 'manager' in kwargs: session_class = getattr(kwargs['manager'], method_name) del kwargs['manager'] else: session_class = getattr(self._provider_manager, method_name) if proxy is None: try: return session_class(bank_id=self._catalog_id, *args, **kwargs) except AttributeError: return session_class(*args, **kwargs) else: try: return session_class(bank_id=self._catalog_id, proxy=proxy, *args, **kwargs) except AttributeError: return session_class(proxy=proxy, *args, **kwargs) def initialize(self, runtime): """OSID Manager initialize""" from .primitives import Id if self._runtime is not None: raise IllegalState('Manager has already been initialized') self._runtime = runtime config = runtime.get_configuration() parameter_id = Id('parameter:learningProviderImpl@dlkit_service') provider_impl = config.get_value_by_parameter(parameter_id).get_string_value() if self._proxy is None: # 
need to add version argument
            self._provider_manager = runtime.get_manager('LEARNING', provider_impl)
        else:
            # need to add version argument
            # NOTE(review): proxy branch of the manager bootstrap — the matching
            # `if` and the method signature are above this chunk; confirm there.
            self._provider_manager = runtime.get_proxy_manager('LEARNING', provider_impl)

    def close_sessions(self):
        """Close all sessions, unless session management is set to MANDATORY"""
        # MANDATORY session management keeps cached sessions alive no matter
        # what consumers request; every other mode drops the session cache.
        if self._session_management != MANDATORY:
            self._provider_sessions = dict()

    def use_automatic_session_management(self):
        """Session state will be saved unless closed by consumers"""
        self._session_management = AUTOMATIC

    def use_mandatory_session_management(self):
        """Session state will be saved and can not be closed by consumers"""
        self._session_management = MANDATORY

    def disable_session_management(self):
        """Session state will never be saved"""
        self._session_management = DISABLED
        # DISABLED is never MANDATORY, so this always clears the session cache.
        self.close_sessions()

    # The getters below are generated pass-throughs: each delegates directly to
    # the wrapped provider manager, and each no-argument form is also exposed
    # as a read-only property immediately after its definition.

    def get_objective_lookup_session(self, *args, **kwargs):
        """Pass through to provider get_objective_lookup_session"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template
        return self._provider_manager.get_objective_lookup_session(*args, **kwargs)

    objective_lookup_session = property(fget=get_objective_lookup_session)

    def get_objective_lookup_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider get_objective_lookup_session_for_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template
        return self._provider_manager.get_objective_lookup_session_for_objective_bank(*args, **kwargs)

    def get_objective_query_session(self, *args, **kwargs):
        """Pass through to provider get_objective_query_session"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template
        return self._provider_manager.get_objective_query_session(*args, **kwargs)

    objective_query_session = property(fget=get_objective_query_session)

    def get_objective_query_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider get_objective_query_session_for_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template
        return self._provider_manager.get_objective_query_session_for_objective_bank(*args, **kwargs)

    def get_objective_admin_session(self, *args, **kwargs):
        """Pass through to provider get_objective_admin_session"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template
        return self._provider_manager.get_objective_admin_session(*args, **kwargs)

    objective_admin_session = property(fget=get_objective_admin_session)

    def get_objective_admin_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider get_objective_admin_session_for_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template
        return self._provider_manager.get_objective_admin_session_for_objective_bank(*args, **kwargs)

    def get_objective_hierarchy_session(self, *args, **kwargs):
        """Pass through to provider get_objective_hierarchy_session"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template
        return self._provider_manager.get_objective_hierarchy_session(*args, **kwargs)

    objective_hierarchy_session = property(fget=get_objective_hierarchy_session)

    def get_objective_hierarchy_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider get_objective_hierarchy_session_for_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template
        return self._provider_manager.get_objective_hierarchy_session_for_objective_bank(*args, **kwargs)

    def get_objective_hierarchy_design_session(self, *args, **kwargs):
        """Pass through to provider get_objective_hierarchy_design_session"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template
        return self._provider_manager.get_objective_hierarchy_design_session(*args, **kwargs)

    objective_hierarchy_design_session = property(fget=get_objective_hierarchy_design_session)

    def get_objective_hierarchy_design_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider get_objective_hierarchy_design_session_for_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template
        return self._provider_manager.get_objective_hierarchy_design_session_for_objective_bank(*args, **kwargs)

    def get_objective_sequencing_session(self, *args, **kwargs):
        """Pass through to provider get_objective_sequencing_session"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template
        return self._provider_manager.get_objective_sequencing_session(*args, **kwargs)

    objective_sequencing_session = property(fget=get_objective_sequencing_session)

    def get_objective_sequencing_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider get_objective_sequencing_session_for_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template
        return self._provider_manager.get_objective_sequencing_session_for_objective_bank(*args, **kwargs)

    def get_objective_objective_bank_session(self, *args, **kwargs):
        """Pass through to provider get_objective_objective_bank_session"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_manager_template
        return self._provider_manager.get_objective_objective_bank_session(*args, **kwargs)

    objective_objective_bank_session = property(fget=get_objective_objective_bank_session)

    def get_objective_objective_bank_assignment_session(self, *args, **kwargs):
        """Pass through to provider get_objective_objective_bank_assignment_session"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_manager_template
        return self._provider_manager.get_objective_objective_bank_assignment_session(*args, **kwargs)

    objective_objective_bank_assignment_session = property(fget=get_objective_objective_bank_assignment_session)

    def get_objective_requisite_session(self, *args, **kwargs):
        """Pass through to provider get_objective_requisite_session"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template
        return self._provider_manager.get_objective_requisite_session(*args, **kwargs)

    objective_requisite_session = property(fget=get_objective_requisite_session)

    def get_objective_requisite_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider get_objective_requisite_session_for_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template
        return self._provider_manager.get_objective_requisite_session_for_objective_bank(*args, **kwargs)

    def get_objective_requisite_assignment_session(self, *args, **kwargs):
        """Pass through to provider get_objective_requisite_assignment_session"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template
        return self._provider_manager.get_objective_requisite_assignment_session(*args, **kwargs)

    objective_requisite_assignment_session = property(fget=get_objective_requisite_assignment_session)

    def get_objective_requisite_assignment_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider get_objective_requisite_assignment_session_for_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template
        return self._provider_manager.get_objective_requisite_assignment_session_for_objective_bank(*args, **kwargs)

    def get_activity_lookup_session(self, *args, **kwargs):
        """Pass through to provider get_activity_lookup_session"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template
        return self._provider_manager.get_activity_lookup_session(*args, **kwargs)

    activity_lookup_session = property(fget=get_activity_lookup_session)

    def get_activity_lookup_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider get_activity_lookup_session_for_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template
        return self._provider_manager.get_activity_lookup_session_for_objective_bank(*args, **kwargs)

    def get_activity_query_session(self, *args, **kwargs):
        """Pass through to provider get_activity_query_session"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template
        return self._provider_manager.get_activity_query_session(*args, **kwargs)

    activity_query_session = property(fget=get_activity_query_session)

    def get_activity_query_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider get_activity_query_session_for_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template
        return self._provider_manager.get_activity_query_session_for_objective_bank(*args, **kwargs)

    def get_activity_admin_session(self, *args, **kwargs):
        """Pass through to provider get_activity_admin_session"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template
        return self._provider_manager.get_activity_admin_session(*args, **kwargs)
activity_admin_session = property(fget=get_activity_admin_session) def get_activity_admin_session_for_objective_bank(self, *args, **kwargs): """Pass through to provider get_activity_admin_session_for_objective_bank""" # Implemented from kitosid template for - # osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template return self._provider_manager.get_activity_admin_session_for_objective_bank(*args, **kwargs) def get_activity_objective_bank_session(self, *args, **kwargs): """Pass through to provider get_activity_objective_bank_session""" # Implemented from kitosid template for - # osid.resource.ResourceManager.get_resource_lookup_session_manager_template return self._provider_manager.get_activity_objective_bank_session(*args, **kwargs) activity_objective_bank_session = property(fget=get_activity_objective_bank_session) def get_activity_objective_bank_assignment_session(self, *args, **kwargs): """Pass through to provider get_activity_objective_bank_assignment_session""" # Implemented from kitosid template for - # osid.resource.ResourceManager.get_resource_lookup_session_manager_template return self._provider_manager.get_activity_objective_bank_assignment_session(*args, **kwargs) activity_objective_bank_assignment_session = property(fget=get_activity_objective_bank_assignment_session) def get_proficiency_lookup_session(self, *args, **kwargs): """Pass through to provider get_proficiency_lookup_session""" # Implemented from kitosid template for - # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template return self._provider_manager.get_proficiency_lookup_session(*args, **kwargs) proficiency_lookup_session = property(fget=get_proficiency_lookup_session) def get_proficiency_lookup_session_for_objective_bank(self, *args, **kwargs): """Pass through to provider get_proficiency_lookup_session_for_objective_bank""" # Implemented from kitosid template for - # 
osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template return self._provider_manager.get_proficiency_lookup_session_for_objective_bank(*args, **kwargs) def get_proficiency_query_session(self, *args, **kwargs): """Pass through to provider get_proficiency_query_session""" # Implemented from kitosid template for - # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template return self._provider_manager.get_proficiency_query_session(*args, **kwargs) proficiency_query_session = property(fget=get_proficiency_query_session) def get_proficiency_query_session_for_objective_bank(self, *args, **kwargs): """Pass through to provider get_proficiency_query_session_for_objective_bank""" # Implemented from kitosid template for - # osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template return self._provider_manager.get_proficiency_query_session_for_objective_bank(*args, **kwargs) def get_proficiency_admin_session(self, *args, **kwargs): """Pass through to provider get_proficiency_admin_session""" # Implemented from kitosid template for - # osid.resource.ResourceManager.get_resource_lookup_session_catalog_template return self._provider_manager.get_proficiency_admin_session(*args, **kwargs) proficiency_admin_session = property(fget=get_proficiency_admin_session) def get_proficiency_admin_session_for_objective_bank(self, *args, **kwargs): """Pass through to provider get_proficiency_admin_session_for_objective_bank""" # Implemented from kitosid template for - # osid.resource.ResourceManager.get_resource_lookup_session_for_bin_catalog_template return self._provider_manager.get_proficiency_admin_session_for_objective_bank(*args, **kwargs) def get_proficiency_objective_bank_assignment_session(self, *args, **kwargs): """Pass through to provider get_proficiency_objective_bank_assignment_session""" # Implemented from kitosid template for - # osid.resource.ResourceManager.get_resource_lookup_session_manager_template 
return self._provider_manager.get_proficiency_objective_bank_assignment_session(*args, **kwargs) proficiency_objective_bank_assignment_session = property(fget=get_proficiency_objective_bank_assignment_session) def get_objective_bank_lookup_session(self, *args, **kwargs): """Pass through to provider get_objective_bank_lookup_session""" # Implemented from kitosid template for - # osid.resource.ResourceManager.get_resource_lookup_session_manager_template return self._provider_manager.get_objective_bank_lookup_session(*args, **kwargs) objective_bank_lookup_session = property(fget=get_objective_bank_lookup_session) def get_objective_bank_admin_session(self, *args, **kwargs): """Pass through to provider get_objective_bank_admin_session""" # Implemented from kitosid template for - # osid.resource.ResourceManager.get_resource_lookup_session_manager_template return self._provider_manager.get_objective_bank_admin_session(*args, **kwargs) objective_bank_admin_session = property(fget=get_objective_bank_admin_session) def get_objective_bank_hierarchy_session(self, *args, **kwargs): """Pass through to provider get_objective_bank_hierarchy_session""" # Implemented from kitosid template for - # osid.resource.ResourceManager.get_resource_lookup_session_manager_template return self._provider_manager.get_objective_bank_hierarchy_session(*args, **kwargs) objective_bank_hierarchy_session = property(fget=get_objective_bank_hierarchy_session) def get_objective_bank_hierarchy_design_session(self, *args, **kwargs): """Pass through to provider get_objective_bank_hierarchy_design_session""" # Implemented from kitosid template for - # osid.resource.ResourceManager.get_resource_lookup_session_manager_template return self._provider_manager.get_objective_bank_hierarchy_design_session(*args, **kwargs) objective_bank_hierarchy_design_session = property(fget=get_objective_bank_hierarchy_design_session) def get_learning_batch_manager(self, *args, **kwargs): """Pass through to provider 
unimplemented""" raise Unimplemented('Unimplemented in dlkit.services') learning_batch_manager = property(fget=get_learning_batch_manager) ## # The following methods are from osid.learning.ObjectiveObjectiveBankSession def can_lookup_objective_objective_bank_mappings(self): """Pass through to provider ObjectiveObjectiveBankSession.can_lookup_objective_objective_bank_mappings""" # Implemented from kitosid template for - # osid.resource.ResourceBinSession.can_lookup_resource_bin_mappings return self._get_provider_session('objective_objective_bank_session').can_lookup_objective_objective_bank_mappings() def use_comparative_objective_bank_view(self): """Pass through to provider ObjectiveObjectiveBankSession.use_comparative_objective_bank_view""" self._objective_bank_view = COMPARATIVE # self._get_provider_session('objective_objective_bank_session') # To make sure the session is tracked for session in self._get_provider_sessions(): try: session.use_comparative_objective_bank_view() except AttributeError: pass def use_plenary_objective_bank_view(self): """Pass through to provider ObjectiveObjectiveBankSession.use_plenary_objective_bank_view""" self._objective_bank_view = PLENARY # self._get_provider_session('objective_objective_bank_session') # To make sure the session is tracked for session in self._get_provider_sessions(): try: session.use_plenary_objective_bank_view() except AttributeError: pass def get_objective_ids_by_objective_bank(self, *args, **kwargs): """Pass through to provider ObjectiveObjectiveBankSession.get_objective_ids_by_objective_bank""" # Implemented from kitosid template for - # osid.resource.ResourceBinSession.get_resource_ids_by_bin return self._get_provider_session('objective_objective_bank_session').get_objective_ids_by_objective_bank(*args, **kwargs) def get_objectives_by_objective_bank(self, *args, **kwargs): """Pass through to provider ObjectiveObjectiveBankSession.get_objectives_by_objective_bank""" # Implemented from kitosid template for - # 
osid.resource.ResourceBinSession.get_resources_by_bin return self._get_provider_session('objective_objective_bank_session').get_objectives_by_objective_bank(*args, **kwargs) def get_objective_ids_by_objective_banks(self, *args, **kwargs): """Pass through to provider ObjectiveObjectiveBankSession.get_objective_ids_by_objective_banks""" # Implemented from kitosid template for - # osid.resource.ResourceBinSession.get_resource_ids_by_bins return self._get_provider_session('objective_objective_bank_session').get_objective_ids_by_objective_banks(*args, **kwargs) def get_objectives_by_objective_banks(self, *args, **kwargs): """Pass through to provider ObjectiveObjectiveBankSession.get_objectives_by_objective_banks""" # Implemented from kitosid template for - # osid.resource.ResourceBinSession.get_resources_by_bins return self._get_provider_session('objective_objective_bank_session').get_objectives_by_objective_banks(*args, **kwargs) def get_objective_bank_ids_by_objective(self, *args, **kwargs): """Pass through to provider ObjectiveObjectiveBankSession.get_objective_bank_ids_by_objective""" # Implemented from kitosid template for - # osid.resource.ResourceBinSession.get_bin_ids_by_resource return self._get_provider_session('objective_objective_bank_session').get_objective_bank_ids_by_objective(*args, **kwargs) def get_objective_banks_by_objective(self, *args, **kwargs): """Pass through to provider ObjectiveObjectiveBankSession.get_objective_banks_by_objective""" # Implemented from kitosid template for - # osid.resource.ResourceBinSession.get_bins_by_resource catalogs = self._get_provider_session('objective_objective_bank_session').get_objective_banks_by_objective(*args, **kwargs) cat_list = [] for cat in catalogs: cat_list.append(ObjectiveBank(self._provider_manager, cat, self._runtime, self._proxy)) return ObjectiveBankList(cat_list) ## # The following methods are from osid.learning.ObjectiveObjectiveBankAssignmentSession def can_assign_objectives(self): """Pass through 
to provider ObjectiveObjectiveBankAssignmentSession.can_assign_objectives""" # Implemented from kitosid template for - # osid.resource.ResourceBinAssignmentSession.can_assign_resources return self._get_provider_session('objective_objective_bank_assignment_session').can_assign_objectives() def can_assign_objectives_to_objective_bank(self, *args, **kwargs): """Pass through to provider ObjectiveObjectiveBankAssignmentSession.can_assign_objectives_to_objective_bank""" # Implemented from kitosid template for - # osid.resource.ResourceBinAssignmentSession.can_assign_resources_to_bin return self._get_provider_session('objective_objective_bank_assignment_session').can_assign_objectives_to_objective_bank(*args, **kwargs) def get_assignable_objective_bank_ids(self, *args, **kwargs): """Pass through to provider ObjectiveObjectiveBankAssignmentSession.get_assignable_objective_bank_ids""" # Implemented from kitosid template for - # osid.resource.ResourceBinAssignmentSession.get_assignable_bin_ids return self._get_provider_session('objective_objective_bank_assignment_session').get_assignable_objective_bank_ids(*args, **kwargs) def get_assignable_objective_bank_ids_for_objective(self, *args, **kwargs): """Pass through to provider ObjectiveObjectiveBankAssignmentSession.get_assignable_objective_bank_ids_for_objective""" # Implemented from kitosid template for - # osid.resource.ResourceBinAssignmentSession.get_assignable_bin_ids_for_resource return self._get_provider_session('objective_objective_bank_assignment_session').get_assignable_objective_bank_ids_for_objective(*args, **kwargs) def assign_objective_to_objective_bank(self, *args, **kwargs): """Pass through to provider ObjectiveObjectiveBankAssignmentSession.assign_objective_to_objective_bank""" # Implemented from kitosid template for - # osid.resource.ResourceBinAssignmentSession.assign_resource_to_bin self._get_provider_session('objective_objective_bank_assignment_session').assign_objective_to_objective_bank(*args, **kwargs) 
    def unassign_objective_from_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveObjectiveBankAssignmentSession.unassign_objective_from_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinAssignmentSession.unassign_resource_from_bin
        self._get_provider_session('objective_objective_bank_assignment_session').unassign_objective_from_objective_bank(*args, **kwargs)

    def reassign_proficiency_to_objective_bank(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        # NOTE(review): name says 'proficiency' although this section covers
        # objective/objective-bank assignment — presumably a template/generator
        # quirk; confirm against upstream before renaming (raises regardless).
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    ##
    # The following methods are from osid.learning.ActivityObjectiveBankSession

    def can_lookup_activity_objective_bank_mappings(self):
        """Pass through to provider ActivityObjectiveBankSession.can_lookup_activity_objective_bank_mappings"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinSession.can_lookup_resource_bin_mappings
        return self._get_provider_session('activity_objective_bank_session').can_lookup_activity_objective_bank_mappings()

    def get_activity_ids_by_objective_bank(self, *args, **kwargs):
        """Pass through to provider ActivityObjectiveBankSession.get_activity_ids_by_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinSession.get_resource_ids_by_bin
        return self._get_provider_session('activity_objective_bank_session').get_activity_ids_by_objective_bank(*args, **kwargs)

    def get_activities_by_objective_bank(self, *args, **kwargs):
        """Pass through to provider ActivityObjectiveBankSession.get_activities_by_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinSession.get_resources_by_bin
        return self._get_provider_session('activity_objective_bank_session').get_activities_by_objective_bank(*args, **kwargs)

    def get_activity_ids_by_objective_banks(self, *args, **kwargs):
        """Pass through to provider ActivityObjectiveBankSession.get_activity_ids_by_objective_banks"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinSession.get_resource_ids_by_bins
        return self._get_provider_session('activity_objective_bank_session').get_activity_ids_by_objective_banks(*args, **kwargs)

    def get_activities_by_objective_banks(self, *args, **kwargs):
        """Pass through to provider ActivityObjectiveBankSession.get_activities_by_objective_banks"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinSession.get_resources_by_bins
        return self._get_provider_session('activity_objective_bank_session').get_activities_by_objective_banks(*args, **kwargs)

    def get_objective_bank_ids_by_activity(self, *args, **kwargs):
        """Pass through to provider ActivityObjectiveBankSession.get_objective_bank_ids_by_activity"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinSession.get_bin_ids_by_resource
        return self._get_provider_session('activity_objective_bank_session').get_objective_bank_ids_by_activity(*args, **kwargs)

    def get_objective_banks_by_activity(self, *args, **kwargs):
        """Pass through to provider ActivityObjectiveBankSession.get_objective_banks_by_activity"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinSession.get_bins_by_resource
        # Each provider catalog is re-wrapped in the kitosid ObjectiveBank
        # adapter before being handed back to the caller as an ObjectiveBankList.
        catalogs = self._get_provider_session('activity_objective_bank_session').get_objective_banks_by_activity(*args, **kwargs)
        cat_list = []
        for cat in catalogs:
            cat_list.append(ObjectiveBank(self._provider_manager, cat, self._runtime, self._proxy))
        return ObjectiveBankList(cat_list)

    ##
    # The following methods are from osid.learning.ActivityObjectiveBankAssignmentSession

    def can_assign_activities(self):
        """Pass through to provider ActivityObjectiveBankAssignmentSession.can_assign_activities"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinAssignmentSession.can_assign_resources
        return self._get_provider_session('activity_objective_bank_assignment_session').can_assign_activities()

    def can_assign_activities_to_objective_bank(self, *args, **kwargs):
        """Pass through to provider ActivityObjectiveBankAssignmentSession.can_assign_activities_to_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinAssignmentSession.can_assign_resources_to_bin
        return self._get_provider_session('activity_objective_bank_assignment_session').can_assign_activities_to_objective_bank(*args, **kwargs)

    def get_assignable_objective_bank_ids_for_activity(self, *args, **kwargs):
        """Pass through to provider ActivityObjectiveBankAssignmentSession.get_assignable_objective_bank_ids_for_activity"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinAssignmentSession.get_assignable_bin_ids_for_resource
        return self._get_provider_session('activity_objective_bank_assignment_session').get_assignable_objective_bank_ids_for_activity(*args, **kwargs)

    def assign_activity_to_objective_bank(self, *args, **kwargs):
        """Pass through to provider ActivityObjectiveBankAssignmentSession.assign_activity_to_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinAssignmentSession.assign_resource_to_bin
        self._get_provider_session('activity_objective_bank_assignment_session').assign_activity_to_objective_bank(*args, **kwargs)

    def unassign_activity_from_objective_bank(self, *args, **kwargs):
        """Pass through to provider ActivityObjectiveBankAssignmentSession.unassign_activity_from_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinAssignmentSession.unassign_resource_from_bin
        self._get_provider_session('activity_objective_bank_assignment_session').unassign_activity_from_objective_bank(*args, **kwargs)

    def reassign_activity_to_objective_bank(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    ##
    # The following methods are from osid.learning.ProficiencyObjectiveBankAssignmentSession

    def can_assign_proficiencies(self):
        """Pass through to provider ProficiencyObjectiveBankAssignmentSession.can_assign_proficiencies"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinAssignmentSession.can_assign_resources
        return self._get_provider_session('proficiency_objective_bank_assignment_session').can_assign_proficiencies()

    def can_assign_proficiencies_to_objective_bank(self, *args, **kwargs):
        """Pass through to provider ProficiencyObjectiveBankAssignmentSession.can_assign_proficiencies_to_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinAssignmentSession.can_assign_resources_to_bin
        return self._get_provider_session('proficiency_objective_bank_assignment_session').can_assign_proficiencies_to_objective_bank(*args, **kwargs)

    def get_assignable_objective_bank_ids_for_proficiency(self, *args, **kwargs):
        """Pass through to provider ProficiencyObjectiveBankAssignmentSession.get_assignable_objective_bank_ids_for_proficiency"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinAssignmentSession.get_assignable_bin_ids_for_resource
        return self._get_provider_session('proficiency_objective_bank_assignment_session').get_assignable_objective_bank_ids_for_proficiency(*args, **kwargs)

    def assign_proficiency_to_objective_bank(self, *args, **kwargs):
        """Pass through to provider ProficiencyObjectiveBankAssignmentSession.assign_proficiency_to_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinAssignmentSession.assign_resource_to_bin
        self._get_provider_session('proficiency_objective_bank_assignment_session').assign_proficiency_to_objective_bank(*args, **kwargs)

    def unassign_proficiency_from_objective_bank(self, *args, **kwargs):
        """Pass through to provider ProficiencyObjectiveBankAssignmentSession.unassign_proficiency_from_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceBinAssignmentSession.unassign_resource_from_bin
        self._get_provider_session('proficiency_objective_bank_assignment_session').unassign_proficiency_from_objective_bank(*args, **kwargs)

    ##
    # The following methods are from osid.learning.ObjectiveBankLookupSession

    def can_lookup_objective_banks(self):
        """Pass through to provider ObjectiveBankLookupSession.can_lookup_objective_banks"""
        # Implemented from kitosid template for -
        # osid.resource.BinLookupSession.can_lookup_bins_template
        return self._get_provider_session('objective_bank_lookup_session').can_lookup_objective_banks()

    def get_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankLookupSession.get_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinLookupSession.get_bin
        # The provider's catalog is wrapped in the kitosid ObjectiveBank adapter.
        return ObjectiveBank(
            self._provider_manager,
            self._get_provider_session('objective_bank_lookup_session').get_objective_bank(*args, **kwargs),
            self._runtime,
            self._proxy)

    def get_objective_banks_by_ids(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankLookupSession.get_objective_banks_by_ids"""
        # Implemented from kitosid template for -
        # osid.resource.BinLookupSession.get_bins_by_ids
        catalogs = self._get_provider_session('objective_bank_lookup_session').get_objective_banks_by_ids(*args, **kwargs)
        cat_list = []
        for cat in catalogs:
            cat_list.append(ObjectiveBank(self._provider_manager, cat, self._runtime, self._proxy))
        return ObjectiveBankList(cat_list)

    def get_objective_banks_by_genus_type(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankLookupSession.get_objective_banks_by_genus_type"""
        # Implemented from kitosid template for -
        # osid.resource.BinLookupSession.get_bins_by_genus_type
        catalogs = self._get_provider_session('objective_bank_lookup_session').get_objective_banks_by_genus_type(*args, **kwargs)
        cat_list = []
        for cat in catalogs:
            cat_list.append(ObjectiveBank(self._provider_manager, cat, self._runtime, self._proxy))
        return ObjectiveBankList(cat_list)

    def get_objective_banks_by_parent_genus_type(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankLookupSession.get_objective_banks_by_parent_genus_type"""
        # Implemented from kitosid template for -
        # osid.resource.BinLookupSession.get_bins_by_parent_genus_type
        catalogs = self._get_provider_session('objective_bank_lookup_session').get_objective_banks_by_parent_genus_type(*args, **kwargs)
        cat_list = []
        for cat in catalogs:
            cat_list.append(ObjectiveBank(self._provider_manager, cat, self._runtime, self._proxy))
        return ObjectiveBankList(cat_list)

    def get_objective_banks_by_record_type(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankLookupSession.get_objective_banks_by_record_type"""
        # Implemented from kitosid template for -
        # osid.resource.BinLookupSession.get_bins_by_record_type
        catalogs = self._get_provider_session('objective_bank_lookup_session').get_objective_banks_by_record_type(*args, **kwargs)
        cat_list = []
        for cat in catalogs:
            cat_list.append(ObjectiveBank(self._provider_manager, cat, self._runtime, self._proxy))
        return ObjectiveBankList(cat_list)

    def get_objective_banks_by_provider(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankLookupSession.get_objective_banks_by_provider"""
        # Implemented from kitosid template for -
        # osid.resource.BinLookupSession.get_bins_by_provider
        catalogs = self._get_provider_session('objective_bank_lookup_session').get_objective_banks_by_provider(*args, **kwargs)
        cat_list = []
        for cat in catalogs:
            cat_list.append(ObjectiveBank(self._provider_manager, cat, self._runtime, self._proxy))
        return ObjectiveBankList(cat_list)

    def get_objective_banks(self):
        """Pass through to provider ObjectiveBankLookupSession.get_objective_banks"""
        # Implemented from kitosid template for -
        # osid.resource.BinLookupSession.get_bins_template
        catalogs = self._get_provider_session('objective_bank_lookup_session').get_objective_banks()
        cat_list = []
        for cat in catalogs:
            cat_list.append(ObjectiveBank(self._provider_manager, cat, self._runtime, self._proxy))
        return ObjectiveBankList(cat_list)

    objective_banks = property(fget=get_objective_banks)

    ##
    # The following methods are from osid.learning.ObjectiveBankAdminSession

    def can_create_objective_banks(self):
        """Pass through to provider ObjectiveBankAdminSession.can_create_objective_banks"""
        # Implemented from kitosid template for -
        # osid.resource.BinAdminSession.can_create_bins
        return self._get_provider_session('objective_bank_admin_session').can_create_objective_banks()

    def can_create_objective_bank_with_record_types(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankAdminSession.can_create_objective_bank_with_record_types"""
        # Implemented from kitosid template for -
        # osid.resource.BinAdminSession.can_create_bin_with_record_types
        return self._get_provider_session('objective_bank_admin_session').can_create_objective_bank_with_record_types(*args, **kwargs)

    def get_objective_bank_form_for_create(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankAdminSession.get_objective_bank_form_for_create"""
        # Implemented from kitosid template for -
        # osid.resource.BinAdminSession.get_bin_form_for_create
        return self._get_provider_session('objective_bank_admin_session').get_objective_bank_form_for_create(*args, **kwargs)

    def create_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankAdminSession.create_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinAdminSession.create_bin
        return ObjectiveBank(
            self._provider_manager,
            self._get_provider_session('objective_bank_admin_session').create_objective_bank(*args, **kwargs),
            self._runtime,
            self._proxy)

    def can_update_objective_banks(self):
        """Pass through to provider ObjectiveBankAdminSession.can_update_objective_banks"""
        # Implemented from kitosid template for -
        # osid.resource.BinAdminSession.can_update_bins
        return self._get_provider_session('objective_bank_admin_session').can_update_objective_banks()

    def get_objective_bank_form_for_update(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankAdminSession.get_objective_bank_form_for_update"""
        # Implemented from kitosid template for -
        # osid.resource.BinAdminSession.get_bin_form_for_update
        return self._get_provider_session('objective_bank_admin_session').get_objective_bank_form_for_update(*args, **kwargs)

    def get_objective_bank_form(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankAdminSession.get_objective_bank_form_for_update"""
        # Implemented from kitosid template for -
        # osid.resource.BinAdminSession.get_bin_form_for_update_template
        # This method might be a bit sketchy. Time will tell.
        # Heuristic dispatch: a trailing list argument (record types) or the
        # 'objective_bank_record_types' kwarg signals a create form; otherwise
        # the call is treated as a request for an update form.
        if isinstance(args[-1], list) or 'objective_bank_record_types' in kwargs:
            return self.get_objective_bank_form_for_create(*args, **kwargs)
        else:
            return self.get_objective_bank_form_for_update(*args, **kwargs)

    def update_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankAdminSession.update_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinAdminSession.update_bin
        # OSID spec does not require returning updated catalog
        return ObjectiveBank(
            self._provider_manager,
            self._get_provider_session('objective_bank_admin_session').update_objective_bank(*args, **kwargs),
            self._runtime,
            self._proxy)

    def save_objective_bank(self, objective_bank_form, *args, **kwargs):
        """Pass through to provider ObjectiveBankAdminSession.update_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinAdminSession.update_bin
        # Convenience wrapper: routes to update or create based on the form's own state.
        if objective_bank_form.is_for_update():
            return self.update_objective_bank(objective_bank_form, *args, **kwargs)
        else:
            return self.create_objective_bank(objective_bank_form, *args, **kwargs)

    def can_delete_objective_banks(self):
        """Pass through to provider ObjectiveBankAdminSession.can_delete_objective_banks"""
        # Implemented from kitosid template for -
        # osid.resource.BinAdminSession.can_delete_bins
        return self._get_provider_session('objective_bank_admin_session').can_delete_objective_banks()

    def delete_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankAdminSession.delete_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinAdminSession.delete_bin
        self._get_provider_session('objective_bank_admin_session').delete_objective_bank(*args, **kwargs)

    def can_manage_objective_bank_aliases(self):
        """Pass through to provider ObjectiveBankAdminSession.can_manage_objective_bank_aliases"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceAdminSession.can_manage_resource_aliases_template
        return self._get_provider_session('objective_bank_admin_session').can_manage_objective_bank_aliases()

    def alias_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankAdminSession.alias_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinAdminSession.alias_bin
        self._get_provider_session('objective_bank_admin_session').alias_objective_bank(*args, **kwargs)

    ##
    # The following methods are from osid.learning.ObjectiveBankHierarchySession

    def get_objective_bank_hierarchy_id(self):
        """Pass through to provider ObjectiveBankHierarchySession.get_objective_bank_hierarchy_id"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.get_bin_hierarchy_id
        return self._get_provider_session('objective_bank_hierarchy_session').get_objective_bank_hierarchy_id()

    objective_bank_hierarchy_id = property(fget=get_objective_bank_hierarchy_id)

    def get_objective_bank_hierarchy(self):
        """Pass through to provider ObjectiveBankHierarchySession.get_objective_bank_hierarchy"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.get_bin_hierarchy
        return self._get_provider_session('objective_bank_hierarchy_session').get_objective_bank_hierarchy()

    objective_bank_hierarchy = property(fget=get_objective_bank_hierarchy)

    def can_access_objective_bank_hierarchy(self):
        """Pass through to provider ObjectiveBankHierarchySession.can_access_objective_bank_hierarchy"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.can_access_bin_hierarchy
        return self._get_provider_session('objective_bank_hierarchy_session').can_access_objective_bank_hierarchy()

    def get_root_objective_bank_ids(self):
        """Pass through to provider ObjectiveBankHierarchySession.get_root_objective_bank_ids"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.get_root_bin_ids
        return self._get_provider_session('objective_bank_hierarchy_session').get_root_objective_bank_ids()

    root_objective_bank_ids = property(fget=get_root_objective_bank_ids)

    def get_root_objective_banks(self):
        """Pass through to provider ObjectiveBankHierarchySession.get_root_objective_banks"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.get_root_bins
        return self._get_provider_session('objective_bank_hierarchy_session').get_root_objective_banks()

    root_objective_banks = property(fget=get_root_objective_banks)

    def has_parent_objective_banks(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchySession.has_parent_objective_banks"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.has_parent_bins
        return self._get_provider_session('objective_bank_hierarchy_session').has_parent_objective_banks(*args, **kwargs)

    def is_parent_of_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchySession.is_parent_of_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.is_parent_of_bin
        return self._get_provider_session('objective_bank_hierarchy_session').is_parent_of_objective_bank(*args, **kwargs)

    def get_parent_objective_bank_ids(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchySession.get_parent_objective_bank_ids"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.get_parent_bin_ids
        return self._get_provider_session('objective_bank_hierarchy_session').get_parent_objective_bank_ids(*args, **kwargs)

    def get_parent_objective_banks(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchySession.get_parent_objective_banks"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.get_parent_bins
        return self._get_provider_session('objective_bank_hierarchy_session').get_parent_objective_banks(*args, **kwargs)

    def is_ancestor_of_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchySession.is_ancestor_of_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.is_ancestor_of_bin
        return self._get_provider_session('objective_bank_hierarchy_session').is_ancestor_of_objective_bank(*args, **kwargs)

    def has_child_objective_banks(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchySession.has_child_objective_banks"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.has_child_bins
        return self._get_provider_session('objective_bank_hierarchy_session').has_child_objective_banks(*args, **kwargs)

    def is_child_of_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchySession.is_child_of_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.is_child_of_bin
        return self._get_provider_session('objective_bank_hierarchy_session').is_child_of_objective_bank(*args, **kwargs)

    def get_child_objective_bank_ids(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchySession.get_child_objective_bank_ids"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.get_child_bin_ids
        return self._get_provider_session('objective_bank_hierarchy_session').get_child_objective_bank_ids(*args, **kwargs)

    def get_child_objective_banks(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchySession.get_child_objective_banks"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.get_child_bins
        return self._get_provider_session('objective_bank_hierarchy_session').get_child_objective_banks(*args, **kwargs)

    def is_descendant_of_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchySession.is_descendant_of_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.is_descendant_of_bin
        return self._get_provider_session('objective_bank_hierarchy_session').is_descendant_of_objective_bank(*args, **kwargs)

    def get_objective_bank_node_ids(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchySession.get_objective_bank_node_ids"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.get_bin_node_ids
        return self._get_provider_session('objective_bank_hierarchy_session').get_objective_bank_node_ids(*args, **kwargs)

    def get_objective_bank_nodes(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchySession.get_objective_bank_nodes"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchySession.get_bin_nodes
        return self._get_provider_session('objective_bank_hierarchy_session').get_objective_bank_nodes(*args, **kwargs)

    ##
    # The following methods are from osid.learning.ObjectiveBankHierarchyDesignSession

    def can_modify_objective_bank_hierarchy(self):
        """Pass through to provider ObjectiveBankHierarchyDesignSession.can_modify_objective_bank_hierarchy"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy
        return self._get_provider_session('objective_bank_hierarchy_design_session').can_modify_objective_bank_hierarchy()

    def create_objective_bank_hierarchy(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchyDesignSession.can_modify_objective_bank_hierarchy"""
        # Patched in by cjshaw@mit.edu, Jul 23, 2014, added by birdland to template on Aug 8, 2014
        # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead
        # Will not return an actual object, just JSON
        # since a BankHierarchy does not seem to be an OSID thing.
        return self._get_provider_session('objective_bank_hierarchy_design_session').create_objective_bank_hierarchy(*args, **kwargs)

    def delete_objective_bank_hierarchy(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchyDesignSession.can_modify_objective_bank_hierarchy"""
        # Patched in by cjshaw@mit.edu, Jul 23, 2014, added by birdland to template on Aug 8, 2014
        # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead
        # Will not return an actual object, just JSON
        # since a BankHierarchy does not seem to be an OSID thing.
        return self._get_provider_session('objective_bank_hierarchy_design_session').delete_objective_bank_hierarchy(*args, **kwargs)

    def add_root_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchyDesignSession.add_root_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchyDesignSession.add_root_bin
        self._get_provider_session('objective_bank_hierarchy_design_session').add_root_objective_bank(*args, **kwargs)

    def remove_root_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchyDesignSession.remove_root_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchyDesignSession.remove_root_bin
        self._get_provider_session('objective_bank_hierarchy_design_session').remove_root_objective_bank(*args, **kwargs)

    def add_child_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchyDesignSession.add_child_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchyDesignSession.add_child_bin
        self._get_provider_session('objective_bank_hierarchy_design_session').add_child_objective_bank(*args, **kwargs)

    def remove_child_objective_bank(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchyDesignSession.remove_child_objective_bank"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchyDesignSession.remove_child_bin
        self._get_provider_session('objective_bank_hierarchy_design_session').remove_child_objective_bank(*args, **kwargs)

    def remove_child_objective_banks(self, *args, **kwargs):
        """Pass through to provider ObjectiveBankHierarchyDesignSession.remove_child_objective_banks"""
        # Implemented from kitosid template for -
        # osid.resource.BinHierarchyDesignSession.remove_child_bins
        self._get_provider_session('objective_bank_hierarchy_design_session').remove_child_objective_banks(*args, **kwargs)


class LearningProxyManager(osid.OsidProxyManager, LearningProfile, learning_managers.LearningProxyManager):
    """LearningProxyManager convenience adapter including related Session methods."""

    def get_objective_lookup_session(self, *args, **kwargs):
        """Sends control to Manager"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceProxyManager.get_resource_lookup_session_template
        # NOTE(review): invoked on the LearningManager class, not an instance,
        # so the first positional argument is bound as 'self' — presumably
        # intentional in the kitosid template; confirm against upstream.
        return LearningManager.get_objective_lookup_session(*args, **kwargs)

    def get_objective_lookup_session_for_objective_bank(self, *args, **kwargs):
        """Sends control to Manager"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceProxyManager.get_resource_lookup_session_for_bin_template
        return LearningManager.get_objective_lookup_session_for_objective_bank(*args, **kwargs)

    def get_objective_query_session(self, *args, **kwargs):
        """Sends control to Manager"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceProxyManager.get_resource_lookup_session_template
        return LearningManager.get_objective_query_session(*args, **kwargs)

    def get_objective_query_session_for_objective_bank(self, *args, **kwargs):
        """Sends control to Manager"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceProxyManager.get_resource_lookup_session_for_bin_template
        return LearningManager.get_objective_query_session_for_objective_bank(*args, **kwargs)

    def get_objective_admin_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_admin_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_hierarchy_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_hierarchy_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_hierarchy_design_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_hierarchy_design_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_sequencing_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_sequencing_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_objective_bank_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_objective_bank_assignment_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_requisite_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_requisite_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_requisite_assignment_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_requisite_assignment_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_activity_lookup_session(self, *args, **kwargs):
        """Sends control to Manager"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceProxyManager.get_resource_lookup_session_template
        return LearningManager.get_activity_lookup_session(*args, **kwargs)

    def get_activity_lookup_session_for_objective_bank(self, *args, **kwargs):
        """Sends control to Manager"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceProxyManager.get_resource_lookup_session_for_bin_template
        return LearningManager.get_activity_lookup_session_for_objective_bank(*args, **kwargs)

    def get_activity_query_session(self, *args, **kwargs):
        """Sends control to Manager"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceProxyManager.get_resource_lookup_session_template
        return LearningManager.get_activity_query_session(*args, **kwargs)

    def get_activity_query_session_for_objective_bank(self, *args, **kwargs):
        """Sends control to Manager"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceProxyManager.get_resource_lookup_session_for_bin_template
        return LearningManager.get_activity_query_session_for_objective_bank(*args, **kwargs)

    def get_activity_admin_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_activity_admin_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_activity_objective_bank_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_activity_objective_bank_assignment_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_proficiency_lookup_session(self, *args, **kwargs):
        """Sends control to Manager"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceProxyManager.get_resource_lookup_session_template
        return LearningManager.get_proficiency_lookup_session(*args, **kwargs)

    def get_proficiency_lookup_session_for_objective_bank(self, *args, **kwargs):
        """Sends control to Manager"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceProxyManager.get_resource_lookup_session_for_bin_template
        return LearningManager.get_proficiency_lookup_session_for_objective_bank(*args, **kwargs)

    def get_proficiency_query_session(self, *args, **kwargs):
        """Sends control to Manager"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceProxyManager.get_resource_lookup_session_template
        return LearningManager.get_proficiency_query_session(*args, **kwargs)

    def get_proficiency_query_session_for_objective_bank(self, *args, **kwargs):
        """Sends control to Manager"""
        # Implemented from kitosid template for -
        # osid.resource.ResourceProxyManager.get_resource_lookup_session_for_bin_template
        return LearningManager.get_proficiency_query_session_for_objective_bank(*args, **kwargs)

    def get_proficiency_admin_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_proficiency_admin_session_for_objective_bank(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_proficiency_objective_bank_assignment_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_bank_lookup_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_bank_admin_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_bank_hierarchy_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_objective_bank_hierarchy_design_session(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

    def get_learning_batch_proxy_manager(self, *args, **kwargs):
        """Pass through to provider unimplemented"""
        raise Unimplemented('Unimplemented in dlkit.services')

    learning_batch_proxy_manager = property(fget=get_learning_batch_proxy_manager)


class ObjectiveBank(abc_learning_objects.ObjectiveBank, osid.OsidSession, osid.OsidCatalog):
    """ObjectiveBank convenience adapter including related Session methods."""
    # WILL THIS EVER BE CALLED DIRECTLY - OUTSIDE OF A MANAGER?
def __init__(self, provider_manager, catalog, runtime, proxy, **kwargs):
    # kwargs seeds the per-agent provider-session cache used by
    # _get_provider_session below.
    self._provider_manager = provider_manager
    self._catalog = catalog
    self._runtime = runtime
    osid.OsidObject.__init__(self, self._catalog)  # This is to initialize self._object
    osid.OsidSession.__init__(self, proxy)  # This is to initialize self._proxy
    self._catalog_id = catalog.get_id()
    self._provider_sessions = kwargs
    self._session_management = AUTOMATIC
    self._objective_bank_view = DEFAULT
    self._object_views = dict()
    self._operable_views = dict()
    self._containable_views = dict()

def _set_objective_bank_view(self, session):
    """Sets the underlying objective_bank view to match current view"""
    if self._objective_bank_view == FEDERATED:
        try:
            session.use_federated_objective_bank_view()
        except AttributeError:
            pass
    else:
        try:
            session.use_isolated_objective_bank_view()
        except AttributeError:
            pass

def _set_object_view(self, session):
    """Sets the underlying object views to match current view"""
    for obj_name in self._object_views:
        if self._object_views[obj_name] == PLENARY:
            try:
                getattr(session, 'use_plenary_' + obj_name + '_view')()
            except AttributeError:
                pass
        else:
            try:
                getattr(session, 'use_comparative_' + obj_name + '_view')()
            except AttributeError:
                pass

def _set_operable_view(self, session):
    """Sets the underlying operable views to match current view"""
    for obj_name in self._operable_views:
        if self._operable_views[obj_name] == ACTIVE:
            try:
                getattr(session, 'use_active_' + obj_name + '_view')()
            except AttributeError:
                pass
        else:
            try:
                getattr(session, 'use_any_status_' + obj_name + '_view')()
            except AttributeError:
                pass

def _set_containable_view(self, session):
    """Sets the underlying containable views to match current view"""
    for obj_name in self._containable_views:
        if self._containable_views[obj_name] == SEQUESTERED:
            try:
                getattr(session, 'use_sequestered_' + obj_name + '_view')()
            except AttributeError:
                pass
        else:
            try:
                getattr(session, 'use_unsequestered_' + obj_name + '_view')()
            except AttributeError:
                pass

def _get_provider_session(self, session_name):
    """Returns the requested provider session.

    Instantiates a new one if the named session is not already known.

    """
    agent_key = self._get_agent_key()
    if session_name in self._provider_sessions[agent_key]:
        return self._provider_sessions[agent_key][session_name]
    else:
        # Catalog-scoped session factory, e.g. get_<name>_for_objective_bank.
        session_class = getattr(self._provider_manager, 'get_' + session_name + '_for_objective_bank')
        if self._proxy is None:
            if 'notification_session' in session_name:
                # Is there something else we should do about the receiver field?
                session = session_class('fake receiver', self._catalog.get_id())
            else:
                session = session_class(self._catalog.get_id())
        else:
            if 'notification_session' in session_name:
                # Is there something else we should do about the receiver field?
                session = session_class('fake receiver', self._catalog.get_id(), self._proxy)
            else:
                session = session_class(self._catalog.get_id(), self._proxy)
        # Propagate the currently selected view flags to the new session.
        self._set_objective_bank_view(session)
        self._set_object_view(session)
        self._set_operable_view(session)
        self._set_containable_view(session)
        if self._session_management != DISABLED:
            self._provider_sessions[agent_key][session_name] = session
        return session

def get_objective_bank_id(self):
    """Gets the Id of this objective_bank."""
    return self._catalog_id

def get_objective_bank(self):
    """Strange little method to assure conformance for inherited Sessions."""
    return self

def __getattr__(self, name):
    # Delegate unknown attribute lookups to the wrapped catalog.
    # NOTE(review): uses item access (self._catalog[name]) -- presumably the
    # catalog object supports __getitem__ for attribute-style lookup; confirm.
    if '_catalog' in self.__dict__:
        try:
            return self._catalog[name]
        except AttributeError:
            pass
    raise AttributeError

def close_sessions(self):
    """Close all sessions currently being managed by this Manager to save memory."""
    if self._session_management != MANDATORY:
        self._provider_sessions = dict()
    else:
        raise IllegalState()

def use_automatic_session_management(self):
    """Session state will be saved until closed by consumers."""
    self._session_management = AUTOMATIC

def use_mandatory_session_management(self):
    """Session state will always be saved and can not be closed by consumers."""
    # Session state will be saved and can not be closed by consumers
    self._session_management = MANDATORY

def disable_session_management(self):
    """Session state will never be saved."""
    self._session_management = DISABLED
    self.close_sessions()

def get_objective_bank_record(self, *args, **kwargs):
    """Pass through to provider unimplemented"""
    raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

##
# The following methods are from osid.learning.ObjectiveLookupSession

def can_lookup_objectives(self):
    """Pass through to provider ObjectiveLookupSession.can_lookup_objectives"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.can_lookup_resources_template
    return self._get_provider_session('objective_lookup_session').can_lookup_objectives()

def use_comparative_objective_view(self):
    """Pass through to provider ObjectiveLookupSession.use_comparative_objective_view"""
    self._object_views['objective'] = COMPARATIVE
    # self._get_provider_session('objective_lookup_session') # To make sure the session is tracked
    for session in self._get_provider_sessions():
        try:
            session.use_comparative_objective_view()
        except AttributeError:
            pass

def use_plenary_objective_view(self):
    """Pass through to provider ObjectiveLookupSession.use_plenary_objective_view"""
    self._object_views['objective'] = PLENARY
    # self._get_provider_session('objective_lookup_session') # To make sure the session is tracked
    for session in self._get_provider_sessions():
        try:
            session.use_plenary_objective_view()
        except AttributeError:
            pass

def use_federated_objective_bank_view(self):
    """Pass through to provider ObjectiveLookupSession.use_federated_objective_bank_view"""
    self._objective_bank_view = FEDERATED
    # self._get_provider_session('objective_lookup_session') # To make sure the session is tracked
    for session in self._get_provider_sessions():
        try:
            session.use_federated_objective_bank_view()
        except AttributeError:
            pass

def use_isolated_objective_bank_view(self):
    """Pass through to provider ObjectiveLookupSession.use_isolated_objective_bank_view"""
    self._objective_bank_view = ISOLATED
    # self._get_provider_session('objective_lookup_session') # To make sure the session is tracked
    for session in self._get_provider_sessions():
        try:
            session.use_isolated_objective_bank_view()
        except AttributeError:
            pass

def get_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveLookupSession.get_objective"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.get_resource_template
    return self._get_provider_session('objective_lookup_session').get_objective(*args, **kwargs)

def get_objectives_by_ids(self, *args, **kwargs):
    """Pass through to provider ObjectiveLookupSession.get_objectives_by_ids"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.get_resources_by_ids_template
    return self._get_provider_session('objective_lookup_session').get_objectives_by_ids(*args, **kwargs)

def get_objectives_by_genus_type(self, *args, **kwargs):
    """Pass through to provider ObjectiveLookupSession.get_objectives_by_genus_type"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.get_resources_by_genus_type_template
    return self._get_provider_session('objective_lookup_session').get_objectives_by_genus_type(*args, **kwargs)

def get_objectives_by_parent_genus_type(self, *args, **kwargs):
    """Pass through to provider ObjectiveLookupSession.get_objectives_by_parent_genus_type"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.get_resources_by_parent_genus_type_template
    return self._get_provider_session('objective_lookup_session').get_objectives_by_parent_genus_type(*args, **kwargs)

def get_objectives_by_record_type(self, *args, **kwargs):
    """Pass through to provider ObjectiveLookupSession.get_objectives_by_record_type"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.get_resources_by_record_type_template
    return self._get_provider_session('objective_lookup_session').get_objectives_by_record_type(*args, **kwargs)

def get_objectives(self):
    """Pass through to provider ObjectiveLookupSession.get_objectives"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.get_resources_template
    return self._get_provider_session('objective_lookup_session').get_objectives()

objectives = property(fget=get_objectives)

##
# The following methods are from osid.learning.ObjectiveQuerySession

def can_search_objectives(self):
    """Pass through to provider ObjectiveQuerySession.can_search_objectives"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceQuerySession.can_search_resources_template
    return self._get_provider_session('objective_query_session').can_search_objectives()

def get_objective_query(self):
    """Pass through to provider ObjectiveQuerySession.get_objective_query"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceQuerySession.get_item_query_template
    return self._get_provider_session('objective_query_session').get_objective_query()

objective_query = property(fget=get_objective_query)

def get_objectives_by_query(self, *args, **kwargs):
    """Pass through to provider ObjectiveQuerySession.get_objectives_by_query"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceQuerySession.get_items_by_query_template
    return self._get_provider_session('objective_query_session').get_objectives_by_query(*args, **kwargs)

##
# The following methods are from osid.learning.ObjectiveAdminSession

def can_create_objectives(self):
    """Pass through to provider ObjectiveAdminSession.can_create_objectives"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.can_create_resources
    return self._get_provider_session('objective_admin_session').can_create_objectives()

def can_create_objective_with_record_types(self, *args, **kwargs):
    """Pass through to provider ObjectiveAdminSession.can_create_objective_with_record_types"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.can_create_resource_with_record_types
    return self._get_provider_session('objective_admin_session').can_create_objective_with_record_types(*args, **kwargs)

def get_objective_form_for_create(self, *args, **kwargs):
    """Pass through to provider ObjectiveAdminSession.get_objective_form_for_create"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.get_resource_form_for_create
    return self._get_provider_session('objective_admin_session').get_objective_form_for_create(*args, **kwargs)

def create_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveAdminSession.create_objective"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.create_resource
    return self._get_provider_session('objective_admin_session').create_objective(*args, **kwargs)

def can_update_objectives(self):
    """Pass through to provider ObjectiveAdminSession.can_update_objectives"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.can_update_resources
    return self._get_provider_session('objective_admin_session').can_update_objectives()

def get_objective_form_for_update(self, *args, **kwargs):
    """Pass through to provider ObjectiveAdminSession.get_objective_form_for_update"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.get_resource_form_for_update
    return self._get_provider_session('objective_admin_session').get_objective_form_for_update(*args, **kwargs)

def get_objective_form(self, *args, **kwargs):
    """Pass through to provider ObjectiveAdminSession.get_objective_form_for_update"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.get_resource_form_for_update
    # This method might be a bit sketchy. Time will tell.
if isinstance(args[-1], list) or 'objective_record_types' in kwargs:
    # Heuristic: a trailing list of record types means a create form is wanted.
    return self.get_objective_form_for_create(*args, **kwargs)
else:
    return self.get_objective_form_for_update(*args, **kwargs)

def duplicate_objective(self, objective_id):
    """Pass through to provider ObjectiveAdminSession.duplicate_objective"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.get_resource_form_for_update
    return self._get_provider_session('objective_admin_session').duplicate_objective(objective_id)

def update_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveAdminSession.update_objective"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.update_resource
    # Note: The OSID spec does not require returning updated object
    return self._get_provider_session('objective_admin_session').update_objective(*args, **kwargs)

def save_objective(self, objective_form, *args, **kwargs):
    """Pass through to provider ObjectiveAdminSession.update_objective"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.update_resource
    # Dispatch on the form's own state: update an existing object, else create.
    if objective_form.is_for_update():
        return self.update_objective(objective_form, *args, **kwargs)
    else:
        return self.create_objective(objective_form, *args, **kwargs)

def can_delete_objectives(self):
    """Pass through to provider ObjectiveAdminSession.can_delete_objectives"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.can_delete_resources
    return self._get_provider_session('objective_admin_session').can_delete_objectives()

def delete_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveAdminSession.delete_objective"""
    # Implemented from kitosid template for -
    # osid.learning.ObjectiveAdminSession.delete_objective
    self._get_provider_session('objective_admin_session').delete_objective(*args, **kwargs)

def can_manage_objective_aliases(self):
    """Pass through to provider ObjectiveAdminSession.can_manage_objective_aliases"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.can_manage_resource_aliases_template
    return self._get_provider_session('objective_admin_session').can_manage_objective_aliases()

def alias_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveAdminSession.alias_objective"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.alias_resources
    self._get_provider_session('objective_admin_session').alias_objective(*args, **kwargs)

##
# The following methods are from osid.learning.ObjectiveHierarchySession

def get_objective_hierarchy_id(self):
    """Pass through to provider ObjectiveHierarchySession.get_objective_hierarchy_id"""
    return self._get_provider_session('objective_hierarchy_session').get_objective_hierarchy_id()

objective_hierarchy_id = property(fget=get_objective_hierarchy_id)

def get_objective_hierarchy(self):
    """Pass through to provider ObjectiveHierarchySession.get_objective_hierarchy"""
    return self._get_provider_session('objective_hierarchy_session').get_objective_hierarchy()

objective_hierarchy = property(fget=get_objective_hierarchy)

def can_access_objective_hierarchy(self):
    """Pass through to provider ObjectiveHierarchySession.can_access_objective_hierarchy"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.can_access_subject_hierarchy_template
    return self._get_provider_session('objective_hierarchy_session').can_access_objective_hierarchy()

def get_root_objective_ids(self):
    """Pass through to provider ObjectiveHierarchySession.get_root_objective_ids"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.get_root_subject_ids_template
    return self._get_provider_session('objective_hierarchy_session').get_root_objective_ids()

root_objective_ids = property(fget=get_root_objective_ids)

def get_root_objectives(self):
    """Pass through to provider ObjectiveHierarchySession.get_root_objectives"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.get_root_subjects_template
    return self._get_provider_session('objective_hierarchy_session').get_root_objectives()

root_objectives = property(fget=get_root_objectives)

def has_parent_objectives(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchySession.has_parent_objectives"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.has_parent_subjects_template
    return self._get_provider_session('objective_hierarchy_session').has_parent_objectives(*args, **kwargs)

def is_parent_of_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchySession.is_parent_of_objective"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.is_parent_of_subject_template
    return self._get_provider_session('objective_hierarchy_session').is_parent_of_objective(*args, **kwargs)

def get_parent_objective_ids(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchySession.get_parent_objective_ids"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.get_parent_subject_ids_template
    return self._get_provider_session('objective_hierarchy_session').get_parent_objective_ids(*args, **kwargs)

def get_parent_objectives(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchySession.get_parent_objectives"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.get_parent_subjects_template
    return self._get_provider_session('objective_hierarchy_session').get_parent_objectives(*args, **kwargs)

def is_ancestor_of_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchySession.is_ancestor_of_objective"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.is_ancestor_of_subject_template
    return self._get_provider_session('objective_hierarchy_session').is_ancestor_of_objective(*args, **kwargs)

def has_child_objectives(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchySession.has_child_objectives"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.has_child_subjects_template
    return self._get_provider_session('objective_hierarchy_session').has_child_objectives(*args, **kwargs)

def is_child_of_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchySession.is_child_of_objective"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.is_child_of_subject_template
    return self._get_provider_session('objective_hierarchy_session').is_child_of_objective(*args, **kwargs)

def get_child_objective_ids(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchySession.get_child_objective_ids"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.get_child_subject_ids_template
    return self._get_provider_session('objective_hierarchy_session').get_child_objective_ids(*args, **kwargs)

def get_child_objectives(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchySession.get_child_objectives"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.get_child_subjects_template
    return self._get_provider_session('objective_hierarchy_session').get_child_objectives(*args, **kwargs)

def is_descendant_of_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchySession.is_descendant_of_objective"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.is_descendant_of_subject_template
    return self._get_provider_session('objective_hierarchy_session').is_descendant_of_objective(*args, **kwargs)

def get_objective_node_ids(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchySession.get_objective_node_ids"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.get_subject_node_ids_template
    return self._get_provider_session('objective_hierarchy_session').get_objective_node_ids(*args, **kwargs)

def get_objective_nodes(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchySession.get_objective_nodes"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchySession.get_subject_nodes_template
    return self._get_provider_session('objective_hierarchy_session').get_objective_nodes(*args, **kwargs)

##
# The following methods are from osid.learning.ObjectiveHierarchyDesignSession

def can_modify_objective_hierarchy(self):
    """Pass through to provider ObjectiveHierarchyDesignSession.can_modify_objective_hierarchy"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchyDesignSession.can_modify_subject_hierarchy
    return self._get_provider_session('objective_hierarchy_design_session').can_modify_objective_hierarchy()

def add_root_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchyDesignSession.add_root_objective"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchyDesignSession.add_root_subject
    self._get_provider_session('objective_hierarchy_design_session').add_root_objective(*args, **kwargs)

def remove_root_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchyDesignSession.remove_root_objective"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchyDesignSession.remove_root_subject
    self._get_provider_session('objective_hierarchy_design_session').remove_root_objective(*args, **kwargs)

def add_child_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchyDesignSession.add_child_objective"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchyDesignSession.add_child_subject
    self._get_provider_session('objective_hierarchy_design_session').add_child_objective(*args, **kwargs)

def remove_child_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchyDesignSession.remove_child_objective"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchyDesignSession.remove_child_subject
self._get_provider_session('objective_hierarchy_design_session').remove_child_objective(*args, **kwargs)

def remove_child_objectives(self, *args, **kwargs):
    """Pass through to provider ObjectiveHierarchyDesignSession.remove_child_objectives"""
    # Implemented from kitosid template for -
    # osid.ontology.SubjectHierarchyDesignSession.remove_child_subjects_template
    self._get_provider_session('objective_hierarchy_design_session').remove_child_objectives(*args, **kwargs)

##
# The following methods are from osid.learning.ObjectiveSequencingSession

def can_sequence_objectives(self):
    """Pass through to provider method"""
    return self._get_provider_session('objective_sequencing_session').can_sequence_objectives()

def move_objective_ahead(self, *args, **kwargs):
    """Pass through to provider method"""
    return self._get_provider_session('objective_sequencing_session').move_objective_ahead(*args, **kwargs)

def move_objective_behind(self, *args, **kwargs):
    """Pass through to provider method"""
    return self._get_provider_session('objective_sequencing_session').move_objective_behind(*args, **kwargs)

def sequence_objectives(self, *args, **kwargs):
    """Pass through to provider method"""
    return self._get_provider_session('objective_sequencing_session').sequence_objectives(*args, **kwargs)

##
# The following methods are from osid.learning.ObjectiveRequisiteSession

def can_lookup_objective_prerequisites(self):
    """Pass through to provider unimplemented"""
    raise Unimplemented('Unimplemented in dlkit.services')

def get_requisite_objectives(self, *args, **kwargs):
    """Pass through to provider ObjectiveRequisiteSession.get_requisite_objectives"""
    # Implemented from kitosid template for -
    # osid.learning.ObjectiveRequisiteSession.get_requisite_objectives
    return self._get_provider_session('objective_requisite_session').get_requisite_objectives(*args, **kwargs)

def get_all_requisite_objectives(self, *args, **kwargs):
    """Pass through to provider ObjectiveRequisiteSession.get_all_requisite_objectives"""
    # Implemented from kitosid template for -
    # osid.learning.ObjectiveRequisiteSession.get_all_requisite_objectives
    return self._get_provider_session('objective_requisite_session').get_all_requisite_objectives(*args, **kwargs)

def get_dependent_objectives(self, *args, **kwargs):
    """Pass through to provider ObjectiveRequisiteSession.get_dependent_objectives"""
    # Implemented from kitosid template for -
    # osid.learning.ObjectiveRequisiteSession.get_dependent_objectives
    return self._get_provider_session('objective_requisite_session').get_dependent_objectives(*args, **kwargs)

def is_objective_required(self, *args, **kwargs):
    """Pass through to provider ObjectiveRequisiteSession.is_objective_required"""
    # Implemented from kitosid template for -
    # osid.learning.ObjectiveRequisiteSession.is_objective_required
    return self._get_provider_session('objective_requisite_session').is_objective_required(*args, **kwargs)

def get_equivalent_objectives(self, *args, **kwargs):
    """Pass through to provider ObjectiveRequisiteSession.get_equivalent_objectives"""
    # Implemented from kitosid template for -
    # osid.learning.ObjectiveRequisiteSession.get_equivalent_objectives
    return self._get_provider_session('objective_requisite_session').get_equivalent_objectives(*args, **kwargs)

##
# The following methods are from osid.learning.ObjectiveRequisiteAssignmentSession

def can_assign_requisites(self):
    """Pass through to provider ObjectiveRequisiteAssignmentSession.can_assign_requisites"""
    # Implemented from kitosid template for -
    # osid.learning.ObjectiveRequisiteAssignmentSession.can_assign_requisites
    return self._get_provider_session('objective_requisite_assignment_session').can_assign_requisites()

def assign_objective_requisite(self, *args, **kwargs):
    """Pass through to provider ObjectiveRequisiteAssignmentSession.assign_objective_requisite"""
    # Implemented from kitosid template for -
    # osid.learning.ObjectiveRequisiteAssignmentSession.assign_objective_requisite
    return self._get_provider_session('objective_requisite_assignment_session').assign_objective_requisite(*args, **kwargs)

def unassign_objective_requisite(self, *args, **kwargs):
    """Pass through to provider ObjectiveRequisiteAssignmentSession.unassign_objective_requisite"""
    # Implemented from kitosid template for -
    # osid.learning.ObjectiveRequisiteAssignmentSession.unassign_objective_requisite
    return self._get_provider_session('objective_requisite_assignment_session').unassign_objective_requisite(*args, **kwargs)

def assign_equivalent_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveRequisiteAssignmentSession.assign_equivalent_objective"""
    # Implemented from kitosid template for -
    # osid.learning.ObjectiveRequisiteAssignmentSession.assign_equivalent_objective
    return self._get_provider_session('objective_requisite_assignment_session').assign_equivalent_objective(*args, **kwargs)

def unassign_equivalent_objective(self, *args, **kwargs):
    """Pass through to provider ObjectiveRequisiteAssignmentSession.unassign_equivalent_objective"""
    # Implemented from kitosid template for -
    # osid.learning.ObjectiveRequisiteAssignmentSession.unassign_equivalent_objective
    return self._get_provider_session('objective_requisite_assignment_session').unassign_equivalent_objective(*args, **kwargs)

##
# The following methods are from osid.learning.ActivityLookupSession

def can_lookup_activities(self):
    """Pass through to provider ActivityLookupSession.can_lookup_activities"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.can_lookup_resources_template
    return self._get_provider_session('activity_lookup_session').can_lookup_activities()

def use_comparative_activity_view(self):
    """Pass through to provider ActivityLookupSession.use_comparative_activity_view"""
    self._object_views['activity'] = COMPARATIVE
    # self._get_provider_session('activity_lookup_session') # To make sure the session is tracked
    for session in self._get_provider_sessions():
        try:
            session.use_comparative_activity_view()
        except AttributeError:
            pass

def use_plenary_activity_view(self):
    """Pass through to provider ActivityLookupSession.use_plenary_activity_view"""
    self._object_views['activity'] = PLENARY
    # self._get_provider_session('activity_lookup_session') # To make sure the session is tracked
    for session in self._get_provider_sessions():
        try:
            session.use_plenary_activity_view()
        except AttributeError:
            pass

def get_activity(self, *args, **kwargs):
    """Pass through to provider ActivityLookupSession.get_activity"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.get_resource_template
    return self._get_provider_session('activity_lookup_session').get_activity(*args, **kwargs)

def get_activities_by_ids(self, *args, **kwargs):
    """Pass through to provider ActivityLookupSession.get_activities_by_ids"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.get_resources_by_ids_template
    return self._get_provider_session('activity_lookup_session').get_activities_by_ids(*args, **kwargs)

def get_activities_by_genus_type(self, *args, **kwargs):
    """Pass through to provider ActivityLookupSession.get_activities_by_genus_type"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.get_resources_by_genus_type_template
    return self._get_provider_session('activity_lookup_session').get_activities_by_genus_type(*args, **kwargs)

def get_activities_by_parent_genus_type(self, *args, **kwargs):
    """Pass through to provider ActivityLookupSession.get_activities_by_parent_genus_type"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.get_resources_by_parent_genus_type_template
    return self._get_provider_session('activity_lookup_session').get_activities_by_parent_genus_type(*args, **kwargs)

def get_activities_by_record_type(self, *args, **kwargs):
    """Pass through to provider ActivityLookupSession.get_activities_by_record_type"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.get_resources_by_record_type_template
    return self._get_provider_session('activity_lookup_session').get_activities_by_record_type(*args, **kwargs)

def get_activities_for_objective(self, *args, **kwargs):
    """Pass through to provider ActivityLookupSession.get_activities_for_objective"""
    # Implemented from kitosid template for -
    # osid.resource.ActivityLookupSession.get_activities_for_objective
    return self._get_provider_session('activity_lookup_session').get_activities_for_objective(*args, **kwargs)

def get_activities_for_objectives(self, *args, **kwargs):
    """Pass through to provider ActivityLookupSession.get_activities_for_objectives"""
    # Implemented from kitosid template for -
    # osid.resource.ActivityLookupSession.get_activities_for_objectives
    return self._get_provider_session('activity_lookup_session').get_activities_for_objectives(*args, **kwargs)

def get_activities_by_asset(self, *args, **kwargs):
    """Pass through to provider unimplemented"""
    raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

def get_activities_by_assets(self, *args, **kwargs):
    """Pass through to provider unimplemented"""
    raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))

def get_activities(self):
    """Pass through to provider ActivityLookupSession.get_activities"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceLookupSession.get_resources_template
    return self._get_provider_session('activity_lookup_session').get_activities()

activities = property(fget=get_activities)

##
# The following methods are from osid.learning.ActivityQuerySession

def can_search_activities(self):
    """Pass through to provider ActivityQuerySession.can_search_activities"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceQuerySession.can_search_resources_template
    return self._get_provider_session('activity_query_session').can_search_activities()

def get_activity_query(self):
    """Pass through to provider ActivityQuerySession.get_activity_query"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceQuerySession.get_item_query_template
    return self._get_provider_session('activity_query_session').get_activity_query()

activity_query = property(fget=get_activity_query)

def get_activities_by_query(self, *args, **kwargs):
    """Pass through to provider ActivityQuerySession.get_activities_by_query"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceQuerySession.get_items_by_query_template
    return self._get_provider_session('activity_query_session').get_activities_by_query(*args, **kwargs)

##
# The following methods are from osid.learning.ActivityAdminSession

def can_create_activities(self):
    """Pass through to provider ActivityAdminSession.can_create_activities"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.can_create_resources
    return self._get_provider_session('activity_admin_session').can_create_activities()

def can_create_activity_with_record_types(self, *args, **kwargs):
    """Pass through to provider ActivityAdminSession.can_create_activity_with_record_types"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.can_create_resource_with_record_types
    return self._get_provider_session('activity_admin_session').can_create_activity_with_record_types(*args, **kwargs)

def get_activity_form_for_create(self, *args, **kwargs):
    """Pass through to provider ActivityAdminSession.get_activity_form_for_create"""
    # Implemented from -
    # osid.learning.ActivityAdminSession.get_activity_form_for_create_template
    return self._get_provider_session('activity_admin_session').get_activity_form_for_create(*args, **kwargs)

def create_activity(self, *args, **kwargs):
    """Pass through to provider ActivityAdminSession.create_activity"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.create_resource
    return self._get_provider_session('activity_admin_session').create_activity(*args, **kwargs)

def can_update_activities(self):
    """Pass through to provider ActivityAdminSession.can_update_activities"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.can_update_resources
    return self._get_provider_session('activity_admin_session').can_update_activities()

def get_activity_form_for_update(self, *args, **kwargs):
    """Pass through to provider ActivityAdminSession.get_activity_form_for_update"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.get_resource_form_for_update
    return self._get_provider_session('activity_admin_session').get_activity_form_for_update(*args, **kwargs)

def get_activity_form(self, *args, **kwargs):
    """Pass through to provider ActivityAdminSession.get_activity_form_for_update"""
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.get_resource_form_for_update
    # This method might be a bit sketchy. Time will tell.
if isinstance(args[-1], list) or 'activity_record_types' in kwargs: return self.get_activity_form_for_create(*args, **kwargs) else: return self.get_activity_form_for_update(*args, **kwargs) def duplicate_activity(self, activity_id): # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.get_resource_form_for_update return self._get_provider_session('activity_admin_session').duplicate_activity(activity_id) def update_activity(self, *args, **kwargs): """Pass through to provider ActivityAdminSession.update_activity""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.update_resource # Note: The OSID spec does not require returning updated object return self._get_provider_session('activity_admin_session').update_activity(*args, **kwargs) def save_activity(self, activity_form, *args, **kwargs): """Pass through to provider ActivityAdminSession.update_activity""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.update_resource if activity_form.is_for_update(): return self.update_activity(activity_form, *args, **kwargs) else: return self.create_activity(activity_form, *args, **kwargs) def can_delete_activities(self): """Pass through to provider ActivityAdminSession.can_delete_activities""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.can_delete_resources return self._get_provider_session('activity_admin_session').can_delete_activities() def delete_activity(self, *args, **kwargs): """Pass through to provider ActivityAdminSession.delete_activity""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.delete_resource self._get_provider_session('activity_admin_session').delete_activity(*args, **kwargs) def can_manage_activity_aliases(self): """Pass through to provider ActivityAdminSession.can_manage_activity_aliases""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.can_manage_resource_aliases_template 
return self._get_provider_session('activity_admin_session').can_manage_activity_aliases() def alias_activity(self, *args, **kwargs): """Pass through to provider ActivityAdminSession.alias_activity""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.alias_resources self._get_provider_session('activity_admin_session').alias_activity(*args, **kwargs) ## # The following methods are from osid.learning.ProficiencyLookupSession def can_lookup_proficiencies(self): """Pass through to provider ProficiencyLookupSession.can_lookup_proficiencies""" # Implemented from kitosid template for - # osid.resource.ResourceLookupSession.can_lookup_resources_template return self._get_provider_session('proficiency_lookup_session').can_lookup_proficiencies() def use_comparative_proficiency_view(self): """Pass through to provider ProficiencyLookupSession.use_comparative_proficiency_view""" self._object_views['proficiency'] = COMPARATIVE # self._get_provider_session('proficiency_lookup_session') # To make sure the session is tracked for session in self._get_provider_sessions(): try: session.use_comparative_proficiency_view() except AttributeError: pass def use_plenary_proficiency_view(self): """Pass through to provider ProficiencyLookupSession.use_plenary_proficiency_view""" self._object_views['proficiency'] = PLENARY # self._get_provider_session('proficiency_lookup_session') # To make sure the session is tracked for session in self._get_provider_sessions(): try: session.use_plenary_proficiency_view() except AttributeError: pass def use_effective_proficiency_view(self): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services') def use_any_effective_proficiency_view(self): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services') def get_proficiency(self, *args, **kwargs): """Pass through to provider ProficiencyLookupSession.get_proficiency""" # Implemented from kitosid template 
for - # osid.resource.ResourceLookupSession.get_resource_template return self._get_provider_session('proficiency_lookup_session').get_proficiency(*args, **kwargs) def get_proficiencies_by_ids(self, *args, **kwargs): """Pass through to provider ProficiencyLookupSession.get_proficiencies_by_ids""" # Implemented from kitosid template for - # osid.resource.ResourceLookupSession.get_resources_by_ids_template return self._get_provider_session('proficiency_lookup_session').get_proficiencies_by_ids(*args, **kwargs) def get_proficiencies_by_genus_type(self, *args, **kwargs): """Pass through to provider ProficiencyLookupSession.get_proficiencies_by_genus_type""" # Implemented from kitosid template for - # osid.resource.ResourceLookupSession.get_resources_by_genus_type_template return self._get_provider_session('proficiency_lookup_session').get_proficiencies_by_genus_type(*args, **kwargs) def get_proficiencies_by_parent_genus_type(self, *args, **kwargs): """Pass through to provider ProficiencyLookupSession.get_proficiencies_by_parent_genus_type""" # Implemented from kitosid template for - # osid.resource.ResourceLookupSession.get_resources_by_parent_genus_type_template return self._get_provider_session('proficiency_lookup_session').get_proficiencies_by_parent_genus_type(*args, **kwargs) def get_proficiencies_by_record_type(self, *args, **kwargs): """Pass through to provider ProficiencyLookupSession.get_proficiencies_by_record_type""" # Implemented from kitosid template for - # osid.resource.ResourceLookupSession.get_resources_by_record_type_template return self._get_provider_session('proficiency_lookup_session').get_proficiencies_by_record_type(*args, **kwargs) def get_proficiencies_on_date(self, *args, **kwargs): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs)) def get_proficiencies_by_genus_type_on_date(self, *args, **kwargs): """Pass through to provider unimplemented""" raise 
Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs)) def get_proficiencies_for_objective(self, *args, **kwargs): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs)) def get_proficiencies_for_objective_on_date(self, *args, **kwargs): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs)) def get_proficiencies_by_genus_type_for_objective(self, *args, **kwargs): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs)) def get_proficiencies_by_genus_type_for_objective_on_date(self, *args, **kwargs): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs)) def get_proficiencies_for_objectives(self, *args, **kwargs): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs)) def get_proficiencies_for_resource(self, *args, **kwargs): """Pass through to provider ProficiencyLookupSession.get_proficiencies_for_resource""" # Implemented from kitosid template for - # osid.relationship.RelationshipLookupSession.get_relationships_for_source_template return self._get_provider_session('proficiency_lookup_session').get_proficiencies_for_resource(*args, **kwargs) def get_proficiencies_for_resource_on_date(self, *args, **kwargs): """Pass through to provider ProficiencyLookupSession.get_proficiencies_for_resource_on_date""" # Implemented from kitosid template for - # osid.relationship.RelationshipLookupSession.get_relationships_for_source_on_date_template return self._get_provider_session('proficiency_lookup_session').get_proficiencies_for_resource_on_date(*args, **kwargs) def 
get_proficiencies_by_genus_type_for_resource(self, *args, **kwargs): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs)) def get_proficiencies_by_genus_type_for_resource_on_date(self, *args, **kwargs): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs)) def get_proficiencies_for_resources(self, *args, **kwargs): """Pass through to provider ProficiencyLookupSession.get_proficiencies_for_resources""" # Implemented from kitosid template for - # osid.relationship.RelationshipLookupSession.get_relationships_for_source_template return self._get_provider_session('proficiency_lookup_session').get_proficiencies_for_resources(*args, **kwargs) def get_proficiencies_for_objective_and_resource(self, *args, **kwargs): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs)) def get_proficiencies_for_objective_and_resource_on_date(self, *args, **kwargs): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs)) def get_proficiencies_by_genus_type_for_objective_and_resource(self, *args, **kwargs): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs)) def get_proficiencies_by_genus_type_for_objective_and_resource_on_date(self, *args, **kwargs): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs)) def get_proficiencies(self): """Pass through to provider ProficiencyLookupSession.get_proficiencies""" # Implemented from kitosid template for - # osid.resource.ResourceLookupSession.get_resources_template return 
self._get_provider_session('proficiency_lookup_session').get_proficiencies() proficiencies = property(fget=get_proficiencies) ## # The following methods are from osid.learning.ProficiencyQuerySession def can_search_proficiencies(self): """Pass through to provider ProficiencyQuerySession.can_search_proficiencies""" # Implemented from kitosid template for - # osid.resource.ResourceQuerySession.can_search_resources_template return self._get_provider_session('proficiency_query_session').can_search_proficiencies() def get_proficiency_query(self): """Pass through to provider ProficiencyQuerySession.get_proficiency_query""" # Implemented from kitosid template for - # osid.resource.ResourceQuerySession.get_item_query_template return self._get_provider_session('proficiency_query_session').get_proficiency_query() proficiency_query = property(fget=get_proficiency_query) def get_proficiencies_by_query(self, *args, **kwargs): """Pass through to provider ProficiencyQuerySession.get_proficiencies_by_query""" # Implemented from kitosid template for - # osid.resource.ResourceQuerySession.get_items_by_query_template return self._get_provider_session('proficiency_query_session').get_proficiencies_by_query(*args, **kwargs) ## # The following methods are from osid.learning.ProficiencyAdminSession def can_create_proficiencies(self): """Pass through to provider ProficiencyAdminSession.can_create_proficiencies""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.can_create_resources return self._get_provider_session('proficiency_admin_session').can_create_proficiencies() def can_create_proficiency_with_record_types(self, *args, **kwargs): """Pass through to provider ProficiencyAdminSession.can_create_proficiency_with_record_types""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.can_create_resource_with_record_types return self._get_provider_session('proficiency_admin_session').can_create_proficiency_with_record_types(*args, 
**kwargs) def get_proficiency_form_for_create(self, *args, **kwargs): """Pass through to provider ProficiencyAdminSession.get_proficiency_form_for_create""" # Implemented from kitosid template for - # osid.relationship.RelationshipAdminSession.get_relationship_form_for_create_template return self._get_provider_session('proficiency_admin_session').get_proficiency_form_for_create(*args, **kwargs) def create_proficiency(self, *args, **kwargs): """Pass through to provider ProficiencyAdminSession.create_proficiency""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.create_resource return self._get_provider_session('proficiency_admin_session').create_proficiency(*args, **kwargs) def can_update_proficiencies(self): """Pass through to provider ProficiencyAdminSession.can_update_proficiencies""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.can_update_resources return self._get_provider_session('proficiency_admin_session').can_update_proficiencies() def get_proficiency_form_for_update(self, *args, **kwargs): """Pass through to provider ProficiencyAdminSession.get_proficiency_form_for_update""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.get_resource_form_for_update return self._get_provider_session('proficiency_admin_session').get_proficiency_form_for_update(*args, **kwargs) def get_proficiency_form(self, *args, **kwargs): """Pass through to provider ProficiencyAdminSession.get_proficiency_form_for_update""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.get_resource_form_for_update # This method might be a bit sketchy. Time will tell. 
if isinstance(args[-1], list) or 'proficiency_record_types' in kwargs: return self.get_proficiency_form_for_create(*args, **kwargs) else: return self.get_proficiency_form_for_update(*args, **kwargs) def duplicate_proficiency(self, proficiency_id): # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.get_resource_form_for_update return self._get_provider_session('proficiency_admin_session').duplicate_proficiency(proficiency_id) def update_proficiency(self, *args, **kwargs): """Pass through to provider ProficiencyAdminSession.update_proficiency""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.update_resource # Note: The OSID spec does not require returning updated object return self._get_provider_session('proficiency_admin_session').update_proficiency(*args, **kwargs) def save_proficiency(self, proficiency_form, *args, **kwargs): """Pass through to provider ProficiencyAdminSession.update_proficiency""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.update_resource if proficiency_form.is_for_update(): return self.update_proficiency(proficiency_form, *args, **kwargs) else: return self.create_proficiency(proficiency_form, *args, **kwargs) def can_delete_proficiencies(self): """Pass through to provider ProficiencyAdminSession.can_delete_proficiencies""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.can_delete_resources return self._get_provider_session('proficiency_admin_session').can_delete_proficiencies() def delete_proficiency(self, *args, **kwargs): """Pass through to provider ProficiencyAdminSession.delete_proficiency""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.delete_resource self._get_provider_session('proficiency_admin_session').delete_proficiency(*args, **kwargs) def delete_proficiencies(self): """Pass through to provider unimplemented""" raise Unimplemented('Unimplemented in dlkit.services') def 
can_manage_proficiency_aliases(self): """Pass through to provider ProficiencyAdminSession.can_manage_proficiency_aliases""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.can_manage_resource_aliases_template return self._get_provider_session('proficiency_admin_session').can_manage_proficiency_aliases() def alias_proficiency(self, *args, **kwargs): """Pass through to provider ProficiencyAdminSession.alias_proficiency""" # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.alias_resources self._get_provider_session('proficiency_admin_session').alias_proficiency(*args, **kwargs) class ObjectiveBankList(abc_learning_objects.ObjectiveBankList, osid.OsidList): """ObjectiveBankList convenience adapter including related Session methods.""" def get_next_objective_bank(self): """Gets next object""" # Implemented from kitosid template for - # osid.resource.ResourceList.get_next_resource try: next_item = next(self) except StopIteration: raise IllegalState('no more elements available in this list') else: return next_item def next(self): """next method for enumerator""" # Implemented from kitosid template for - # osid.resource.ResourceList.get_next_resource next_item = osid.OsidList.next(self) return next_item __next__ = next next_objective_bank = property(fget=get_next_objective_bank) def get_next_objective_banks(self, n): """gets next n objects from list""" # Implemented from kitosid template for - # osid.resource.ResourceList.get_next_resources if n > self.available(): # !!! This is not quite as specified (see method docs) !!! raise IllegalState('not enough elements available in this list') else: next_list = [] i = 0 while i < n: try: next_list.append(next(self)) except StopIteration: break i += 1 return next_list
56.539798
157
0.745934
15,589
139,936
6.314388
0.024697
0.048255
0.041205
0.066562
0.918982
0.888312
0.847046
0.798852
0.756509
0.709819
0
0.000355
0.174351
139,936
2,474
158
56.562652
0.851616
0.39711
0
0.324651
0
0
0.099946
0.067038
0
0
0
0
0
1
0.332093
false
0.019535
0.004651
0.004651
0.64093
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
8
0e2dfbd8910c1ee5cc589408b726ef27f299a13c
549
py
Python
pytorch_myself/tensor_basic.py
94JuHo/study_for_deeplearning
ababf482b6a24d94b5f860ea9a68e34fe324d182
[ "MIT" ]
null
null
null
pytorch_myself/tensor_basic.py
94JuHo/study_for_deeplearning
ababf482b6a24d94b5f860ea9a68e34fe324d182
[ "MIT" ]
null
null
null
pytorch_myself/tensor_basic.py
94JuHo/study_for_deeplearning
ababf482b6a24d94b5f860ea9a68e34fe324d182
[ "MIT" ]
null
null
null
import torch x = torch.tensor([[1, 2, 3], [4, 5, 6], [7, 8, 9]]) print(x) print("Size:", x.size()) print("Shape:", x.shape) print("Rank:",x.ndimension()) x = torch.unsqueeze(x, 0) print(x) print("Size:", x.size()) print("Shape:", x.shape) print("Rank:",x.ndimension()) x = torch.squeeze(x) print(x) print("Size:", x.size()) print("Shape:", x.shape) print("Rank:",x.ndimension()) x = x.view(9) print(x) print("Size:", x.size()) print("Shape:", x.shape) print("Rank:",x.ndimension()) try: x = x.view(2, 4) except Exception as e: print(e)
17.709677
51
0.599271
95
549
3.463158
0.284211
0.091185
0.133739
0.182371
0.726444
0.726444
0.726444
0.726444
0.726444
0.726444
0
0.02714
0.127505
549
30
52
18.3
0.659708
0
0
0.64
0
0
0.116576
0
0
0
0
0
0
1
0
false
0
0.04
0
0.04
0.68
0
0
0
null
0
0
1
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
7
0e5ed790daabb0eda61cd93338f38f8bae9c9ce7
114
py
Python
vestlus/search_indexes/__init__.py
lehvitus/vestlus
6d9c8b1de7821e544e0c7c99f42d60f8f3805557
[ "BSD-3-Clause" ]
12
2020-07-02T23:36:02.000Z
2020-12-15T07:29:20.000Z
vestlus/search_indexes/__init__.py
lehvitus/vestlus
6d9c8b1de7821e544e0c7c99f42d60f8f3805557
[ "BSD-3-Clause" ]
null
null
null
vestlus/search_indexes/__init__.py
lehvitus/vestlus
6d9c8b1de7821e544e0c7c99f42d60f8f3805557
[ "BSD-3-Clause" ]
null
null
null
from .channel import ChannelIndex from .message import PrivateMessageIndex from .message import GroupMessageIndex
28.5
40
0.868421
12
114
8.25
0.583333
0.222222
0.343434
0
0
0
0
0
0
0
0
0
0.105263
114
3
41
38
0.970588
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
0e831bf1c22a83ba6c4e1123beb7df5d6dcc4a96
154
py
Python
hebpipe/lib/xrenner/__init__.py
amir-zeldes/HebPipe
a3eb95abd2dd9588806b545eba7b28f57e589973
[ "Apache-2.0" ]
21
2019-02-04T16:27:37.000Z
2022-03-22T11:45:05.000Z
hebpipe/lib/xrenner/__init__.py
amir-zeldes/HebPipe
a3eb95abd2dd9588806b545eba7b28f57e589973
[ "Apache-2.0" ]
12
2020-01-02T15:56:31.000Z
2022-03-31T10:35:16.000Z
hebpipe/lib/xrenner/__init__.py
amir-zeldes/HebPipe
a3eb95abd2dd9588806b545eba7b28f57e589973
[ "Apache-2.0" ]
5
2020-01-01T20:51:35.000Z
2021-12-21T12:56:53.000Z
## xrenner init ## import sys if sys.version_info[0] < 3: from modules.xrenner_xrenner import Xrenner else: from .modules.xrenner_xrenner import Xrenner
25.666667
45
0.785714
23
154
5.130435
0.521739
0.186441
0.305085
0.423729
0.644068
0.644068
0
0
0
0
0
0.014925
0.12987
154
6
45
25.666667
0.865672
0.077922
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.6
0
0.6
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
7eba47dc78322e6f5149d7cfcee2d62d8722c3a6
9,701
py
Python
webapp/apps/btax/migrations/0001_initial.py
codekansas/PolicyBrain
83c459db883536bae70cc78ca92ebdcff554ac2d
[ "MIT" ]
null
null
null
webapp/apps/btax/migrations/0001_initial.py
codekansas/PolicyBrain
83c459db883536bae70cc78ca92ebdcff554ac2d
[ "MIT" ]
null
null
null
webapp/apps/btax/migrations/0001_initial.py
codekansas/PolicyBrain
83c459db883536bae70cc78ca92ebdcff554ac2d
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import datetime import jsonfield.fields from django.conf import settings import uuidfield.fields import webapp.apps.taxbrain.models class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='BTaxOutputUrl', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('model_pk', models.IntegerField(default=None, null=True)), ('exp_comp_datetime', models.DateTimeField(default=datetime.datetime(2015, 1, 1, 0, 0))), ('uuid', uuidfield.fields.UUIDField(null=True, default=None, editable=False, max_length=32, blank=True, unique=True)), ('btax_vers', models.CharField(default=None, max_length=50, null=True, blank=True)), ('taxcalc_vers', models.CharField(default=None, max_length=50, null=True, blank=True)), ], ), migrations.CreateModel( name='BTaxSaveInputs', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('btax_betr_corp', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_betr_entity_Switch', models.NullBooleanField(default=None)), ('btax_betr_pass', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_depr_allyr', models.CharField(default=None, max_length=50, null=True, blank=True)), ('btax_depr_3yr', models.CharField(default=None, max_length=50, null=True, blank=True)), ('btax_depr_5yr', models.CharField(default=None, max_length=50, null=True, blank=True)), ('btax_depr_7yr', models.CharField(default=None, max_length=50, null=True, blank=True)), ('btax_depr_10yr', models.CharField(default=None, max_length=50, null=True, blank=True)), ('btax_depr_15yr', models.CharField(default=None, max_length=50, null=True, blank=True)), ('btax_depr_20yr', 
models.CharField(default=None, max_length=50, null=True, blank=True)), ('btax_depr_25yr', models.CharField(default=None, max_length=50, null=True, blank=True)), ('btax_depr_27_5yr', models.CharField(default=None, max_length=50, null=True, blank=True)), ('btax_depr_39yr', models.CharField(default=None, max_length=50, null=True, blank=True)), ('btax_depr_allyr_gds_Switch', models.CharField(default=b'True', max_length=50, null=True, blank=True)), ('btax_depr_3yr_gds_Switch', models.CharField(default=b'True', max_length=50, null=True, blank=True)), ('btax_depr_5yr_gds_Switch', models.CharField(default=b'True', max_length=50, null=True, blank=True)), ('btax_depr_7yr_gds_Switch', models.CharField(default=b'True', max_length=50, null=True, blank=True)), ('btax_depr_10yr_gds_Switch', models.CharField(default=b'True', max_length=50, null=True, blank=True)), ('btax_depr_15yr_gds_Switch', models.CharField(default=b'True', max_length=50, null=True, blank=True)), ('btax_depr_20yr_gds_Switch', models.CharField(default=b'True', max_length=50, null=True, blank=True)), ('btax_depr_25yr_gds_Switch', models.CharField(default=b'True', max_length=50, null=True, blank=True)), ('btax_depr_27_5yr_gds_Switch', models.CharField(default=b'True', max_length=50, null=True, blank=True)), ('btax_depr_39yr_gds_Switch', models.CharField(default=b'True', max_length=50, null=True, blank=True)), ('btax_depr_allyr_ads_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_3yr_ads_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_5yr_ads_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_7yr_ads_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_10yr_ads_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_15yr_ads_Switch', models.CharField(default=b'False', max_length=50, null=True, 
blank=True)), ('btax_depr_20yr_ads_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_25yr_ads_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_27_5yr_ads_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_39yr_ads_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_allyr_tax_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_3yr_tax_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_5yr_tax_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_7yr_tax_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_10yr_tax_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_15yr_tax_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_20yr_tax_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_25yr_tax_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_27_5yr_tax_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_39yr_tax_Switch', models.CharField(default=b'False', max_length=50, null=True, blank=True)), ('btax_depr_allyr_exp', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_depr_3yr_exp', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_depr_5yr_exp', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_depr_7yr_exp', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), 
('btax_depr_10yr_exp', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_depr_15yr_exp', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_depr_20yr_exp', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_depr_25yr_exp', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_depr_27_5yr_exp', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_depr_39yr_exp', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_other_hair', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_other_corpeq', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_other_proptx', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_other_invest', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_econ_nomint', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('btax_econ_inflat', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)), ('job_ids', webapp.apps.taxbrain.models.SeparatedValuesField(default=None, null=True, blank=True)), ('jobs_not_ready', webapp.apps.taxbrain.models.SeparatedValuesField(default=None, null=True, blank=True)), ('first_year', models.IntegerField(default=None, null=True)), ('tax_result', jsonfield.fields.JSONField(default=None, null=True, blank=True)), ('creation_date', models.DateTimeField(default=datetime.datetime(2015, 1, 1, 0, 0))), ], options={ 'permissions': (('view_inputs', 'Allowed to 
view Taxbrain.'),), }, ), migrations.AddField( model_name='btaxoutputurl', name='unique_inputs', field=models.ForeignKey(default=None, to='btax.BTaxSaveInputs'), ), migrations.AddField( model_name='btaxoutputurl', name='user', field=models.ForeignKey(default=None, to=settings.AUTH_USER_MODEL, null=True), ), ]
85.096491
143
0.67024
1,227
9,701
5.066015
0.09617
0.086229
0.131757
0.172297
0.873713
0.873713
0.832207
0.82722
0.826898
0.826898
0
0.030111
0.18864
9,701
113
144
85.849558
0.759624
0.002165
0
0.149533
0
0
0.166047
0.079872
0
0
0
0
0
1
0
false
0.009346
0.065421
0
0.093458
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
7ee0a5ca756112a988e770b9a1fd6ad6f9dca9b1
65,722
py
Python
sdk/python/pulumi_gcp/certificateauthority/authority.py
sisisin/pulumi-gcp
af6681d70ea457843409110c1324817fe55f68ad
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_gcp/certificateauthority/authority.py
sisisin/pulumi-gcp
af6681d70ea457843409110c1324817fe55f68ad
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_gcp/certificateauthority/authority.py
sisisin/pulumi-gcp
af6681d70ea457843409110c1324817fe55f68ad
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities from . import outputs from ._inputs import * __all__ = ['AuthorityArgs', 'Authority'] @pulumi.input_type class AuthorityArgs: def __init__(__self__, *, certificate_authority_id: pulumi.Input[str], config: pulumi.Input['AuthorityConfigArgs'], key_spec: pulumi.Input['AuthorityKeySpecArgs'], location: pulumi.Input[str], pool: pulumi.Input[str], gcs_bucket: Optional[pulumi.Input[str]] = None, ignore_active_certificates_on_deletion: Optional[pulumi.Input[bool]] = None, labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, lifetime: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[str]] = None, type: Optional[pulumi.Input[str]] = None): """ The set of arguments for constructing a Authority resource. :param pulumi.Input[str] certificate_authority_id: The user provided Resource ID for this Certificate Authority. :param pulumi.Input['AuthorityConfigArgs'] config: The config used to create a self-signed X.509 certificate or CSR. Structure is documented below. :param pulumi.Input['AuthorityKeySpecArgs'] key_spec: Used when issuing certificates for this CertificateAuthority. If this CertificateAuthority is a self-signed CertificateAuthority, this key is also used to sign the self-signed CA certificate. Otherwise, it is used to sign a CSR. Structure is documented below. :param pulumi.Input[str] location: Location of the CertificateAuthority. A full list of valid locations can be found by running `gcloud privateca locations list`. :param pulumi.Input[str] pool: The name of the CaPool this Certificate Authority belongs to. 
:param pulumi.Input[str] gcs_bucket: The name of a Cloud Storage bucket where this CertificateAuthority will publish content, such as the CA certificate and CRLs. This must be a bucket name, without any prefixes (such as `gs://`) or suffixes (such as `.googleapis.com`). For example, to use a bucket named my-bucket, you would simply specify `my-bucket`. If not specified, a managed bucket will be created. :param pulumi.Input[bool] ignore_active_certificates_on_deletion: This field allows the CA to be deleted even if the CA has active certs. Active certs include both unrevoked and unexpired certs. Use with care. Defaults to `false`. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels with user-defined metadata. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. :param pulumi.Input[str] lifetime: The desired lifetime of the CA certificate. Used to create the "notBeforeTime" and "notAfterTime" fields inside an X.509 certificate. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". :param pulumi.Input[str] project: The ID of the project in which the resource belongs. If it is not provided, the provider project is used. :param pulumi.Input[str] type: The Type of this CertificateAuthority. > **Note:** For `SUBORDINATE` Certificate Authorities, they need to be manually activated (via Cloud Console of `gcloud`) before they can issue certificates. Default value is `SELF_SIGNED`. Possible values are `SELF_SIGNED` and `SUBORDINATE`. 
""" pulumi.set(__self__, "certificate_authority_id", certificate_authority_id) pulumi.set(__self__, "config", config) pulumi.set(__self__, "key_spec", key_spec) pulumi.set(__self__, "location", location) pulumi.set(__self__, "pool", pool) if gcs_bucket is not None: pulumi.set(__self__, "gcs_bucket", gcs_bucket) if ignore_active_certificates_on_deletion is not None: pulumi.set(__self__, "ignore_active_certificates_on_deletion", ignore_active_certificates_on_deletion) if labels is not None: pulumi.set(__self__, "labels", labels) if lifetime is not None: pulumi.set(__self__, "lifetime", lifetime) if project is not None: pulumi.set(__self__, "project", project) if type is not None: pulumi.set(__self__, "type", type) @property @pulumi.getter(name="certificateAuthorityId") def certificate_authority_id(self) -> pulumi.Input[str]: """ The user provided Resource ID for this Certificate Authority. """ return pulumi.get(self, "certificate_authority_id") @certificate_authority_id.setter def certificate_authority_id(self, value: pulumi.Input[str]): pulumi.set(self, "certificate_authority_id", value) @property @pulumi.getter def config(self) -> pulumi.Input['AuthorityConfigArgs']: """ The config used to create a self-signed X.509 certificate or CSR. Structure is documented below. """ return pulumi.get(self, "config") @config.setter def config(self, value: pulumi.Input['AuthorityConfigArgs']): pulumi.set(self, "config", value) @property @pulumi.getter(name="keySpec") def key_spec(self) -> pulumi.Input['AuthorityKeySpecArgs']: """ Used when issuing certificates for this CertificateAuthority. If this CertificateAuthority is a self-signed CertificateAuthority, this key is also used to sign the self-signed CA certificate. Otherwise, it is used to sign a CSR. Structure is documented below. 
""" return pulumi.get(self, "key_spec") @key_spec.setter def key_spec(self, value: pulumi.Input['AuthorityKeySpecArgs']): pulumi.set(self, "key_spec", value) @property @pulumi.getter def location(self) -> pulumi.Input[str]: """ Location of the CertificateAuthority. A full list of valid locations can be found by running `gcloud privateca locations list`. """ return pulumi.get(self, "location") @location.setter def location(self, value: pulumi.Input[str]): pulumi.set(self, "location", value) @property @pulumi.getter def pool(self) -> pulumi.Input[str]: """ The name of the CaPool this Certificate Authority belongs to. """ return pulumi.get(self, "pool") @pool.setter def pool(self, value: pulumi.Input[str]): pulumi.set(self, "pool", value) @property @pulumi.getter(name="gcsBucket") def gcs_bucket(self) -> Optional[pulumi.Input[str]]: """ The name of a Cloud Storage bucket where this CertificateAuthority will publish content, such as the CA certificate and CRLs. This must be a bucket name, without any prefixes (such as `gs://`) or suffixes (such as `.googleapis.com`). For example, to use a bucket named my-bucket, you would simply specify `my-bucket`. If not specified, a managed bucket will be created. """ return pulumi.get(self, "gcs_bucket") @gcs_bucket.setter def gcs_bucket(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "gcs_bucket", value) @property @pulumi.getter(name="ignoreActiveCertificatesOnDeletion") def ignore_active_certificates_on_deletion(self) -> Optional[pulumi.Input[bool]]: """ This field allows the CA to be deleted even if the CA has active certs. Active certs include both unrevoked and unexpired certs. Use with care. Defaults to `false`. 
""" return pulumi.get(self, "ignore_active_certificates_on_deletion") @ignore_active_certificates_on_deletion.setter def ignore_active_certificates_on_deletion(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "ignore_active_certificates_on_deletion", value) @property @pulumi.getter def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]: """ Labels with user-defined metadata. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. """ return pulumi.get(self, "labels") @labels.setter def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]): pulumi.set(self, "labels", value) @property @pulumi.getter def lifetime(self) -> Optional[pulumi.Input[str]]: """ The desired lifetime of the CA certificate. Used to create the "notBeforeTime" and "notAfterTime" fields inside an X.509 certificate. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". """ return pulumi.get(self, "lifetime") @lifetime.setter def lifetime(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "lifetime", value) @property @pulumi.getter def project(self) -> Optional[pulumi.Input[str]]: """ The ID of the project in which the resource belongs. If it is not provided, the provider project is used. """ return pulumi.get(self, "project") @project.setter def project(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "project", value) @property @pulumi.getter def type(self) -> Optional[pulumi.Input[str]]: """ The Type of this CertificateAuthority. > **Note:** For `SUBORDINATE` Certificate Authorities, they need to be manually activated (via Cloud Console of `gcloud`) before they can issue certificates. Default value is `SELF_SIGNED`. Possible values are `SELF_SIGNED` and `SUBORDINATE`. 
""" return pulumi.get(self, "type") @type.setter def type(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "type", value) @pulumi.input_type class _AuthorityState: def __init__(__self__, *, access_urls: Optional[pulumi.Input[Sequence[pulumi.Input['AuthorityAccessUrlArgs']]]] = None, certificate_authority_id: Optional[pulumi.Input[str]] = None, config: Optional[pulumi.Input['AuthorityConfigArgs']] = None, create_time: Optional[pulumi.Input[str]] = None, gcs_bucket: Optional[pulumi.Input[str]] = None, ignore_active_certificates_on_deletion: Optional[pulumi.Input[bool]] = None, key_spec: Optional[pulumi.Input['AuthorityKeySpecArgs']] = None, labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, lifetime: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, pem_ca_certificates: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, pool: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[str]] = None, state: Optional[pulumi.Input[str]] = None, type: Optional[pulumi.Input[str]] = None, update_time: Optional[pulumi.Input[str]] = None): """ Input properties used for looking up and filtering Authority resources. :param pulumi.Input[Sequence[pulumi.Input['AuthorityAccessUrlArgs']]] access_urls: URLs for accessing content published by this CA, such as the CA certificate and CRLs. :param pulumi.Input[str] certificate_authority_id: The user provided Resource ID for this Certificate Authority. :param pulumi.Input['AuthorityConfigArgs'] config: The config used to create a self-signed X.509 certificate or CSR. Structure is documented below. :param pulumi.Input[str] create_time: The time at which this CertificateAuthority was created. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". 
:param pulumi.Input[str] gcs_bucket: The name of a Cloud Storage bucket where this CertificateAuthority will publish content, such as the CA certificate and CRLs. This must be a bucket name, without any prefixes (such as `gs://`) or suffixes (such as `.googleapis.com`). For example, to use a bucket named my-bucket, you would simply specify `my-bucket`. If not specified, a managed bucket will be created. :param pulumi.Input[bool] ignore_active_certificates_on_deletion: This field allows the CA to be deleted even if the CA has active certs. Active certs include both unrevoked and unexpired certs. Use with care. Defaults to `false`. :param pulumi.Input['AuthorityKeySpecArgs'] key_spec: Used when issuing certificates for this CertificateAuthority. If this CertificateAuthority is a self-signed CertificateAuthority, this key is also used to sign the self-signed CA certificate. Otherwise, it is used to sign a CSR. Structure is documented below. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels with user-defined metadata. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. :param pulumi.Input[str] lifetime: The desired lifetime of the CA certificate. Used to create the "notBeforeTime" and "notAfterTime" fields inside an X.509 certificate. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". :param pulumi.Input[str] location: Location of the CertificateAuthority. A full list of valid locations can be found by running `gcloud privateca locations list`. :param pulumi.Input[str] name: The resource name for this CertificateAuthority in the format projects/*/locations/*/certificateAuthorities/*. :param pulumi.Input[Sequence[pulumi.Input[str]]] pem_ca_certificates: This CertificateAuthority's certificate chain, including the current CertificateAuthority's certificate. Ordered such that the root issuer is the final element (consistent with RFC 5246). 
For a self-signed CA, this will only list the current CertificateAuthority's certificate. :param pulumi.Input[str] pool: The name of the CaPool this Certificate Authority belongs to. :param pulumi.Input[str] project: The ID of the project in which the resource belongs. If it is not provided, the provider project is used. :param pulumi.Input[str] state: The State for this CertificateAuthority. :param pulumi.Input[str] type: The Type of this CertificateAuthority. > **Note:** For `SUBORDINATE` Certificate Authorities, they need to be manually activated (via Cloud Console of `gcloud`) before they can issue certificates. Default value is `SELF_SIGNED`. Possible values are `SELF_SIGNED` and `SUBORDINATE`. :param pulumi.Input[str] update_time: The time at which this CertificateAuthority was updated. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". """ if access_urls is not None: pulumi.set(__self__, "access_urls", access_urls) if certificate_authority_id is not None: pulumi.set(__self__, "certificate_authority_id", certificate_authority_id) if config is not None: pulumi.set(__self__, "config", config) if create_time is not None: pulumi.set(__self__, "create_time", create_time) if gcs_bucket is not None: pulumi.set(__self__, "gcs_bucket", gcs_bucket) if ignore_active_certificates_on_deletion is not None: pulumi.set(__self__, "ignore_active_certificates_on_deletion", ignore_active_certificates_on_deletion) if key_spec is not None: pulumi.set(__self__, "key_spec", key_spec) if labels is not None: pulumi.set(__self__, "labels", labels) if lifetime is not None: pulumi.set(__self__, "lifetime", lifetime) if location is not None: pulumi.set(__self__, "location", location) if name is not None: pulumi.set(__self__, "name", name) if pem_ca_certificates is not None: pulumi.set(__self__, "pem_ca_certificates", pem_ca_certificates) if pool is not None: 
pulumi.set(__self__, "pool", pool) if project is not None: pulumi.set(__self__, "project", project) if state is not None: pulumi.set(__self__, "state", state) if type is not None: pulumi.set(__self__, "type", type) if update_time is not None: pulumi.set(__self__, "update_time", update_time) @property @pulumi.getter(name="accessUrls") def access_urls(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AuthorityAccessUrlArgs']]]]: """ URLs for accessing content published by this CA, such as the CA certificate and CRLs. """ return pulumi.get(self, "access_urls") @access_urls.setter def access_urls(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AuthorityAccessUrlArgs']]]]): pulumi.set(self, "access_urls", value) @property @pulumi.getter(name="certificateAuthorityId") def certificate_authority_id(self) -> Optional[pulumi.Input[str]]: """ The user provided Resource ID for this Certificate Authority. """ return pulumi.get(self, "certificate_authority_id") @certificate_authority_id.setter def certificate_authority_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "certificate_authority_id", value) @property @pulumi.getter def config(self) -> Optional[pulumi.Input['AuthorityConfigArgs']]: """ The config used to create a self-signed X.509 certificate or CSR. Structure is documented below. """ return pulumi.get(self, "config") @config.setter def config(self, value: Optional[pulumi.Input['AuthorityConfigArgs']]): pulumi.set(self, "config", value) @property @pulumi.getter(name="createTime") def create_time(self) -> Optional[pulumi.Input[str]]: """ The time at which this CertificateAuthority was created. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". 
""" return pulumi.get(self, "create_time") @create_time.setter def create_time(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "create_time", value) @property @pulumi.getter(name="gcsBucket") def gcs_bucket(self) -> Optional[pulumi.Input[str]]: """ The name of a Cloud Storage bucket where this CertificateAuthority will publish content, such as the CA certificate and CRLs. This must be a bucket name, without any prefixes (such as `gs://`) or suffixes (such as `.googleapis.com`). For example, to use a bucket named my-bucket, you would simply specify `my-bucket`. If not specified, a managed bucket will be created. """ return pulumi.get(self, "gcs_bucket") @gcs_bucket.setter def gcs_bucket(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "gcs_bucket", value) @property @pulumi.getter(name="ignoreActiveCertificatesOnDeletion") def ignore_active_certificates_on_deletion(self) -> Optional[pulumi.Input[bool]]: """ This field allows the CA to be deleted even if the CA has active certs. Active certs include both unrevoked and unexpired certs. Use with care. Defaults to `false`. """ return pulumi.get(self, "ignore_active_certificates_on_deletion") @ignore_active_certificates_on_deletion.setter def ignore_active_certificates_on_deletion(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "ignore_active_certificates_on_deletion", value) @property @pulumi.getter(name="keySpec") def key_spec(self) -> Optional[pulumi.Input['AuthorityKeySpecArgs']]: """ Used when issuing certificates for this CertificateAuthority. If this CertificateAuthority is a self-signed CertificateAuthority, this key is also used to sign the self-signed CA certificate. Otherwise, it is used to sign a CSR. Structure is documented below. 
""" return pulumi.get(self, "key_spec") @key_spec.setter def key_spec(self, value: Optional[pulumi.Input['AuthorityKeySpecArgs']]): pulumi.set(self, "key_spec", value) @property @pulumi.getter def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]: """ Labels with user-defined metadata. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. """ return pulumi.get(self, "labels") @labels.setter def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]): pulumi.set(self, "labels", value) @property @pulumi.getter def lifetime(self) -> Optional[pulumi.Input[str]]: """ The desired lifetime of the CA certificate. Used to create the "notBeforeTime" and "notAfterTime" fields inside an X.509 certificate. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". """ return pulumi.get(self, "lifetime") @lifetime.setter def lifetime(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "lifetime", value) @property @pulumi.getter def location(self) -> Optional[pulumi.Input[str]]: """ Location of the CertificateAuthority. A full list of valid locations can be found by running `gcloud privateca locations list`. """ return pulumi.get(self, "location") @location.setter def location(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "location", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ The resource name for this CertificateAuthority in the format projects/*/locations/*/certificateAuthorities/*. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="pemCaCertificates") def pem_ca_certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ This CertificateAuthority's certificate chain, including the current CertificateAuthority's certificate. 
Ordered such that the root issuer is the final element (consistent with RFC 5246). For a self-signed CA, this will only list the current CertificateAuthority's certificate. """ return pulumi.get(self, "pem_ca_certificates") @pem_ca_certificates.setter def pem_ca_certificates(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "pem_ca_certificates", value) @property @pulumi.getter def pool(self) -> Optional[pulumi.Input[str]]: """ The name of the CaPool this Certificate Authority belongs to. """ return pulumi.get(self, "pool") @pool.setter def pool(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "pool", value) @property @pulumi.getter def project(self) -> Optional[pulumi.Input[str]]: """ The ID of the project in which the resource belongs. If it is not provided, the provider project is used. """ return pulumi.get(self, "project") @project.setter def project(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "project", value) @property @pulumi.getter def state(self) -> Optional[pulumi.Input[str]]: """ The State for this CertificateAuthority. """ return pulumi.get(self, "state") @state.setter def state(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "state", value) @property @pulumi.getter def type(self) -> Optional[pulumi.Input[str]]: """ The Type of this CertificateAuthority. > **Note:** For `SUBORDINATE` Certificate Authorities, they need to be manually activated (via Cloud Console of `gcloud`) before they can issue certificates. Default value is `SELF_SIGNED`. Possible values are `SELF_SIGNED` and `SUBORDINATE`. """ return pulumi.get(self, "type") @type.setter def type(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "type", value) @property @pulumi.getter(name="updateTime") def update_time(self) -> Optional[pulumi.Input[str]]: """ The time at which this CertificateAuthority was updated. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. 
Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". """ return pulumi.get(self, "update_time") @update_time.setter def update_time(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "update_time", value) class Authority(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, certificate_authority_id: Optional[pulumi.Input[str]] = None, config: Optional[pulumi.Input[pulumi.InputType['AuthorityConfigArgs']]] = None, gcs_bucket: Optional[pulumi.Input[str]] = None, ignore_active_certificates_on_deletion: Optional[pulumi.Input[bool]] = None, key_spec: Optional[pulumi.Input[pulumi.InputType['AuthorityKeySpecArgs']]] = None, labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, lifetime: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, pool: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[str]] = None, type: Optional[pulumi.Input[str]] = None, __props__=None): """ A CertificateAuthority represents an individual Certificate Authority. A CertificateAuthority can be used to create Certificates. 
To get more information about CertificateAuthority, see: * [API documentation](https://cloud.google.com/certificate-authority-service/docs/reference/rest) * How-to Guides * [Official Documentation](https://cloud.google.com/certificate-authority-service) ## Example Usage ### Privateca Certificate Authority Basic ```python import pulumi import pulumi_gcp as gcp default = gcp.certificateauthority.Authority("default", certificate_authority_id="my-certificate-authority", config=gcp.certificateauthority.AuthorityConfigArgs( subject_config=gcp.certificateauthority.AuthorityConfigSubjectConfigArgs( subject=gcp.certificateauthority.AuthorityConfigSubjectConfigSubjectArgs( common_name="my-certificate-authority", organization="HashiCorp", ), subject_alt_name=gcp.certificateauthority.AuthorityConfigSubjectConfigSubjectAltNameArgs( dns_names=["hashicorp.com"], ), ), x509_config=gcp.certificateauthority.AuthorityConfigX509ConfigArgs( ca_options=gcp.certificateauthority.AuthorityConfigX509ConfigCaOptionsArgs( is_ca=True, max_issuer_path_length=10, ), key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageArgs( base_key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageBaseKeyUsageArgs( cert_sign=True, content_commitment=True, crl_sign=True, data_encipherment=True, decipher_only=True, digital_signature=True, key_agreement=True, key_encipherment=False, ), extended_key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageExtendedKeyUsageArgs( client_auth=False, code_signing=True, email_protection=True, server_auth=True, time_stamping=True, ), ), ), ), key_spec=gcp.certificateauthority.AuthorityKeySpecArgs( algorithm="RSA_PKCS1_4096_SHA256", ), lifetime="86400s", location="us-central1", pool="ca-pool") ``` ### Privateca Certificate Authority Subordinate ```python import pulumi import pulumi_gcp as gcp default = gcp.certificateauthority.Authority("default", certificate_authority_id="my-certificate-authority", 
config=gcp.certificateauthority.AuthorityConfigArgs( subject_config=gcp.certificateauthority.AuthorityConfigSubjectConfigArgs( subject=gcp.certificateauthority.AuthorityConfigSubjectConfigSubjectArgs( common_name="my-subordinate-authority", organization="HashiCorp", ), subject_alt_name=gcp.certificateauthority.AuthorityConfigSubjectConfigSubjectAltNameArgs( dns_names=["hashicorp.com"], ), ), x509_config=gcp.certificateauthority.AuthorityConfigX509ConfigArgs( ca_options=gcp.certificateauthority.AuthorityConfigX509ConfigCaOptionsArgs( is_ca=True, max_issuer_path_length=0, ), key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageArgs( base_key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageBaseKeyUsageArgs( cert_sign=True, content_commitment=True, crl_sign=True, data_encipherment=True, decipher_only=True, digital_signature=True, key_agreement=True, key_encipherment=False, ), extended_key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageExtendedKeyUsageArgs( client_auth=False, code_signing=True, email_protection=True, server_auth=True, time_stamping=True, ), ), ), ), key_spec=gcp.certificateauthority.AuthorityKeySpecArgs( algorithm="RSA_PKCS1_4096_SHA256", ), lifetime="86400s", location="us-central1", pool="ca-pool", type="SUBORDINATE") ``` ### Privateca Certificate Authority Byo Key ```python import pulumi import pulumi_gcp as gcp privateca_sa = gcp.projects.ServiceIdentity("privatecaSa", service="privateca.googleapis.com") privateca_sa_keyuser_signerverifier = gcp.kms.CryptoKeyIAMBinding("privatecaSaKeyuserSignerverifier", crypto_key_id="projects/keys-project/locations/us-central1/keyRings/key-ring/cryptoKeys/crypto-key", role="roles/cloudkms.signerVerifier", members=[privateca_sa.email.apply(lambda email: f"serviceAccount:{email}")]) privateca_sa_keyuser_viewer = gcp.kms.CryptoKeyIAMBinding("privatecaSaKeyuserViewer", crypto_key_id="projects/keys-project/locations/us-central1/keyRings/key-ring/cryptoKeys/crypto-key", 
role="roles/viewer", members=[privateca_sa.email.apply(lambda email: f"serviceAccount:{email}")]) default = gcp.certificateauthority.Authority("default", pool="ca-pool", certificate_authority_id="my-certificate-authority", location="us-central1", key_spec=gcp.certificateauthority.AuthorityKeySpecArgs( cloud_kms_key_version="projects/keys-project/locations/us-central1/keyRings/key-ring/cryptoKeys/crypto-key/cryptoKeyVersions/1", ), config=gcp.certificateauthority.AuthorityConfigArgs( subject_config=gcp.certificateauthority.AuthorityConfigSubjectConfigArgs( subject=gcp.certificateauthority.AuthorityConfigSubjectConfigSubjectArgs( organization="Example, Org.", common_name="Example Authority", ), ), x509_config=gcp.certificateauthority.AuthorityConfigX509ConfigArgs( ca_options=gcp.certificateauthority.AuthorityConfigX509ConfigCaOptionsArgs( is_ca=True, max_issuer_path_length=10, ), key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageArgs( base_key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageBaseKeyUsageArgs( cert_sign=True, crl_sign=True, ), extended_key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageExtendedKeyUsageArgs( server_auth=False, ), ), ), ), opts=pulumi.ResourceOptions(depends_on=[ privateca_sa_keyuser_signerverifier, privateca_sa_keyuser_viewer, ])) ``` ## Import CertificateAuthority can be imported using any of these accepted formats ```sh $ pulumi import gcp:certificateauthority/authority:Authority default projects/{{project}}/locations/{{location}}/caPools/{{pool}}/certificateAuthorities/{{certificate_authority_id}} ``` ```sh $ pulumi import gcp:certificateauthority/authority:Authority default {{project}}/{{location}}/{{pool}}/{{certificate_authority_id}} ``` ```sh $ pulumi import gcp:certificateauthority/authority:Authority default {{location}}/{{pool}}/{{certificate_authority_id}} ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. 
:param pulumi.Input[str] certificate_authority_id: The user provided Resource ID for this Certificate Authority. :param pulumi.Input[pulumi.InputType['AuthorityConfigArgs']] config: The config used to create a self-signed X.509 certificate or CSR. Structure is documented below. :param pulumi.Input[str] gcs_bucket: The name of a Cloud Storage bucket where this CertificateAuthority will publish content, such as the CA certificate and CRLs. This must be a bucket name, without any prefixes (such as `gs://`) or suffixes (such as `.googleapis.com`). For example, to use a bucket named my-bucket, you would simply specify `my-bucket`. If not specified, a managed bucket will be created. :param pulumi.Input[bool] ignore_active_certificates_on_deletion: This field allows the CA to be deleted even if the CA has active certs. Active certs include both unrevoked and unexpired certs. Use with care. Defaults to `false`. :param pulumi.Input[pulumi.InputType['AuthorityKeySpecArgs']] key_spec: Used when issuing certificates for this CertificateAuthority. If this CertificateAuthority is a self-signed CertificateAuthority, this key is also used to sign the self-signed CA certificate. Otherwise, it is used to sign a CSR. Structure is documented below. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels with user-defined metadata. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. :param pulumi.Input[str] lifetime: The desired lifetime of the CA certificate. Used to create the "notBeforeTime" and "notAfterTime" fields inside an X.509 certificate. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". :param pulumi.Input[str] location: Location of the CertificateAuthority. A full list of valid locations can be found by running `gcloud privateca locations list`. :param pulumi.Input[str] pool: The name of the CaPool this Certificate Authority belongs to. 
:param pulumi.Input[str] project: The ID of the project in which the resource belongs. If it is not provided, the provider project is used. :param pulumi.Input[str] type: The Type of this CertificateAuthority. > **Note:** For `SUBORDINATE` Certificate Authorities, they need to be manually activated (via Cloud Console of `gcloud`) before they can issue certificates. Default value is `SELF_SIGNED`. Possible values are `SELF_SIGNED` and `SUBORDINATE`. """ ... @overload def __init__(__self__, resource_name: str, args: AuthorityArgs, opts: Optional[pulumi.ResourceOptions] = None): """ A CertificateAuthority represents an individual Certificate Authority. A CertificateAuthority can be used to create Certificates. To get more information about CertificateAuthority, see: * [API documentation](https://cloud.google.com/certificate-authority-service/docs/reference/rest) * How-to Guides * [Official Documentation](https://cloud.google.com/certificate-authority-service) ## Example Usage ### Privateca Certificate Authority Basic ```python import pulumi import pulumi_gcp as gcp default = gcp.certificateauthority.Authority("default", certificate_authority_id="my-certificate-authority", config=gcp.certificateauthority.AuthorityConfigArgs( subject_config=gcp.certificateauthority.AuthorityConfigSubjectConfigArgs( subject=gcp.certificateauthority.AuthorityConfigSubjectConfigSubjectArgs( common_name="my-certificate-authority", organization="HashiCorp", ), subject_alt_name=gcp.certificateauthority.AuthorityConfigSubjectConfigSubjectAltNameArgs( dns_names=["hashicorp.com"], ), ), x509_config=gcp.certificateauthority.AuthorityConfigX509ConfigArgs( ca_options=gcp.certificateauthority.AuthorityConfigX509ConfigCaOptionsArgs( is_ca=True, max_issuer_path_length=10, ), key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageArgs( base_key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageBaseKeyUsageArgs( cert_sign=True, content_commitment=True, crl_sign=True, 
data_encipherment=True, decipher_only=True, digital_signature=True, key_agreement=True, key_encipherment=False, ), extended_key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageExtendedKeyUsageArgs( client_auth=False, code_signing=True, email_protection=True, server_auth=True, time_stamping=True, ), ), ), ), key_spec=gcp.certificateauthority.AuthorityKeySpecArgs( algorithm="RSA_PKCS1_4096_SHA256", ), lifetime="86400s", location="us-central1", pool="ca-pool") ``` ### Privateca Certificate Authority Subordinate ```python import pulumi import pulumi_gcp as gcp default = gcp.certificateauthority.Authority("default", certificate_authority_id="my-certificate-authority", config=gcp.certificateauthority.AuthorityConfigArgs( subject_config=gcp.certificateauthority.AuthorityConfigSubjectConfigArgs( subject=gcp.certificateauthority.AuthorityConfigSubjectConfigSubjectArgs( common_name="my-subordinate-authority", organization="HashiCorp", ), subject_alt_name=gcp.certificateauthority.AuthorityConfigSubjectConfigSubjectAltNameArgs( dns_names=["hashicorp.com"], ), ), x509_config=gcp.certificateauthority.AuthorityConfigX509ConfigArgs( ca_options=gcp.certificateauthority.AuthorityConfigX509ConfigCaOptionsArgs( is_ca=True, max_issuer_path_length=0, ), key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageArgs( base_key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageBaseKeyUsageArgs( cert_sign=True, content_commitment=True, crl_sign=True, data_encipherment=True, decipher_only=True, digital_signature=True, key_agreement=True, key_encipherment=False, ), extended_key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageExtendedKeyUsageArgs( client_auth=False, code_signing=True, email_protection=True, server_auth=True, time_stamping=True, ), ), ), ), key_spec=gcp.certificateauthority.AuthorityKeySpecArgs( algorithm="RSA_PKCS1_4096_SHA256", ), lifetime="86400s", location="us-central1", pool="ca-pool", type="SUBORDINATE") ``` ### 
Privateca Certificate Authority Byo Key ```python import pulumi import pulumi_gcp as gcp privateca_sa = gcp.projects.ServiceIdentity("privatecaSa", service="privateca.googleapis.com") privateca_sa_keyuser_signerverifier = gcp.kms.CryptoKeyIAMBinding("privatecaSaKeyuserSignerverifier", crypto_key_id="projects/keys-project/locations/us-central1/keyRings/key-ring/cryptoKeys/crypto-key", role="roles/cloudkms.signerVerifier", members=[privateca_sa.email.apply(lambda email: f"serviceAccount:{email}")]) privateca_sa_keyuser_viewer = gcp.kms.CryptoKeyIAMBinding("privatecaSaKeyuserViewer", crypto_key_id="projects/keys-project/locations/us-central1/keyRings/key-ring/cryptoKeys/crypto-key", role="roles/viewer", members=[privateca_sa.email.apply(lambda email: f"serviceAccount:{email}")]) default = gcp.certificateauthority.Authority("default", pool="ca-pool", certificate_authority_id="my-certificate-authority", location="us-central1", key_spec=gcp.certificateauthority.AuthorityKeySpecArgs( cloud_kms_key_version="projects/keys-project/locations/us-central1/keyRings/key-ring/cryptoKeys/crypto-key/cryptoKeyVersions/1", ), config=gcp.certificateauthority.AuthorityConfigArgs( subject_config=gcp.certificateauthority.AuthorityConfigSubjectConfigArgs( subject=gcp.certificateauthority.AuthorityConfigSubjectConfigSubjectArgs( organization="Example, Org.", common_name="Example Authority", ), ), x509_config=gcp.certificateauthority.AuthorityConfigX509ConfigArgs( ca_options=gcp.certificateauthority.AuthorityConfigX509ConfigCaOptionsArgs( is_ca=True, max_issuer_path_length=10, ), key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageArgs( base_key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageBaseKeyUsageArgs( cert_sign=True, crl_sign=True, ), extended_key_usage=gcp.certificateauthority.AuthorityConfigX509ConfigKeyUsageExtendedKeyUsageArgs( server_auth=False, ), ), ), ), opts=pulumi.ResourceOptions(depends_on=[ privateca_sa_keyuser_signerverifier, 
privateca_sa_keyuser_viewer, ])) ``` ## Import CertificateAuthority can be imported using any of these accepted formats ```sh $ pulumi import gcp:certificateauthority/authority:Authority default projects/{{project}}/locations/{{location}}/caPools/{{pool}}/certificateAuthorities/{{certificate_authority_id}} ``` ```sh $ pulumi import gcp:certificateauthority/authority:Authority default {{project}}/{{location}}/{{pool}}/{{certificate_authority_id}} ``` ```sh $ pulumi import gcp:certificateauthority/authority:Authority default {{location}}/{{pool}}/{{certificate_authority_id}} ``` :param str resource_name: The name of the resource. :param AuthorityArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(AuthorityArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, certificate_authority_id: Optional[pulumi.Input[str]] = None, config: Optional[pulumi.Input[pulumi.InputType['AuthorityConfigArgs']]] = None, gcs_bucket: Optional[pulumi.Input[str]] = None, ignore_active_certificates_on_deletion: Optional[pulumi.Input[bool]] = None, key_spec: Optional[pulumi.Input[pulumi.InputType['AuthorityKeySpecArgs']]] = None, labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, lifetime: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, pool: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[str]] = None, type: Optional[pulumi.Input[str]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise 
TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = AuthorityArgs.__new__(AuthorityArgs) if certificate_authority_id is None and not opts.urn: raise TypeError("Missing required property 'certificate_authority_id'") __props__.__dict__["certificate_authority_id"] = certificate_authority_id if config is None and not opts.urn: raise TypeError("Missing required property 'config'") __props__.__dict__["config"] = config __props__.__dict__["gcs_bucket"] = gcs_bucket __props__.__dict__["ignore_active_certificates_on_deletion"] = ignore_active_certificates_on_deletion if key_spec is None and not opts.urn: raise TypeError("Missing required property 'key_spec'") __props__.__dict__["key_spec"] = key_spec __props__.__dict__["labels"] = labels __props__.__dict__["lifetime"] = lifetime if location is None and not opts.urn: raise TypeError("Missing required property 'location'") __props__.__dict__["location"] = location if pool is None and not opts.urn: raise TypeError("Missing required property 'pool'") __props__.__dict__["pool"] = pool __props__.__dict__["project"] = project __props__.__dict__["type"] = type __props__.__dict__["access_urls"] = None __props__.__dict__["create_time"] = None __props__.__dict__["name"] = None __props__.__dict__["pem_ca_certificates"] = None __props__.__dict__["state"] = None __props__.__dict__["update_time"] = None super(Authority, __self__).__init__( 'gcp:certificateauthority/authority:Authority', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, access_urls: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AuthorityAccessUrlArgs']]]]] = None, certificate_authority_id: 
Optional[pulumi.Input[str]] = None, config: Optional[pulumi.Input[pulumi.InputType['AuthorityConfigArgs']]] = None, create_time: Optional[pulumi.Input[str]] = None, gcs_bucket: Optional[pulumi.Input[str]] = None, ignore_active_certificates_on_deletion: Optional[pulumi.Input[bool]] = None, key_spec: Optional[pulumi.Input[pulumi.InputType['AuthorityKeySpecArgs']]] = None, labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, lifetime: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, pem_ca_certificates: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, pool: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[str]] = None, state: Optional[pulumi.Input[str]] = None, type: Optional[pulumi.Input[str]] = None, update_time: Optional[pulumi.Input[str]] = None) -> 'Authority': """ Get an existing Authority resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AuthorityAccessUrlArgs']]]] access_urls: URLs for accessing content published by this CA, such as the CA certificate and CRLs. :param pulumi.Input[str] certificate_authority_id: The user provided Resource ID for this Certificate Authority. :param pulumi.Input[pulumi.InputType['AuthorityConfigArgs']] config: The config used to create a self-signed X.509 certificate or CSR. Structure is documented below. :param pulumi.Input[str] create_time: The time at which this CertificateAuthority was created. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". 
:param pulumi.Input[str] gcs_bucket: The name of a Cloud Storage bucket where this CertificateAuthority will publish content, such as the CA certificate and CRLs. This must be a bucket name, without any prefixes (such as `gs://`) or suffixes (such as `.googleapis.com`). For example, to use a bucket named my-bucket, you would simply specify `my-bucket`. If not specified, a managed bucket will be created. :param pulumi.Input[bool] ignore_active_certificates_on_deletion: This field allows the CA to be deleted even if the CA has active certs. Active certs include both unrevoked and unexpired certs. Use with care. Defaults to `false`. :param pulumi.Input[pulumi.InputType['AuthorityKeySpecArgs']] key_spec: Used when issuing certificates for this CertificateAuthority. If this CertificateAuthority is a self-signed CertificateAuthority, this key is also used to sign the self-signed CA certificate. Otherwise, it is used to sign a CSR. Structure is documented below. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels with user-defined metadata. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. :param pulumi.Input[str] lifetime: The desired lifetime of the CA certificate. Used to create the "notBeforeTime" and "notAfterTime" fields inside an X.509 certificate. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". :param pulumi.Input[str] location: Location of the CertificateAuthority. A full list of valid locations can be found by running `gcloud privateca locations list`. :param pulumi.Input[str] name: The resource name for this CertificateAuthority in the format projects/*/locations/*/certificateAuthorities/*. :param pulumi.Input[Sequence[pulumi.Input[str]]] pem_ca_certificates: This CertificateAuthority's certificate chain, including the current CertificateAuthority's certificate. 
Ordered such that the root issuer is the final element (consistent with RFC 5246). For a self-signed CA, this will only list the current CertificateAuthority's certificate. :param pulumi.Input[str] pool: The name of the CaPool this Certificate Authority belongs to. :param pulumi.Input[str] project: The ID of the project in which the resource belongs. If it is not provided, the provider project is used. :param pulumi.Input[str] state: The State for this CertificateAuthority. :param pulumi.Input[str] type: The Type of this CertificateAuthority. > **Note:** For `SUBORDINATE` Certificate Authorities, they need to be manually activated (via Cloud Console of `gcloud`) before they can issue certificates. Default value is `SELF_SIGNED`. Possible values are `SELF_SIGNED` and `SUBORDINATE`. :param pulumi.Input[str] update_time: The time at which this CertificateAuthority was updated. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". 
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _AuthorityState.__new__(_AuthorityState)

        __props__.__dict__["access_urls"] = access_urls
        __props__.__dict__["certificate_authority_id"] = certificate_authority_id
        __props__.__dict__["config"] = config
        __props__.__dict__["create_time"] = create_time
        __props__.__dict__["gcs_bucket"] = gcs_bucket
        __props__.__dict__["ignore_active_certificates_on_deletion"] = ignore_active_certificates_on_deletion
        __props__.__dict__["key_spec"] = key_spec
        __props__.__dict__["labels"] = labels
        __props__.__dict__["lifetime"] = lifetime
        __props__.__dict__["location"] = location
        __props__.__dict__["name"] = name
        __props__.__dict__["pem_ca_certificates"] = pem_ca_certificates
        __props__.__dict__["pool"] = pool
        __props__.__dict__["project"] = project
        __props__.__dict__["state"] = state
        __props__.__dict__["type"] = type
        __props__.__dict__["update_time"] = update_time
        return Authority(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="accessUrls")
    def access_urls(self) -> pulumi.Output[Sequence['outputs.AuthorityAccessUrl']]:
        """
        URLs for accessing content published by this CA, such as the CA certificate
        and CRLs.
        """
        return pulumi.get(self, "access_urls")

    @property
    @pulumi.getter(name="certificateAuthorityId")
    def certificate_authority_id(self) -> pulumi.Output[str]:
        """
        The user provided Resource ID for this Certificate Authority.
        """
        return pulumi.get(self, "certificate_authority_id")

    @property
    @pulumi.getter
    def config(self) -> pulumi.Output['outputs.AuthorityConfig']:
        """
        The config used to create a self-signed X.509 certificate or CSR.
        Structure is documented below.
        """
        return pulumi.get(self, "config")

    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> pulumi.Output[str]:
        """
        The time at which this CertificateAuthority was created.
        A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine
        fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
        """
        return pulumi.get(self, "create_time")

    @property
    @pulumi.getter(name="gcsBucket")
    def gcs_bucket(self) -> pulumi.Output[Optional[str]]:
        """
        The name of a Cloud Storage bucket where this CertificateAuthority will publish content,
        such as the CA certificate and CRLs. This must be a bucket name, without any prefixes
        (such as `gs://`) or suffixes (such as `.googleapis.com`). For example, to use a bucket named
        my-bucket, you would simply specify `my-bucket`. If not specified, a managed bucket will be
        created.
        """
        return pulumi.get(self, "gcs_bucket")

    @property
    @pulumi.getter(name="ignoreActiveCertificatesOnDeletion")
    def ignore_active_certificates_on_deletion(self) -> pulumi.Output[Optional[bool]]:
        """
        This field allows the CA to be deleted even if the CA has active certs. Active certs include both unrevoked and
        unexpired certs. Use with care. Defaults to `false`.
        """
        return pulumi.get(self, "ignore_active_certificates_on_deletion")

    @property
    @pulumi.getter(name="keySpec")
    def key_spec(self) -> pulumi.Output['outputs.AuthorityKeySpec']:
        """
        Used when issuing certificates for this CertificateAuthority. If this CertificateAuthority
        is a self-signed CertificateAuthority, this key is also used to sign the self-signed CA
        certificate. Otherwise, it is used to sign a CSR.
        Structure is documented below.
        """
        return pulumi.get(self, "key_spec")

    @property
    @pulumi.getter
    def labels(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        Labels with user-defined metadata.
        An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass":
        "1.3kg", "count": "3" }.
        """
        return pulumi.get(self, "labels")

    @property
    @pulumi.getter
    def lifetime(self) -> pulumi.Output[Optional[str]]:
        """
        The desired lifetime of the CA certificate. Used to create the "notBeforeTime" and
        "notAfterTime" fields inside an X.509 certificate. A duration in seconds with up to nine
        fractional digits, terminated by 's'. Example: "3.5s".
        """
        return pulumi.get(self, "lifetime")

    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        Location of the CertificateAuthority. A full list of valid locations can be found by
        running `gcloud privateca locations list`.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The resource name for this CertificateAuthority in the format
        projects/*/locations/*/certificateAuthorities/*.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="pemCaCertificates")
    def pem_ca_certificates(self) -> pulumi.Output[Sequence[str]]:
        """
        This CertificateAuthority's certificate chain, including the current
        CertificateAuthority's certificate. Ordered such that the root issuer is the final
        element (consistent with RFC 5246). For a self-signed CA, this will only list the current
        CertificateAuthority's certificate.
        """
        return pulumi.get(self, "pem_ca_certificates")

    @property
    @pulumi.getter
    def pool(self) -> pulumi.Output[str]:
        """
        The name of the CaPool this Certificate Authority belongs to.
        """
        return pulumi.get(self, "pool")

    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")

    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[str]:
        """
        The State for this CertificateAuthority.
        """
        return pulumi.get(self, "state")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[Optional[str]]:
        """
        The Type of this CertificateAuthority.
        > **Note:** For `SUBORDINATE` Certificate Authorities, they need to
        be manually activated (via Cloud Console or `gcloud`) before they can
        issue certificates.
        Default value is `SELF_SIGNED`.
        Possible values are `SELF_SIGNED` and `SUBORDINATE`.
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="updateTime")
    def update_time(self) -> pulumi.Output[str]:
        """
        The time at which this CertificateAuthority was updated.
        A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine
        fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
        """
        return pulumi.get(self, "update_time")
50.361686
202
0.63335
7,006
65,722
5.781188
0.057237
0.05649
0.047009
0.038022
0.941757
0.927117
0.908723
0.895292
0.886305
0.871812
0
0.012743
0.276422
65,722
1,304
203
50.400307
0.838965
0.555552
0
0.684536
1
0
0.114816
0.040441
0
0
0
0
0
1
0.164948
false
0.002062
0.014433
0
0.280412
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
7ee40b5e26936ed1beda4ff941c748c29781b364
24,467
py
Python
sdk/python/pulumi_aws/route53/resolver_firewall_rule_group_association.py
rapzo/pulumi-aws
390a098221315d98a54ba97d1559e750dc3053b7
[ "ECL-2.0", "Apache-2.0" ]
260
2018-06-18T14:57:00.000Z
2022-03-29T11:41:03.000Z
sdk/python/pulumi_aws/route53/resolver_firewall_rule_group_association.py
rapzo/pulumi-aws
390a098221315d98a54ba97d1559e750dc3053b7
[ "ECL-2.0", "Apache-2.0" ]
1,154
2018-06-19T20:38:20.000Z
2022-03-31T19:48:16.000Z
sdk/python/pulumi_aws/route53/resolver_firewall_rule_group_association.py
rapzo/pulumi-aws
390a098221315d98a54ba97d1559e750dc3053b7
[ "ECL-2.0", "Apache-2.0" ]
115
2018-06-28T03:20:27.000Z
2022-03-29T11:41:06.000Z
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities

__all__ = ['ResolverFirewallRuleGroupAssociationArgs', 'ResolverFirewallRuleGroupAssociation']


@pulumi.input_type
class ResolverFirewallRuleGroupAssociationArgs:
    def __init__(__self__, *,
                 firewall_rule_group_id: pulumi.Input[str],
                 priority: pulumi.Input[int],
                 vpc_id: pulumi.Input[str],
                 mutation_protection: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a ResolverFirewallRuleGroupAssociation resource.
        :param pulumi.Input[str] firewall_rule_group_id: The unique identifier of the firewall rule group.
        :param pulumi.Input[int] priority: The setting that determines the processing order of the rule group among the rule groups that you associate with the specified VPC. DNS Firewall filters VPC traffic starting from the rule group with the lowest numeric priority setting.
        :param pulumi.Input[str] vpc_id: The unique identifier of the VPC that you want to associate with the rule group.
        :param pulumi.Input[str] mutation_protection: If enabled, this setting disallows modification or removal of the association, to help prevent against accidentally altering DNS firewall protections. Valid values: `ENABLED`, `DISABLED`.
        :param pulumi.Input[str] name: A name that lets you identify the rule group association, to manage and use it.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        """
        pulumi.set(__self__, "firewall_rule_group_id", firewall_rule_group_id)
        pulumi.set(__self__, "priority", priority)
        pulumi.set(__self__, "vpc_id", vpc_id)
        if mutation_protection is not None:
            pulumi.set(__self__, "mutation_protection", mutation_protection)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    @property
    @pulumi.getter(name="firewallRuleGroupId")
    def firewall_rule_group_id(self) -> pulumi.Input[str]:
        """
        The unique identifier of the firewall rule group.
        """
        return pulumi.get(self, "firewall_rule_group_id")

    @firewall_rule_group_id.setter
    def firewall_rule_group_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "firewall_rule_group_id", value)

    @property
    @pulumi.getter
    def priority(self) -> pulumi.Input[int]:
        """
        The setting that determines the processing order of the rule group among the rule groups that you associate with the specified VPC. DNS Firewall filters VPC traffic starting from the rule group with the lowest numeric priority setting.
        """
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, value: pulumi.Input[int]):
        pulumi.set(self, "priority", value)

    @property
    @pulumi.getter(name="vpcId")
    def vpc_id(self) -> pulumi.Input[str]:
        """
        The unique identifier of the VPC that you want to associate with the rule group.
        """
        return pulumi.get(self, "vpc_id")

    @vpc_id.setter
    def vpc_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "vpc_id", value)

    @property
    @pulumi.getter(name="mutationProtection")
    def mutation_protection(self) -> Optional[pulumi.Input[str]]:
        """
        If enabled, this setting disallows modification or removal of the association, to help prevent against accidentally altering DNS firewall protections. Valid values: `ENABLED`, `DISABLED`.
        """
        return pulumi.get(self, "mutation_protection")

    @mutation_protection.setter
    def mutation_protection(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "mutation_protection", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        A name that lets you identify the rule group association, to manage and use it.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        Key-value map of resource tags. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)


@pulumi.input_type
class _ResolverFirewallRuleGroupAssociationState:
    def __init__(__self__, *,
                 arn: Optional[pulumi.Input[str]] = None,
                 firewall_rule_group_id: Optional[pulumi.Input[str]] = None,
                 mutation_protection: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 vpc_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering ResolverFirewallRuleGroupAssociation resources.
        :param pulumi.Input[str] arn: The ARN (Amazon Resource Name) of the firewall rule group association.
        :param pulumi.Input[str] firewall_rule_group_id: The unique identifier of the firewall rule group.
        :param pulumi.Input[str] mutation_protection: If enabled, this setting disallows modification or removal of the association, to help prevent against accidentally altering DNS firewall protections. Valid values: `ENABLED`, `DISABLED`.
        :param pulumi.Input[str] name: A name that lets you identify the rule group association, to manage and use it.
        :param pulumi.Input[int] priority: The setting that determines the processing order of the rule group among the rule groups that you associate with the specified VPC. DNS Firewall filters VPC traffic starting from the rule group with the lowest numeric priority setting.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider.
        :param pulumi.Input[str] vpc_id: The unique identifier of the VPC that you want to associate with the rule group.
        """
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if firewall_rule_group_id is not None:
            pulumi.set(__self__, "firewall_rule_group_id", firewall_rule_group_id)
        if mutation_protection is not None:
            pulumi.set(__self__, "mutation_protection", mutation_protection)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tags_all is not None:
            pulumi.set(__self__, "tags_all", tags_all)
        if vpc_id is not None:
            pulumi.set(__self__, "vpc_id", vpc_id)

    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        The ARN (Amazon Resource Name) of the firewall rule group association.
        """
        return pulumi.get(self, "arn")

    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)

    @property
    @pulumi.getter(name="firewallRuleGroupId")
    def firewall_rule_group_id(self) -> Optional[pulumi.Input[str]]:
        """
        The unique identifier of the firewall rule group.
        """
        return pulumi.get(self, "firewall_rule_group_id")

    @firewall_rule_group_id.setter
    def firewall_rule_group_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "firewall_rule_group_id", value)

    @property
    @pulumi.getter(name="mutationProtection")
    def mutation_protection(self) -> Optional[pulumi.Input[str]]:
        """
        If enabled, this setting disallows modification or removal of the association, to help prevent against accidentally altering DNS firewall protections. Valid values: `ENABLED`, `DISABLED`.
        """
        return pulumi.get(self, "mutation_protection")

    @mutation_protection.setter
    def mutation_protection(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "mutation_protection", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        A name that lets you identify the rule group association, to manage and use it.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        The setting that determines the processing order of the rule group among the rule groups that you associate with the specified VPC. DNS Firewall filters VPC traffic starting from the rule group with the lowest numeric priority setting.
        """
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        Key-value map of resource tags. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="tagsAll")
    def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags assigned to the resource, including those inherited from the provider.
        """
        return pulumi.get(self, "tags_all")

    @tags_all.setter
    def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags_all", value)

    @property
    @pulumi.getter(name="vpcId")
    def vpc_id(self) -> Optional[pulumi.Input[str]]:
        """
        The unique identifier of the VPC that you want to associate with the rule group.
        """
        return pulumi.get(self, "vpc_id")

    @vpc_id.setter
    def vpc_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vpc_id", value)


class ResolverFirewallRuleGroupAssociation(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 firewall_rule_group_id: Optional[pulumi.Input[str]] = None,
                 mutation_protection: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 vpc_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a Route 53 Resolver DNS Firewall rule group association resource.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_aws as aws

        example_resolver_firewall_rule_group = aws.route53.ResolverFirewallRuleGroup("exampleResolverFirewallRuleGroup")
        example_resolver_firewall_rule_group_association = aws.route53.ResolverFirewallRuleGroupAssociation("exampleResolverFirewallRuleGroupAssociation",
            firewall_rule_group_id=example_resolver_firewall_rule_group.id,
            priority=100,
            vpc_id=aws_vpc["example"]["id"])
        ```

        ## Import

        Route 53 Resolver DNS Firewall rule group associations can be imported using the Route 53 Resolver DNS Firewall rule group association ID, e.g.

        ```sh
         $ pulumi import aws:route53/resolverFirewallRuleGroupAssociation:ResolverFirewallRuleGroupAssociation example rslvr-frgassoc-0123456789abcdef
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] firewall_rule_group_id: The unique identifier of the firewall rule group.
        :param pulumi.Input[str] mutation_protection: If enabled, this setting disallows modification or removal of the association, to help prevent against accidentally altering DNS firewall protections. Valid values: `ENABLED`, `DISABLED`.
        :param pulumi.Input[str] name: A name that lets you identify the rule group association, to manage and use it.
        :param pulumi.Input[int] priority: The setting that determines the processing order of the rule group among the rule groups that you associate with the specified VPC. DNS Firewall filters VPC traffic starting from the rule group with the lowest numeric priority setting.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input[str] vpc_id: The unique identifier of the VPC that you want to associate with the rule group.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ResolverFirewallRuleGroupAssociationArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a Route 53 Resolver DNS Firewall rule group association resource.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_aws as aws

        example_resolver_firewall_rule_group = aws.route53.ResolverFirewallRuleGroup("exampleResolverFirewallRuleGroup")
        example_resolver_firewall_rule_group_association = aws.route53.ResolverFirewallRuleGroupAssociation("exampleResolverFirewallRuleGroupAssociation",
            firewall_rule_group_id=example_resolver_firewall_rule_group.id,
            priority=100,
            vpc_id=aws_vpc["example"]["id"])
        ```

        ## Import

        Route 53 Resolver DNS Firewall rule group associations can be imported using the Route 53 Resolver DNS Firewall rule group association ID, e.g.

        ```sh
         $ pulumi import aws:route53/resolverFirewallRuleGroupAssociation:ResolverFirewallRuleGroupAssociation example rslvr-frgassoc-0123456789abcdef
        ```

        :param str resource_name: The name of the resource.
        :param ResolverFirewallRuleGroupAssociationArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(ResolverFirewallRuleGroupAssociationArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, firewall_rule_group_id: Optional[pulumi.Input[str]] = None, mutation_protection: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, priority: Optional[pulumi.Input[int]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, vpc_id: Optional[pulumi.Input[str]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = ResolverFirewallRuleGroupAssociationArgs.__new__(ResolverFirewallRuleGroupAssociationArgs) if firewall_rule_group_id is None and not opts.urn: raise TypeError("Missing required property 'firewall_rule_group_id'") __props__.__dict__["firewall_rule_group_id"] = firewall_rule_group_id __props__.__dict__["mutation_protection"] = mutation_protection __props__.__dict__["name"] = name if priority is None and not opts.urn: raise TypeError("Missing required property 'priority'") __props__.__dict__["priority"] = priority __props__.__dict__["tags"] = tags if vpc_id is None and not opts.urn: raise TypeError("Missing required property 'vpc_id'") __props__.__dict__["vpc_id"] = vpc_id __props__.__dict__["arn"] = None __props__.__dict__["tags_all"] = None 
super(ResolverFirewallRuleGroupAssociation, __self__).__init__( 'aws:route53/resolverFirewallRuleGroupAssociation:ResolverFirewallRuleGroupAssociation', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, arn: Optional[pulumi.Input[str]] = None, firewall_rule_group_id: Optional[pulumi.Input[str]] = None, mutation_protection: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, priority: Optional[pulumi.Input[int]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, vpc_id: Optional[pulumi.Input[str]] = None) -> 'ResolverFirewallRuleGroupAssociation': """ Get an existing ResolverFirewallRuleGroupAssociation resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] arn: The ARN (Amazon Resource Name) of the firewall rule group association. :param pulumi.Input[str] firewall_rule_group_id: The unique identifier of the firewall rule group. :param pulumi.Input[str] mutation_protection: If enabled, this setting disallows modification or removal of the association, to help prevent against accidentally altering DNS firewall protections. Valid values: `ENABLED`, `DISABLED`. :param pulumi.Input[str] name: A name that lets you identify the rule group association, to manage and use it. :param pulumi.Input[int] priority: The setting that determines the processing order of the rule group among the rule groups that you associate with the specified VPC. DNS Firewall filters VPC traffic starting from the rule group with the lowest numeric priority setting. 
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags. .If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider . :param pulumi.Input[str] vpc_id: The unique identifier of the VPC that you want to associate with the rule group. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _ResolverFirewallRuleGroupAssociationState.__new__(_ResolverFirewallRuleGroupAssociationState) __props__.__dict__["arn"] = arn __props__.__dict__["firewall_rule_group_id"] = firewall_rule_group_id __props__.__dict__["mutation_protection"] = mutation_protection __props__.__dict__["name"] = name __props__.__dict__["priority"] = priority __props__.__dict__["tags"] = tags __props__.__dict__["tags_all"] = tags_all __props__.__dict__["vpc_id"] = vpc_id return ResolverFirewallRuleGroupAssociation(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter def arn(self) -> pulumi.Output[str]: """ The ARN (Amazon Resource Name) of the firewall rule group association. """ return pulumi.get(self, "arn") @property @pulumi.getter(name="firewallRuleGroupId") def firewall_rule_group_id(self) -> pulumi.Output[str]: """ The unique identifier of the firewall rule group. """ return pulumi.get(self, "firewall_rule_group_id") @property @pulumi.getter(name="mutationProtection") def mutation_protection(self) -> pulumi.Output[str]: """ If enabled, this setting disallows modification or removal of the association, to help prevent against accidentally altering DNS firewall protections. Valid values: `ENABLED`, `DISABLED`. 
""" return pulumi.get(self, "mutation_protection") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ A name that lets you identify the rule group association, to manage and use it. """ return pulumi.get(self, "name") @property @pulumi.getter def priority(self) -> pulumi.Output[int]: """ The setting that determines the processing order of the rule group among the rule groups that you associate with the specified VPC. DNS Firewall filters VPC traffic starting from the rule group with the lowest numeric priority setting. """ return pulumi.get(self, "priority") @property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]: """ Key-value map of resource tags. .If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level. """ return pulumi.get(self, "tags") @property @pulumi.getter(name="tagsAll") def tags_all(self) -> pulumi.Output[Mapping[str, str]]: """ A map of tags assigned to the resource, including those inherited from the provider . """ return pulumi.get(self, "tags_all") @property @pulumi.getter(name="vpcId") def vpc_id(self) -> pulumi.Output[str]: """ The unique identifier of the VPC that you want to associate with the rule group. """ return pulumi.get(self, "vpc_id")
50.551653
278
0.679609
2,952
24,467
5.446477
0.072493
0.077311
0.06879
0.042543
0.854957
0.83692
0.817017
0.802152
0.793755
0.776278
0
0.002816
0.230678
24,467
483
279
50.656315
0.851352
0.421261
0
0.664179
1
0
0.09593
0.032233
0
0
0
0
0
1
0.160448
false
0.048507
0.018657
0
0.276119
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
bc0d103c643e738a9bf6f4d08b076079f1f027f0
5,280
py
Python
server/openapi_server/test/test_model_controller.py
mintproject/MINT-ModelCatalogIngestionAPI
026d3495483a3e48ea3c1364d0dda09beeea69e4
[ "Apache-2.0" ]
2
2019-05-30T21:33:43.000Z
2019-09-27T21:04:38.000Z
server/openapi_server/test/test_model_controller.py
mintproject/MINT-ModelCatalogIngestionAPI
026d3495483a3e48ea3c1364d0dda09beeea69e4
[ "Apache-2.0" ]
25
2019-03-28T14:44:57.000Z
2019-07-25T00:20:09.000Z
server/openapi_server/test/test_model_controller.py
mintproject/MINT-ModelCatalogIngestionAPI
026d3495483a3e48ea3c1364d0dda09beeea69e4
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 from __future__ import absolute_import import logging import unittest from flask import json from six import BytesIO from openapi_server.models import Model from openapi_server.test import BaseTestCase MINT_USERNAME = "mint@isi.edu" class TestModel(BaseTestCase): """Model integration test stubs""" logger = logging.getLogger("TestModel") def test_models_get(self): """Test case for models_get List all Model entities """ query_string = [('username', self.get_username)] headers = { 'Accept': 'application/json', } response = self.client.open( '/v1.8.0/modelconfigurations', method='GET', headers=headers, query_string=query_string) self.logger.info("Response length {}".format(len(response.json))) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) self.assertTrue(response.json) for _item in response.json: keys = _item.keys() for key in keys: self.check_key(key) def test_model_custom_index_get(self): """Test case for model_get List all Models entities """ query_string = [('username', MINT_USERNAME), ('label', 'flooding')] headers = { 'Accept': 'application/json', } response = self.client.open( '/v1.8.0/custom/models/variable', method='GET', headers=headers, query_string=query_string) self.logger.info("Response length {}".format(len(response.json))) for item in response.json: self.assertIsInstance(Model.from_dict(item), Model) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_model_custom_intervention_get(self): """Test case for model_get List all Models entities """ query_string = [('username', MINT_USERNAME), ('label', 'Fertilizer')] headers = { 'Accept': 'application/json', } response = self.client.open( '/v1.8.0/custom/model/intervention', method='GET', headers=headers, query_string=query_string) self.logger.info("Response length {}".format(len(response.json))) for item in response.json: self.assertIsInstance(Model.from_dict(item), Model) self.assert200(response, 'Response body is : ' + 
response.data.decode('utf-8')) def test_model_custom_region_get(self): """Test case for model_get List all Models entities """ query_string = [('username', MINT_USERNAME), ('label', 'baro')] headers = { 'Accept': 'application/json', } response = self.client.open( '/v1.8.0/custom/model/region', method='GET', headers=headers, query_string=query_string) self.logger.info("Response length {}".format(len(response.json))) for item in response.json: self.assertIsInstance(Model.from_dict(item), Model) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) for _item in response.json: keys = _item.keys() for key in keys: self.check_key(key) def test_model_custom_variable_get(self): """Test case for model_get List all Models entities """ query_string = [('username', MINT_USERNAME), ('label', 'crop')] headers = { 'Accept': 'application/json', } response = self.client.open( '/v1.8.0/custom/models/variable', method='GET', headers=headers, query_string=query_string) self.logger.info("Response length {}".format(len(response.json))) for item in response.json: self.assertIsInstance(Model.from_dict(item), Model) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) def test_model_custom_standard_variable_get(self): """Test case for model_get List all Models entities """ query_string = [('username', MINT_USERNAME), ('label', 'crop')] headers = { 'Accept': 'application/json', } response = self.client.open( '/v1.8.0/custom/models/standard_variable', method='GET', headers=headers, query_string=query_string) self.logger.info("Response length {}".format(len(response.json))) for item in response.json: self.assertIsInstance(Model.from_dict(item), Model) self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) for _item in response.json: keys = _item.keys() for key in keys: self.check_key(key) if __name__ == '__main__': unittest.main()
33
77
0.557386
552
5,280
5.179348
0.144928
0.069255
0.025184
0.047569
0.827562
0.821266
0.821266
0.817069
0.817069
0.817069
0
0.012092
0.326515
5,280
159
78
33.207547
0.791901
0.064962
0
0.720339
0
0
0.150031
0.038758
0
0
0
0
0.101695
1
0.050847
false
0
0.059322
0
0.127119
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
bc307db5b30ba05ce1bf6c5a3174210c86d82796
30
py
Python
src/BearSki/install_data.py
Sirius1942/BearFramework
333b6f98c00441b3606d116625faabd0d66c2a37
[ "MIT" ]
null
null
null
src/BearSki/install_data.py
Sirius1942/BearFramework
333b6f98c00441b3606d116625faabd0d66c2a37
[ "MIT" ]
null
null
null
src/BearSki/install_data.py
Sirius1942/BearFramework
333b6f98c00441b3606d116625faabd0d66c2a37
[ "MIT" ]
null
null
null
def testRun(): return "OK"
15
15
0.6
4
30
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0.233333
30
2
15
15
0.782609
0
0
0
0
0
0.064516
0
0
0
0
0
0
1
0.5
true
0
0
0.5
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
1
1
0
0
7
70fd15c7839e62af7a2ea01b37774a8f23087692
62,256
py
Python
src/pyrad_proc/pyrad/proc/process_monitoring.py
jfigui/pyrad
7811d593bb09a7f8a621c0e8ae3f32c2b85a0254
[ "BSD-3-Clause" ]
41
2016-12-01T08:46:06.000Z
2021-06-24T21:14:33.000Z
src/pyrad_proc/pyrad/proc/process_monitoring.py
jfigui/pyrad
7811d593bb09a7f8a621c0e8ae3f32c2b85a0254
[ "BSD-3-Clause" ]
42
2017-02-23T14:52:49.000Z
2021-02-01T10:43:52.000Z
src/pyrad_proc/pyrad/proc/process_monitoring.py
jfigui/pyrad
7811d593bb09a7f8a621c0e8ae3f32c2b85a0254
[ "BSD-3-Clause" ]
21
2016-08-25T15:02:12.000Z
2021-05-27T04:09:40.000Z
""" pyrad.proc.process_monitoring ============================= Functions for monitoring of the polarimetric variables .. autosummary:: :toctree: generated/ process_selfconsistency_kdp_phidp process_selfconsistency_bias process_selfconsistency_bias2 process_estimate_phidp0 process_rhohv_rain process_zdr_precip process_zdr_snow process_monitoring """ from copy import deepcopy from warnings import warn import numpy as np import pyart from ..io.io_aux import get_datatype_fields, get_fieldname_pyart from ..io.read_data_other import read_selfconsistency from ..io.read_data_radar import interpol_field from ..util.radar_utils import get_histogram_bins from ..util.stat_utils import ratio_bootstrapping def process_selfconsistency_kdp_phidp(procstatus, dscfg, radar_list=None): """ Computes specific differential phase and differential phase in rain using the selfconsistency between Zdr, Zh and KDP Parameters ---------- procstatus : int Processing status: 0 initializing, 1 processing volume, 2 post-processing dscfg : dictionary of dictionaries data set configuration. Accepted Configuration Keywords:: datatype : list of strings. Dataset keyword The input data types parametrization : str The type of parametrization for the self-consistency curves. Can be 'None', 'Gourley', 'Wolfensberger', 'Louf', 'Gorgucci' or 'Vaccarono' 'None' will use tables from config files. Default 'None'. rsmooth : float. Dataset keyword length of the smoothing window [m]. Default 2000. min_rhohv : float. Dataset keyword minimum valid RhoHV. Default 0.92 filter_rain : Bool. Dataset keyword If True the hydrometeor classification is used to filter out gates that are not rain. Default True max_phidp : float. Dataset keyword maximum valid PhiDP [deg]. Default 20. ml_thickness : float. Dataset keyword assumed melting layer thickness [m]. Default 700. fzl : float. Dataset keyword The default freezing level height. 
It will be used if no temperature field name is specified or the temperature field is not in the radar object. Default 2000. frequency : float. Dataset keyword the radar frequency [Hz]. If None that of the key frequency in attribute instrument_parameters of the radar object will be used. If the key or the attribute are not present the selfconsistency will not be computed radar_list : list of Radar objects Optional. list of radar objects Returns ------- new_dataset : dict dictionary containing the output ind_rad : int radar index """ if procstatus != 1: return None, None temp = None iso0 = None hydro = None for datatypedescr in dscfg['datatype']: radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr) if datatype == 'dBZc': refl = 'corrected_reflectivity' if datatype == 'dBZ': refl = 'reflectivity' if datatype == 'ZDRc': zdr = 'corrected_differential_reflectivity' if datatype == 'ZDR': zdr = 'differential_reflectivity' if datatype == 'PhiDPc': phidp = 'corrected_differential_phase' if datatype == 'PhiDP': phidp = 'differential_phase' if datatype == 'TEMP': temp = 'temperature' if datatype == 'H_ISO0': iso0 = 'height_over_iso0' if datatype == 'hydro': hydro = 'radar_echo_classification' if datatype == 'RhoHV': rhohv = 'cross_correlation_ratio' if datatype == 'RhoHVc': rhohv = 'corrected_cross_correlation_ratio' if datatype == 'uRhoHV': rhohv = 'uncorrected_cross_correlation_ratio' ind_rad = int(radarnr[5:8])-1 if radar_list[ind_rad] is None: warn('No valid radar') return None, None radar = radar_list[ind_rad] if ((refl not in radar.fields) or (zdr not in radar.fields) or (phidp not in radar.fields) or (rhohv not in radar.fields)): warn('Unable to estimate PhiDP/KDP using selfconsistency. ' + 'Missing data') return None, None # determine which freezing level reference if temp is not None: if temp in radar.fields: temp_ref = 'temperature' else: warn('COSMO temperature field not available. 
' + 'Using fixed freezing level height to determine liquid phase') temp_ref = 'fixed_fzl' elif iso0 is not None: if iso0 in radar.fields: temp_ref = 'height_over_iso0' else: warn('Height over iso0 field not available. ' + 'Using fixed freezing level height to determine liquid phase') temp_ref = 'fixed_fzl' else: warn('Field to obtain the freezing level was not specified. ' + 'Using fixed freezing level height') temp_ref = 'fixed_fzl' # determine freezing level height if necessary fzl = None if temp_ref == 'fixed_fzl': if 'fzl' in dscfg: fzl = dscfg['fzl'] else: fzl = 2000. warn('Freezing level height not defined. Using default ' + str(fzl)+' m') # get self-consistency parametrization or curves parametrization = dscfg.get('parametrization', 'None') if dscfg['initialized'] == 0: # get frequency band freq = dscfg.get('frequency', None) if freq is None: if (radar.instrument_parameters is not None and 'frequency' in radar.instrument_parameters): freq = radar.instrument_parameters['frequency']['data'][0] else: warn('Unable to estimate Zh bias using ' + 'self-consistency. Unknown radar frequency') return None, None freq_band = pyart.retrieve.get_freq_band(freq) if parametrization == 'None': # find unique elevations el_vec = np.unique( (10.*np.round(radar.elevation['data'], decimals=1)).astype(int)) zdr_kdpzh_list = list() el_list = list() for el in el_vec: fname = ( dscfg['configpath'] + 'selfconsistency/' + 'selfconsistency_zdr_zhkdp_'+freq_band+'band_temp10_elev' + '{:03d}'.format(el)+'_mu05.txt') zdr_kdpzh_table = read_selfconsistency(fname) if zdr_kdpzh_table is not None: zdr_kdpzh_list.append(zdr_kdpzh_table) el_list.append((el/10.).astype(int)) if not el_list: warn('Unable to retrieve PhiDP and KDP using self-consistency. 
' + 'No selfconsistency files for the radar elevations.') return None, None zdr_kdpzh_dict = {'zdr_kdpzh': zdr_kdpzh_list, 'elev': el_list, 'freq_band': freq_band} else: zdr_kdpzh_dict = {'zdr_kdpzh': None, 'elev': None, 'freq_band': freq_band} dscfg['global_data'] = zdr_kdpzh_dict dscfg['initialized'] = 1 if dscfg['initialized'] == 1: # get user defined values rsmooth = dscfg.get('rsmooth', 2000.) min_rhohv = dscfg.get('min_rhohv', 0.92) filter_rain = dscfg.get('filter_rain', True) max_phidp = dscfg.get('max_phidp', 20.) ml_thickness = dscfg.get('ml_thickness', 700.) kdpsim_field = 'specific_differential_phase' phidpsim_field = 'differential_phase' r_res = radar.range['data'][1]-radar.range['data'][0] smooth_wind_len = int(rsmooth/r_res) kdpsim, phidpsim = pyart.correct.selfconsistency_kdp_phidp( radar, dscfg['global_data'], min_rhohv=min_rhohv, filter_rain=filter_rain, max_phidp=max_phidp, smooth_wind_len=smooth_wind_len, doc=15, fzl=fzl, thickness=ml_thickness, parametrization=parametrization, refl_field=refl, phidp_field=phidp, zdr_field=zdr, temp_field=temp, iso0_field=iso0, hydro_field=hydro, rhohv_field=rhohv, kdpsim_field=kdpsim_field, phidpsim_field=phidpsim_field, temp_ref=temp_ref) # prepare for exit new_dataset = {'radar_out': deepcopy(radar)} new_dataset['radar_out'].fields = dict() new_dataset['radar_out'].add_field(kdpsim_field, kdpsim) new_dataset['radar_out'].add_field(phidpsim_field, phidpsim) return new_dataset, ind_rad def process_selfconsistency_bias(procstatus, dscfg, radar_list=None): """ Estimates the reflectivity bias by means of the selfconsistency algorithm by Gourley Parameters ---------- procstatus : int Processing status: 0 initializing, 1 processing volume, 2 post-processing dscfg : dictionary of dictionaries data set configuration. Accepted Configuration Keywords:: datatype : list of string. Dataset keyword The input data types parametrization : str The type of parametrization for the self-consistency curves. 
Can be 'None', 'Gourley', 'Wolfensberger', 'Louf', 'Gorgucci' or 'Vaccarono' 'None' will use tables from config files. Default 'None'. fzl : float. Dataset keyword Default freezing level height. Default 2000. rsmooth : float. Dataset keyword length of the smoothing window [m]. Default 2000. min_rhohv : float. Dataset keyword minimum valid RhoHV. Default 0.92 filter_rain : Bool. Dataset keyword If True the hydrometeor classification is used to filter out gates that are not rain. Default True max_phidp : float. Dataset keyword maximum valid PhiDP [deg]. Default 20. ml_thickness : float. Dataset keyword Melting layer thickness [m]. Default 700. rcell : float. Dataset keyword length of continuous precipitation to consider the precipitation cell a valid phidp segment [m]. Default 15000. dphidp_min : float. Dataset keyword minimum phase shift [deg]. Default 2. dphidp_max : float. Dataset keyword maximum phase shift [deg]. Default 16. frequency : float. Dataset keyword the radar frequency [Hz]. If None that of the key frequency in attribute instrument_parameters of the radar object will be used. If the key or the attribute are not present the selfconsistency will not be computed check_wet_radome : Bool. Dataset keyword if True the average reflectivity of the closest gates to the radar is going to be check to find out whether there is rain over the radome. If there is rain no bias will be computed. Default True. wet_radome_refl : Float. Dataset keyword Average reflectivity [dBZ] of the gates close to the radar to consider the radome as wet. Default 25. wet_radome_rng_min, wet_radome_rng_max : Float. Dataset keyword Min and max range [m] of the disk around the radar used to compute the average reflectivity to determine whether the radome is wet. Default 2000 and 4000. wet_radome_ngates_min : int Minimum number of valid gates to consider that the radome is wet. 
Default 180 valid_gates_only : Bool If True the reflectivity bias obtained for each valid ray is going to be assigned only to gates of the segment used. That will give more weight to longer segments when computing the total bias. Default False keep_points : Bool If True the ZDR, ZH and KDP of the gates used in the self- consistency algorithm are going to be stored for further analysis. Default False rkdp : float The length of the window used to compute KDP with the single window least square method [m]. Default 6000. radar_list : list of Radar objects Optional. list of radar objects Returns ------- new_dataset : dict dictionary containing the output ind_rad : int radar index """ if procstatus == 0: return None, None keep_points = dscfg.get('keep_points', False) if procstatus == 2: if not keep_points or dscfg['initialized'] == 0: return None, None return ( {'selfconsistency_points': dscfg['global_data']['selfconsistency_points']}, None) temp = None iso0 = None hydro = None for datatypedescr in dscfg['datatype']: radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr) if datatype == 'dBZc': refl = 'corrected_reflectivity' if datatype == 'dBZ': refl = 'reflectivity' if datatype == 'ZDRc': zdr = 'corrected_differential_reflectivity' if datatype == 'ZDR': zdr = 'differential_reflectivity' if datatype == 'PhiDPc': phidp = 'corrected_differential_phase' if datatype == 'PhiDP': phidp = 'differential_phase' if datatype == 'TEMP': temp = 'temperature' if datatype == 'H_ISO0': iso0 = 'height_over_iso0' if datatype == 'hydro': hydro = 'radar_echo_classification' if datatype == 'RhoHV': rhohv = 'cross_correlation_ratio' if datatype == 'RhoHVc': rhohv = 'corrected_cross_correlation_ratio' if datatype == 'uRhoHV': rhohv = 'uncorrected_cross_correlation_ratio' ind_rad = int(radarnr[5:8])-1 if radar_list[ind_rad] is None: warn('No valid radar') return None, None radar = radar_list[ind_rad] if ((refl not in radar.fields) or (zdr not in radar.fields) or (phidp not in 
radar.fields) or (rhohv not in radar.fields)): warn('Unable to estimate reflectivity bias using selfconsistency. ' + 'Missing data') return None, None # determine which freezing level reference if temp is not None: if temp in radar.fields: temp_ref = 'temperature' else: warn('COSMO temperature field not available. ' + 'Using fixed freezing level height to determine liquid phase') temp_ref = 'fixed_fzl' elif iso0 is not None: if iso0 in radar.fields: temp_ref = 'height_over_iso0' else: warn('Height over iso0 field not available. ' + 'Using fixed freezing level height to determine liquid phase') temp_ref = 'fixed_fzl' else: warn('Field to obtain the freezing level was not specified. ' + 'Using fixed freezing level height') temp_ref = 'fixed_fzl' # determine freezing level height if necessary fzl = None if temp_ref == 'fixed_fzl': if 'fzl' in dscfg: fzl = dscfg['fzl'] else: fzl = 2000. warn('Freezing level height not defined. Using default ' + str(fzl)+' m') # get self-consistency parametrization or curves parametrization = dscfg.get('parametrization', 'None') if dscfg['initialized'] == 0: # get frequency band freq = dscfg.get('frequency', None) if freq is None: if (radar.instrument_parameters is not None and 'frequency' in radar.instrument_parameters): freq = radar.instrument_parameters['frequency']['data'][0] else: warn('Unable to estimate Zh bias using ' + 'self-consistency. 
Unknown radar frequency') return None, None freq_band = pyart.retrieve.get_freq_band(freq) if parametrization == 'None': # find unique elevations el_vec = np.unique( (10.*np.round(radar.elevation['data'], decimals=1)).astype(int)) zdr_kdpzh_list = list() el_list = list() for el in el_vec: fname = ( dscfg['configpath'] + 'selfconsistency/' + 'selfconsistency_zdr_zhkdp_'+freq_band+'band_temp10_elev' + '{:03d}'.format(el)+'_mu05.txt') zdr_kdpzh_table = read_selfconsistency(fname) if zdr_kdpzh_table is not None: zdr_kdpzh_list.append(zdr_kdpzh_table) el_list.append((el/10.).astype(int)) if not el_list: warn('Unable to retrieve Zh bias using self-consistency. ' + 'No selfconsistency files for the radar elevations.') return None, None zdr_kdpzh_dict = {'zdr_kdpzh': zdr_kdpzh_list, 'elev': el_list, 'freq_band': freq_band} else: zdr_kdpzh_dict = {'zdr_kdpzh': None, 'elev': None, 'freq_band': freq_band} dscfg['global_data'] = {'zdr_kdpzh_dict': zdr_kdpzh_dict} if keep_points: dscfg['global_data'].update({'selfconsistency_points': { 'zdr': [], 'kdp': [], 'zh': [], 'timeinfo': dscfg['timeinfo'], 'parametrization': parametrization, 'zdr_kdpzh_dict': zdr_kdpzh_dict }}) dscfg['initialized'] = 1 if dscfg['initialized'] == 1: # get user defined values rsmooth = dscfg.get('rsmooth', 2000.) min_rhohv = dscfg.get('min_rhohv', 0.92) filter_rain = dscfg.get('filter_rain', True) max_phidp = dscfg.get('max_phidp', 20.) ml_thickness = dscfg.get('ml_thickness', 700.) rcell = dscfg.get('rcell', 15000.) dphidp_min = dscfg.get('dphidp_min', 2.) dphidp_max = dscfg.get('dphidp_max', 16.) check_wet_radome = dscfg.get('check_wet_radome', True) wet_radome_refl = dscfg.get('wet_radome_refl', 25.) wet_radome_rng_min = dscfg.get('wet_radome_rng_min', 2000.) wet_radome_rng_max = dscfg.get('wet_radome_rng_max', 4000.) wet_radome_ngates_min = dscfg.get('wet_radome_ngates_min', 180) valid_gates_only = dscfg.get('valid_gates_only', False) rkdp = dscfg.get('rkdp', 6000.) 
r_res = radar.range['data'][1]-radar.range['data'][0] smooth_wind_len = int(rsmooth/r_res) kdp_wind_len = int(rkdp/r_res) min_rcons = int(rcell/r_res) wet_radome_len_min = int(wet_radome_rng_min/r_res) wet_radome_len_max = int(wet_radome_rng_max/r_res) refl_bias, selfconsistency_dict = pyart.correct.selfconsistency_bias( radar, dscfg['global_data']['zdr_kdpzh_dict'], min_rhohv=min_rhohv, filter_rain=filter_rain, max_phidp=max_phidp, smooth_wind_len=smooth_wind_len, doc=15, fzl=fzl, thickness=ml_thickness, min_rcons=min_rcons, dphidp_min=dphidp_min, dphidp_max=dphidp_max, parametrization=parametrization, refl_field=refl, phidp_field=phidp, zdr_field=zdr, temp_field=temp, iso0_field=iso0, hydro_field=hydro, rhohv_field=rhohv, temp_ref=temp_ref, check_wet_radome=check_wet_radome, wet_radome_refl=wet_radome_refl, wet_radome_len_min=wet_radome_len_min, wet_radome_len_max=wet_radome_len_max, valid_gates_only=valid_gates_only, keep_points=keep_points, kdp_wind_len=kdp_wind_len) if keep_points: if selfconsistency_dict is not None: dscfg['global_data']['selfconsistency_points']['zdr'].extend( selfconsistency_dict['zdr']) dscfg['global_data']['selfconsistency_points']['zh'].extend( selfconsistency_dict['zh']) dscfg['global_data']['selfconsistency_points']['kdp'].extend( selfconsistency_dict['kdp']) # prepare for exit new_dataset = {'radar_out': deepcopy(radar)} new_dataset['radar_out'].fields = dict() new_dataset['radar_out'].add_field('reflectivity_bias', refl_bias) return new_dataset, ind_rad def process_selfconsistency_bias2(procstatus, dscfg, radar_list=None): """ Estimates the reflectivity bias by means of the selfconsistency algorithm by Gourley Parameters ---------- procstatus : int Processing status: 0 initializing, 1 processing volume, 2 post-processing dscfg : dictionary of dictionaries data set configuration. Accepted Configuration Keywords:: datatype : list of string. 
Dataset keyword The input data types parametrization : str The type of parametrization for the self-consistency curves. Can be 'None', 'Gourley', 'Wolfensberger', 'Louf', 'Gorgucci' or 'Vaccarono' 'None' will use tables from config files. Default 'None'. fzl : float. Dataset keyword Default freezing level height. Default 2000. rsmooth : float. Dataset keyword length of the smoothing window [m]. Default 2000. min_rhohv : float. Dataset keyword minimum valid RhoHV. Default 0.92 filter_rain : Bool. Dataset keyword If True the hydrometeor classification is used to filter out gates that are not rain. Default True max_phidp : float. Dataset keyword maximum valid PhiDP [deg]. Default 20. ml_thickness : float. Dataset keyword Melting layer thickness [m]. Default 700. rcell : float. Dataset keyword length of continuous precipitation to consider the precipitation cell a valid phidp segment [m]. Default 15000. frequency : float. Dataset keyword the radar frequency [Hz]. If None that of the key frequency in attribute instrument_parameters of the radar object will be used. If the key or the attribute are not present the selfconsistency will not be computed check_wet_radome : Bool. Dataset keyword if True the average reflectivity of the closest gates to the radar is going to be check to find out whether there is rain over the radome. If there is rain no bias will be computed. Default True. wet_radome_refl : Float. Dataset keyword Average reflectivity [dBZ] of the gates close to the radar to consider the radome as wet. Default 25. wet_radome_rng_min, wet_radome_rng_max : Float. Dataset keyword Min and max range [m] of the disk around the radar used to compute the average reflectivity to determine whether the radome is wet. Default 2000 and 4000. wet_radome_ngates_min : int Minimum number of valid gates to consider that the radome is wet. 
Default 180 keep_points : Bool If True the ZDR, ZH and KDP of the gates used in the self- consistency algorithm are going to be stored for further analysis. Default False bias_per_gate : Bool If True the bias per gate will be computed radar_list : list of Radar objects Optional. list of radar objects Returns ------- new_dataset : dict dictionary containing the output ind_rad : int radar index """ if procstatus == 0: return None, None keep_points = dscfg.get('keep_points', False) bias_type = dscfg.get('bias_type', 'cumulative') provide_confidence = dscfg.get('provide_confidence', False) nsamples_confidence = dscfg.get('nsamples_confidence', 1000) if procstatus == 2: if dscfg['initialized'] == 0: return None, None dataset = None if bias_type == 'cumulative': kdp_obs = np.ma.array( dscfg['global_data']['kdp_data_dict']['kdp_obs']) kdp_sim = np.ma.array( dscfg['global_data']['kdp_data_dict']['kdp_sim']) reflectivity_bias = { 'value': 10.*np.ma.log10( np.ma.sum(kdp_sim)/np.ma.sum(kdp_obs)), 'npoints': kdp_obs.size, 'timeinfo': dscfg['global_data']['kdp_data_dict']['timeinfo'], 'bias_type': 'cumulative'} if provide_confidence: samples = ratio_bootstrapping( kdp_sim, kdp_obs, nsamples=nsamples_confidence) reflectivity_bias.update( {'samples': 10.*np.ma.log10(samples)}) dataset = {'reflectivity_bias': reflectivity_bias} if keep_points: if dataset is None: dataset = {'selfconsistency_points': dscfg['global_data']['selfconsistency_points']} else: dataset.update( {'selfconsistency_points': dscfg['global_data']['selfconsistency_points']}) return dataset, None temp = None iso0 = None hydro = None phidp = None for datatypedescr in dscfg['datatype']: radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr) if datatype == 'dBZc': refl = 'corrected_reflectivity' if datatype == 'dBZ': refl = 'reflectivity' if datatype == 'ZDRc': zdr = 'corrected_differential_reflectivity' if datatype == 'ZDR': zdr = 'differential_reflectivity' if datatype == 'PhiDPc': phidp = 
'corrected_differential_phase' if datatype == 'PhiDP': phidp = 'differential_phase' if datatype == 'KDPc': kdp = 'corrected_specific_differential_phase' if datatype == 'KDP': kdp = 'specific_differential_phase' if datatype == 'TEMP': temp = 'temperature' if datatype == 'H_ISO0': iso0 = 'height_over_iso0' if datatype == 'hydro': hydro = 'radar_echo_classification' if datatype == 'RhoHV': rhohv = 'cross_correlation_ratio' if datatype == 'RhoHVc': rhohv = 'corrected_cross_correlation_ratio' if datatype == 'uRhoHV': rhohv = 'uncorrected_cross_correlation_ratio' ind_rad = int(radarnr[5:8])-1 if radar_list[ind_rad] is None: warn('No valid radar') return None, None radar = radar_list[ind_rad] if ((refl not in radar.fields) or (zdr not in radar.fields) or (kdp not in radar.fields) or (rhohv not in radar.fields)): warn('Unable to estimate reflectivity bias using selfconsistency. ' + 'Missing data') return None, None # determine which freezing level reference if temp is not None: if temp in radar.fields: temp_ref = 'temperature' else: warn('COSMO temperature field not available. ' + 'Using fixed freezing level height to determine liquid phase') temp_ref = 'fixed_fzl' elif iso0 is not None: if iso0 in radar.fields: temp_ref = 'height_over_iso0' else: warn('Height over iso0 field not available. ' + 'Using fixed freezing level height to determine liquid phase') temp_ref = 'fixed_fzl' else: warn('Field to obtain the freezing level was not specified. ' + 'Using fixed freezing level height') temp_ref = 'fixed_fzl' # determine freezing level height if necessary fzl = None if temp_ref == 'fixed_fzl': if 'fzl' in dscfg: fzl = dscfg['fzl'] else: fzl = 2000. warn('Freezing level height not defined. 
Using default ' + str(fzl)+' m') # get self-consistency parametrization or curves parametrization = dscfg.get('parametrization', 'None') if dscfg['initialized'] == 0: # get frequency band freq = dscfg.get('frequency', None) if freq is None: if (radar.instrument_parameters is not None and 'frequency' in radar.instrument_parameters): freq = radar.instrument_parameters['frequency']['data'][0] else: warn('Unable to estimate Zh bias using ' + 'self-consistency. Unknown radar frequency') return None, None freq_band = pyart.retrieve.get_freq_band(freq) if parametrization == 'None': # find unique elevations el_vec = np.unique( (10.*np.round(radar.elevation['data'], decimals=1)).astype(int)) zdr_kdpzh_list = list() el_list = list() for el in el_vec: fname = ( dscfg['configpath'] + 'selfconsistency/' + 'selfconsistency_zdr_zhkdp_'+freq_band+'band_temp10_elev' + '{:03d}'.format(el)+'_mu05.txt') zdr_kdpzh_table = read_selfconsistency(fname) if zdr_kdpzh_table is not None: zdr_kdpzh_list.append(zdr_kdpzh_table) el_list.append((el/10.).astype(int)) if not el_list: warn('Unable to retrieve Zh bias using self-consistency. ' + 'No selfconsistency files for the radar elevations.') return None, None zdr_kdpzh_dict = {'zdr_kdpzh': zdr_kdpzh_list, 'elev': el_list, 'freq_band': freq_band} else: zdr_kdpzh_dict = {'zdr_kdpzh': None, 'elev': None, 'freq_band': freq_band} dscfg['global_data'] = {'zdr_kdpzh_dict': zdr_kdpzh_dict} dscfg['global_data'].update({'kdp_data_dict': { 'kdp_obs': [], 'kdp_sim': [], 'timeinfo': dscfg['timeinfo'] }}) if keep_points: dscfg['global_data'].update({'selfconsistency_points': { 'zdr': [], 'kdp': [], 'zh': [], 'timeinfo': dscfg['timeinfo'], 'parametrization': parametrization, 'zdr_kdpzh_dict': zdr_kdpzh_dict }}) dscfg['initialized'] = 1 if dscfg['initialized'] == 1: # get user defined values rsmooth = dscfg.get('rsmooth', 2000.) 
min_rhohv = dscfg.get('min_rhohv', 0.92) min_zdr = dscfg.get('min_zdr', 0.2) filter_rain = dscfg.get('filter_rain', True) max_phidp = dscfg.get('max_phidp', 20.) ml_thickness = dscfg.get('ml_thickness', 700.) rcell = dscfg.get('rcell', 15000.) check_wet_radome = dscfg.get('check_wet_radome', True) wet_radome_refl = dscfg.get('wet_radome_refl', 25.) wet_radome_rng_min = dscfg.get('wet_radome_rng_min', 2000.) wet_radome_rng_max = dscfg.get('wet_radome_rng_max', 4000.) wet_radome_ngates_min = dscfg.get('wet_radome_ngates_min', 180) bias_per_gate = dscfg.get('bias_per_gate', False) r_res = radar.range['data'][1]-radar.range['data'][0] smooth_wind_len = int(rsmooth/r_res) min_rcons = int(rcell/r_res) wet_radome_len_min = int(wet_radome_rng_min/r_res) wet_radome_len_max = int(wet_radome_rng_max/r_res) kdp_data_dict, refl_bias, selfcons_dict = pyart.correct.selfconsistency_bias2( radar, dscfg['global_data']['zdr_kdpzh_dict'], min_rhohv=min_rhohv, min_zdr=min_zdr, filter_rain=filter_rain, max_phidp=max_phidp, smooth_wind_len=smooth_wind_len, doc=15, fzl=fzl, thickness=ml_thickness, min_rcons=min_rcons, parametrization=parametrization, refl_field=refl, phidp_field=phidp, zdr_field=zdr, temp_field=temp, iso0_field=iso0, hydro_field=hydro, rhohv_field=rhohv, kdp_field=kdp, temp_ref=temp_ref, check_wet_radome=check_wet_radome, wet_radome_refl=wet_radome_refl, wet_radome_len_min=wet_radome_len_min, wet_radome_len_max=wet_radome_len_max, keep_points=keep_points, bias_per_gate=bias_per_gate) if keep_points: if selfcons_dict is not None: dscfg['global_data']['selfconsistency_points']['zdr'].extend( selfcons_dict['zdr']) dscfg['global_data']['selfconsistency_points']['zh'].extend( selfcons_dict['zh']) dscfg['global_data']['selfconsistency_points']['kdp'].extend( selfcons_dict['kdp']) if kdp_data_dict is not None: dscfg['global_data']['kdp_data_dict']['kdp_sim'].extend( kdp_data_dict['kdp_sim']) dscfg['global_data']['kdp_data_dict']['kdp_obs'].extend( kdp_data_dict['kdp_obs']) # 
prepare for exit dataset = None if bias_type == 'instant': reflectivity_bias = { 'value': np.ma.masked, 'npoints': 0, 'timeinfo': dscfg['timeinfo'], 'bias_type': 'instant'} if kdp_data_dict is not None: kdp_obs = np.ma.array(kdp_data_dict['kdp_obs']) kdp_sim = np.ma.array(kdp_data_dict['kdp_sim']) reflectivity_bias['value'] = 10.*np.ma.log10( np.ma.sum(kdp_sim)/np.ma.sum(kdp_obs)) reflectivity_bias['npoints'] = kdp_obs.size if provide_confidence: samples = ratio_bootstrapping( kdp_sim, kdp_obs, iter=nsamples_confidence) reflectivity_bias.update( {'samples': 10.*np.ma.log10(samples)}) dataset = {'reflectivity_bias': reflectivity_bias} if bias_per_gate: if refl_bias is not None: if dataset is None: dataset = {'radar_out': deepcopy(radar)} else: dataset.update({'radar_out': deepcopy(radar)}) dataset['radar_out'].fields = dict() dataset['radar_out'].add_field( 'reflectivity_bias', refl_bias) return dataset, ind_rad def process_estimate_phidp0(procstatus, dscfg, radar_list=None): """ estimates the system differential phase offset at each ray Parameters ---------- procstatus : int Processing status: 0 initializing, 1 processing volume, 2 post-processing dscfg : dictionary of dictionaries data set configuration. Accepted Configuration Keywords:: datatype : list of string. Dataset keyword The input data types rmin : float. Dataset keyword The minimum range where to look for valid data [m] rmax : float. Dataset keyword The maximum range where to look for valid data [m] rcell : float. Dataset keyword The length of a continuous cell to consider it valid precip [m] Zmin : float. Dataset keyword The minimum reflectivity [dBZ] Zmax : float. Dataset keyword The maximum reflectivity [dBZ] radar_list : list of Radar objects Optional. 
list of radar objects Returns ------- new_dataset : dict dictionary containing the output ind_rad : int radar index """ if procstatus != 1: return None, None for datatypedescr in dscfg['datatype']: radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr) if datatype == 'dBZ': refl_field = 'reflectivity' if datatype == 'dBZc': refl_field = 'corrected_reflectivity' if datatype == 'PhiDP': psidp_field = 'differential_phase' if datatype == 'PhiDPc': psidp_field = 'corrected_differential_phase' if datatype == 'uPhiDP': psidp_field = 'uncorrected_differential_phase' ind_rad = int(radarnr[5:8])-1 if radar_list[ind_rad] is None: warn('No valid radar') return None, None radar = radar_list[ind_rad] if (refl_field not in radar.fields) or (psidp_field not in radar.fields): warn('Unable to estimate PhiDP system offset. Missing data') return None, None ind_rmin = np.where(radar.range['data'] > dscfg['rmin'])[0][0] ind_rmax = np.where(radar.range['data'] < dscfg['rmax'])[0][-1] r_res = radar.range['data'][1]-radar.range['data'][0] min_rcons = int(dscfg['rcell']/r_res) phidp0, first_gates = pyart.correct.det_sys_phase_ray( radar, ind_rmin=ind_rmin, ind_rmax=ind_rmax, min_rcons=min_rcons, zmin=dscfg['Zmin'], zmax=dscfg['Zmax'], phidp_field=psidp_field, refl_field=refl_field) # prepare for exit new_dataset = {'radar_out': deepcopy(radar)} new_dataset['radar_out'].fields = dict() new_dataset['radar_out'].add_field('system_differential_phase', phidp0) new_dataset['radar_out'].add_field( 'first_gate_differential_phase', first_gates) return new_dataset, ind_rad def process_rhohv_rain(procstatus, dscfg, radar_list=None): """ Keeps only suitable data to evaluate the 80 percentile of RhoHV in rain Parameters ---------- procstatus : int Processing status: 0 initializing, 1 processing volume, 2 post-processing dscfg : dictionary of dictionaries data set configuration. Accepted Configuration Keywords:: datatype : list of string. Dataset keyword The input data types rmin : float. 
Dataset keyword minimum range where to look for rain [m]. Default 1000. rmax : float. Dataset keyword maximum range where to look for rain [m]. Default 50000. Zmin : float. Dataset keyword minimum reflectivity to consider the bin as precipitation [dBZ]. Default 20. Zmax : float. Dataset keyword maximum reflectivity to consider the bin as precipitation [dBZ] Default 40. ml_thickness : float. Dataset keyword assumed thickness of the melting layer. Default 700. fzl : float. Dataset keyword The default freezing level height. It will be used if no temperature field name is specified or the temperature field is not in the radar object. Default 2000. radar_list : list of Radar objects Optional. list of radar objects Returns ------- new_dataset : dict dictionary containing the output ind_rad : int radar index """ if procstatus != 1: return None, None temp_field = None iso0_field = None for datatypedescr in dscfg['datatype']: radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr) if datatype == 'RhoHV': rhohv_field = 'cross_correlation_ratio' if datatype == 'RhoHVc': rhohv_field = 'corrected_cross_correlation_ratio' if datatype == 'uRhoHV': rhohv_field = 'uncorrected_cross_correlation_ratio' if datatype == 'dBZc': refl_field = 'corrected_reflectivity' if datatype == 'dBZ': refl_field = 'reflectivity' if datatype == 'TEMP': temp_field = 'temperature' if datatype == 'H_ISO0': iso0_field = 'height_over_iso0' ind_rad = int(radarnr[5:8])-1 if radar_list[ind_rad] is None: warn('No valid radar') return None, None radar = radar_list[ind_rad] if ((refl_field not in radar.fields) or (rhohv_field not in radar.fields)): warn('Unable to estimate RhoHV in rain. Missing data') return None, None # determine which freezing level reference temp_ref = 'temperature' if temp_field is None and iso0_field is None: warn('Field to obtain the freezing level was not specified. 
' + 'Using fixed freezing level height') temp_ref = 'fixed_fzl' elif temp_field is not None: if temp_field not in radar.fields: warn('COSMO temperature field not available. ' + 'Using fixed freezing level height') temp_ref = 'fixed_fzl' elif iso0_field is not None: if iso0_field not in radar.fields: warn('Height over iso0 field not available. ' + 'Using fixed freezing level height') temp_ref = 'fixed_fzl' else: temp_ref = 'height_over_iso0' # determine freezing level height if necessary fzl = None if temp_ref == 'fixed_fzl': if 'fzl' in dscfg: fzl = dscfg['fzl'] else: fzl = 2000. warn('Freezing level height not defined. Using default ' + str(fzl)+' m') # default values rmin = 1000. rmax = 50000. zmin = 20. zmax = 40. thickness = 700. # user defined values if 'rmin' in dscfg: rmin = dscfg['rmin'] if 'rmax' in dscfg: rmax = dscfg['rmax'] if 'Zmin' in dscfg: zmin = dscfg['Zmin'] if 'Zmax' in dscfg: zmax = dscfg['Zmax'] if 'ml_thickness' in dscfg: thickness = dscfg['ml_thickness'] ind_rmin = np.where(radar.range['data'] > rmin)[0][0] ind_rmax = np.where(radar.range['data'] < rmax)[0][-1] rhohv_rain = pyart.correct.est_rhohv_rain( radar, ind_rmin=ind_rmin, ind_rmax=ind_rmax, zmin=zmin, zmax=zmax, thickness=thickness, doc=15, fzl=fzl, rhohv_field=rhohv_field, temp_field=temp_field, iso0_field=iso0_field, refl_field=refl_field, temp_ref=temp_ref) # prepare for exit new_dataset = {'radar_out': deepcopy(radar)} new_dataset['radar_out'].fields = dict() new_dataset['radar_out'].add_field( 'cross_correlation_ratio_in_rain', rhohv_rain) return new_dataset, ind_rad def process_zdr_precip(procstatus, dscfg, radar_list=None): """ Keeps only suitable data to evaluate the differential reflectivity in moderate rain or precipitation (for vertical scans) Parameters ---------- procstatus : int Processing status: 0 initializing, 1 processing volume, 2 post-processing dscfg : dictionary of dictionaries data set configuration. Accepted Configuration Keywords:: datatype : list of string. 
Dataset keyword The input data types ml_filter : boolean. Dataset keyword indicates if a filter on data in and above the melting layer is applied. Default True. rmin : float. Dataset keyword minimum range where to look for rain [m]. Default 1000. rmax : float. Dataset keyword maximum range where to look for rain [m]. Default 50000. Zmin : float. Dataset keyword minimum reflectivity to consider the bin as precipitation [dBZ]. Default 20. Zmax : float. Dataset keyword maximum reflectivity to consider the bin as precipitation [dBZ] Default 22. RhoHVmin : float. Dataset keyword minimum RhoHV to consider the bin as precipitation Default 0.97 PhiDPmax : float. Dataset keyword maximum PhiDP to consider the bin as precipitation [deg] Default 10. elmax : float. Dataset keyword maximum elevation angle where to look for precipitation [deg] Default None. ml_thickness : float. Dataset keyword assumed thickness of the melting layer. Default 700. fzl : float. Dataset keyword The default freezing level height. It will be used if no temperature field name is specified or the temperature field is not in the radar object. Default 2000. radar_list : list of Radar objects Optional. 
list of radar objects Returns ------- new_dataset : dict dictionary containing the output ind_rad : int radar index """ if procstatus != 1: return None, None temp_field = None iso0_field = None for datatypedescr in dscfg['datatype']: radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr) if datatype == 'ZDR': zdr_field = 'differential_reflectivity' if datatype == 'ZDRc': zdr_field = 'corrected_differential_reflectivity' if datatype == 'PhiDP': phidp_field = 'differential_phase' if datatype == 'PhiDPc': phidp_field = 'corrected_differential_phase' if datatype == 'RhoHV': rhohv_field = 'cross_correlation_ratio' if datatype == 'RhoHVc': rhohv_field = 'corrected_cross_correlation_ratio' if datatype == 'uRhoHV': rhohv_field = 'uncorrected_cross_correlation_ratio' if datatype == 'dBZc': refl_field = 'corrected_reflectivity' if datatype == 'dBZ': refl_field = 'reflectivity' if datatype == 'TEMP': temp_field = 'temperature' if datatype == 'H_ISO0': iso0_field = 'height_over_iso0' ind_rad = int(radarnr[5:8])-1 if radar_list[ind_rad] is None: warn('No valid radar') return None, None radar = radar_list[ind_rad] if ((refl_field not in radar.fields) or (rhohv_field not in radar.fields) or (zdr_field not in radar.fields) or (phidp_field not in radar.fields)): warn('Unable to estimate ZDR in rain. Missing data') return None, None # if data in and above the melting layer has to be filtered determine the # field to use fzl = None ml_filter = True if 'ml_filter' in dscfg: ml_filter = dscfg['ml_filter'] if ml_filter: # determine which freezing level reference temp_ref = 'temperature' if temp_field is None and iso0_field is None: warn('Field to obtain the freezing level was not specified. ' + 'Using fixed freezing level height') temp_ref = 'fixed_fzl' elif temp_field is not None: if temp_field not in radar.fields: warn('COSMO temperature field not available. 
' + 'Using fixed freezing level height') temp_ref = 'fixed_fzl' elif iso0_field is not None: if iso0_field not in radar.fields: warn('Height over iso0 field not available. ' + 'Using fixed freezing level height') temp_ref = 'fixed_fzl' else: temp_ref = 'height_over_iso0' # determine freezing level height if necessary if temp_ref == 'fixed_fzl': if 'fzl' in dscfg: fzl = dscfg['fzl'] else: fzl = 2000. warn('Freezing level height not defined. Using default ' + str(fzl)+' m') else: temp_ref = None # default values rmin = 1000. rmax = 50000. zmin = 20. zmax = 22. rhohvmin = 0.97 phidpmax = 10. elmax = None thickness = 700. # user defined values if 'rmin' in dscfg: rmin = dscfg['rmin'] if 'rmax' in dscfg: rmax = dscfg['rmax'] if 'Zmin' in dscfg: zmin = dscfg['Zmin'] if 'Zmax' in dscfg: zmax = dscfg['Zmax'] if 'RhoHVmin' in dscfg: rhohvmin = dscfg['RhoHVmin'] if 'PhiDPmax' in dscfg: phidpmax = dscfg['PhiDPmax'] if 'elmax' in dscfg: elmax = dscfg['elmax'] if 'ml_thickness' in dscfg: thickness = dscfg['ml_thickness'] ind_rmin = np.where(radar.range['data'] > rmin)[0][0] ind_rmax = np.where(radar.range['data'] < rmax)[0][-1] zdr_precip = pyart.correct.est_zdr_precip( radar, ind_rmin=ind_rmin, ind_rmax=ind_rmax, zmin=zmin, zmax=zmax, rhohvmin=rhohvmin, phidpmax=phidpmax, elmax=elmax, thickness=thickness, doc=15, fzl=fzl, zdr_field=zdr_field, rhohv_field=rhohv_field, phidp_field=phidp_field, temp_field=temp_field, iso0_field=iso0_field, refl_field=refl_field, temp_ref=temp_ref) # prepare for exit new_dataset = {'radar_out': deepcopy(radar)} new_dataset['radar_out'].fields = dict() new_dataset['radar_out'].add_field( 'differential_reflectivity_in_precipitation', zdr_precip) return new_dataset, ind_rad def process_zdr_snow(procstatus, dscfg, radar_list=None): """ Keeps only suitable data to evaluate the differential reflectivity in snow Parameters ---------- procstatus : int Processing status: 0 initializing, 1 processing volume, 2 post-processing dscfg : dictionary of 
dictionaries data set configuration. Accepted Configuration Keywords:: datatype : list of string. Dataset keyword The input data types rmin : float. Dataset keyword minimum range where to look for rain [m]. Default 1000. rmax : float. Dataset keyword maximum range where to look for rain [m]. Default 50000. Zmin : float. Dataset keyword minimum reflectivity to consider the bin as snow [dBZ]. Default 0. Zmax : float. Dataset keyword maximum reflectivity to consider the bin as snow [dBZ] Default 30. SNRmin : float. Dataset keyword minimum SNR to consider the bin as snow [dB]. Default 10. SNRmax : float. Dataset keyword maximum SNR to consider the bin as snow [dB] Default 50. RhoHVmin : float. Dataset keyword minimum RhoHV to consider the bin as snow Default 0.97 PhiDPmax : float. Dataset keyword maximum PhiDP to consider the bin as snow [deg] Default 10. elmax : float. Dataset keyword maximum elevation angle where to look for snow [deg] Default None. KDPmax : float. Dataset keyword maximum KDP to consider the bin as snow [deg] Default None TEMPmin : float. Dataset keyword minimum temperature to consider the bin as snow [deg C]. Default None TEMPmax : float. Dataset keyword maximum temperature to consider the bin as snow [deg C] Default None hydroclass : list of ints. Dataset keyword list of hydrometeor classes to keep for the analysis Default [2] (dry snow) radar_list : list of Radar objects Optional. 
list of radar objects Returns ------- new_dataset : dict dictionary containing the output ind_rad : int radar index """ if procstatus != 1: return None, None temp_field = None kdp_field = None for datatypedescr in dscfg['datatype']: radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr) if datatype == 'ZDR': zdr_field = 'differential_reflectivity' if datatype == 'ZDRc': zdr_field = 'corrected_differential_reflectivity' if datatype == 'PhiDP': phidp_field = 'differential_phase' if datatype == 'PhiDPc': phidp_field = 'corrected_differential_phase' if datatype == 'RhoHV': rhohv_field = 'cross_correlation_ratio' if datatype == 'RhoHVc': rhohv_field = 'corrected_cross_correlation_ratio' if datatype == 'uRhoHV': rhohv_field = 'uncorrected_cross_correlation_ratio' if datatype == 'dBZc': refl_field = 'corrected_reflectivity' if datatype == 'dBZ': refl_field = 'reflectivity' if datatype == 'TEMP': temp_field = 'temperature' if datatype == 'PhiDP': kdp_field = 'specific_differential_phase' if datatype == 'PhiDPc': kdp_field = 'corrected_specific_differential_phase' if datatype == 'SNRh': snr_field = 'signal_to_noise_ratio_hh' if datatype == 'SNRv': snr_field = 'signal_to_noise_ratio_vv' if datatype == 'hydro': hydro_field = 'radar_echo_classification' ind_rad = int(radarnr[5:8])-1 if radar_list[ind_rad] is None: warn('No valid radar') return None, None radar = radar_list[ind_rad] if ((refl_field not in radar.fields) or (rhohv_field not in radar.fields) or (zdr_field not in radar.fields) or (phidp_field not in radar.fields) or (hydro_field not in radar.fields)): warn('Unable to estimate ZDR in snow. Missing data') return None, None # User defined values rmin = dscfg.get('rmin', 1000.) rmax = dscfg.get('rmax', 50000.) zmin = dscfg.get('Zmin', 0.) zmax = dscfg.get('Zmax', 30.) snrmin = dscfg.get('SNRmin', 10.) snrmax = dscfg.get('SNRmax', 50.) rhohvmin = dscfg.get('RhoHVmin', 0.97) phidpmax = dscfg.get('PhiDPmax', 10.) 
elmax = dscfg.get('elmax', None) kdpmax = dscfg.get('KDPmax', None) tempmin = dscfg.get('TEMPmin', None) tempmax = dscfg.get('TEMPmax', None) hydroclass = dscfg.get('hydroclass', [2]) ind_rmin = np.where(radar.range['data'] > rmin)[0][0] ind_rmax = np.where(radar.range['data'] < rmax)[0][-1] zdr_snow = pyart.correct.est_zdr_snow( radar, ind_rmin=ind_rmin, ind_rmax=ind_rmax, zmin=zmin, zmax=zmax, snrmin=snrmin, snrmax=snrmax, rhohvmin=rhohvmin, kept_values=hydroclass, phidpmax=phidpmax, kdpmax=kdpmax, tempmin=tempmin, tempmax=tempmax, elmax=elmax, zdr_field=zdr_field, rhohv_field=rhohv_field, phidp_field=phidp_field, temp_field=temp_field, snr_field=snr_field, hydro_field=hydro_field, kdp_field=kdp_field, refl_field=refl_field) # prepare for exit new_dataset = {'radar_out': deepcopy(radar)} new_dataset['radar_out'].fields = dict() new_dataset['radar_out'].add_field( 'differential_reflectivity_in_snow', zdr_snow) return new_dataset, ind_rad def process_monitoring(procstatus, dscfg, radar_list=None): """ computes monitoring statistics Parameters ---------- procstatus : int Processing status: 0 initializing, 1 processing volume, 2 post-processing dscfg : dictionary of dictionaries data set configuration. Accepted Configuration Keywords:: datatype : list of string. Dataset keyword The input data types step : float. Dataset keyword The width of the histogram bin. Default is None. In that case the default step in function get_histogram_bins is used max_rays : int. Dataset keyword The maximum number of rays per sweep used when computing the histogram. If set above 0 the number of rays per sweep will be checked and if above max_rays the last rays of the sweep will be removed radar_list : list of Radar objects Optional. 
list of radar objects Returns ------- new_dataset : Radar radar object containing histogram data ind_rad : int radar index """ if procstatus == 0: return None, None if procstatus == 1: for datatypedescr in dscfg['datatype']: radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr) field_name = get_fieldname_pyart(datatype) break ind_rad = int(radarnr[5:8])-1 if radar_list[ind_rad] is None: warn('No valid radar') return None, None radar = radar_list[ind_rad] if field_name not in radar.fields: warn(field_name+' not available.') return None, None step = dscfg.get('step', None) max_rays = dscfg.get('max_rays', 0) bin_edges = get_histogram_bins(field_name, step=step) nbins = len(bin_edges)-1 step = bin_edges[1]-bin_edges[0] bin_centers = bin_edges[:-1]+step/2. radar_aux = deepcopy(radar) if max_rays > 0: # remove excess of rays ind_above_max = np.where( radar.rays_per_sweep['data'] > max_rays)[0] if ind_above_max.size > 0: radar_aux.rays_per_sweep['data'][ind_above_max] = max_rays for ind in ind_above_max: excess_rays = radar.rays_per_sweep['data'][ind]-max_rays radar_aux.sweep_end_ray_index['data'][ind:] -= ( excess_rays) if ind < radar.nsweeps-1: radar_aux.sweep_start_ray_index['data'][ind+1:] = ( radar_aux.sweep_end_ray_index['data'][ind:-1]+1) radar_aux.nrays = np.sum(radar_aux.rays_per_sweep['data']) radar_aux.fields[field_name]['data'] = np.ma.masked_all( (radar_aux.nrays, radar_aux.ngates), dtype=radar.fields[field_name]['data'].dtype) radar_aux.azimuth['data'] = np.empty( radar_aux.nrays, dtype=radar.azimuth['data'].dtype) radar_aux.elevation['data'] = np.empty( radar_aux.nrays, dtype=radar.elevation['data'].dtype) radar_aux.time['data'] = np.empty( radar_aux.nrays, dtype=radar.time['data'].dtype) for sweep in range(radar.nsweeps): ind_start_old = radar.sweep_start_ray_index['data'][sweep] nrays_sweep = radar_aux.rays_per_sweep['data'][sweep] ind_start_new = radar_aux.sweep_start_ray_index['data'][ sweep] ind_end_new = 
radar_aux.sweep_end_ray_index['data'][sweep] radar_aux.fields[field_name]['data'][ ind_start_new:ind_end_new+1, :] = ( radar.fields[field_name]['data'][ ind_start_old:ind_start_old+nrays_sweep, :]) radar_aux.azimuth['data'][ind_start_new:ind_end_new+1] = ( radar.azimuth['data'][ ind_start_old:ind_start_old+nrays_sweep]) radar_aux.elevation['data'][ ind_start_new:ind_end_new+1] = ( radar.elevation['data'][ ind_start_old:ind_start_old+nrays_sweep]) radar_aux.time['data'][ind_start_new:ind_end_new+1] = ( radar.time['data'][ ind_start_old:ind_start_old+nrays_sweep]) radar_hist = deepcopy(radar_aux) radar_hist.fields = dict() radar_hist.range['data'] = bin_centers radar_hist.ngates = nbins field_dict = pyart.config.get_metadata(field_name) field_dict['data'] = np.ma.zeros((radar_aux.nrays, nbins), dtype=int) field = deepcopy(radar_aux.fields[field_name]['data']) # put gates with values off limits to limit mask = np.ma.getmaskarray(field) ind = np.where(np.logical_and(mask == False, field < bin_centers[0])) field[ind] = bin_centers[0] ind = np.where(np.logical_and(mask == False, field > bin_centers[-1])) field[ind] = bin_centers[-1] for ray in range(radar_aux.nrays): field_dict['data'][ray, :], bin_edges = np.histogram( field[ray, :].compressed(), bins=bin_edges) radar_hist.add_field(field_name, field_dict) start_time = pyart.graph.common.generate_radar_time_begin(radar_hist) # keep histogram in Memory or add to existing histogram if dscfg['initialized'] == 0: dscfg['global_data'] = {'hist_obj': radar_hist, 'timeinfo': start_time} dscfg['initialized'] = 1 else: field_interp = interpol_field( dscfg['global_data']['hist_obj'], radar_hist, field_name, fill_value=0) dscfg['global_data']['hist_obj'].fields[field_name]['data'] += ( field_interp['data'].filled(fill_value=0)).astype('int64') # dscfg['global_data']['timeinfo'] = dscfg['timeinfo'] dataset = dict() dataset.update({'hist_obj': radar_hist}) dataset.update({'hist_type': 'instant'}) dataset.update({'timeinfo': 
start_time}) return dataset, ind_rad if procstatus == 2: if dscfg['initialized'] == 0: return None, None for datatypedescr in dscfg['datatype']: radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr) field_name = get_fieldname_pyart(datatype) break ind_rad = int(radarnr[5:8])-1 dataset = dict() dataset.update({'hist_obj': dscfg['global_data']['hist_obj']}) dataset.update({'hist_type': 'cumulative'}) dataset.update({'timeinfo': dscfg['global_data']['timeinfo']}) return dataset, ind_rad
38.861423
86
0.597083
7,327
62,256
4.878668
0.061144
0.021261
0.03136
0.013428
0.821938
0.80644
0.78336
0.765009
0.750909
0.732502
0
0.013434
0.314893
62,256
1,601
87
38.885696
0.824651
0.272183
0
0.716942
1
0
0.196345
0.047838
0
0
0
0
0
1
0.008264
false
0
0.009298
0
0.063017
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
cb1fb11d71097166f5d564ef924aa75e741f9d9a
2,902
py
Python
Plugins/UnrealEnginePython/Binaries/Win64/Lib/site-packages/tensorflow/_api/v1/linalg/__init__.py
JustinACoder/H22-GR3-UnrealAI
361eb9ef1147f8a2991e5f98c4118cd823184adf
[ "MIT" ]
6
2022-02-04T18:12:24.000Z
2022-03-21T23:57:12.000Z
Lib/site-packages/tensorflow/_api/v1/linalg/__init__.py
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
1fa4cd6a566c8745f455fc3d2273208f21f88ced
[ "bzip2-1.0.6" ]
null
null
null
Lib/site-packages/tensorflow/_api/v1/linalg/__init__.py
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
1fa4cd6a566c8745f455fc3d2273208f21f88ced
[ "bzip2-1.0.6" ]
1
2022-02-08T03:53:23.000Z
2022-02-08T03:53:23.000Z
# This file is MACHINE GENERATED! Do not edit. # Generated by: tensorflow/python/tools/api/generator/create_python_api.py script. """Operations for linear algebra. """ from __future__ import print_function from tensorflow.python import cholesky from tensorflow.python import cholesky_solve from tensorflow.python import cross from tensorflow.python import diag as tensor_diag from tensorflow.python import diag_part as tensor_diag_part from tensorflow.python import einsum from tensorflow.python import eye from tensorflow.python import global_norm from tensorflow.python import log_matrix_determinant as slogdet from tensorflow.python import matmul from tensorflow.python import matrix_band_part as band_part from tensorflow.python import matrix_determinant as det from tensorflow.python import matrix_diag as diag from tensorflow.python import matrix_diag_part as diag_part from tensorflow.python import matrix_inverse as inv from tensorflow.python import matrix_logarithm as logm from tensorflow.python import matrix_set_diag as set_diag from tensorflow.python import matrix_solve as solve from tensorflow.python import matrix_solve_ls as lstsq from tensorflow.python import matrix_transpose as transpose from tensorflow.python import matrix_triangular_solve as triangular_solve from tensorflow.python import norm from tensorflow.python import qr from tensorflow.python import self_adjoint_eig as eigh from tensorflow.python import self_adjoint_eigvals as eigvalsh from tensorflow.python import svd from tensorflow.python import tensordot from tensorflow.python import trace from tensorflow.python.ops.linalg.linalg import LinearOperator from tensorflow.python.ops.linalg.linalg import LinearOperatorBlockDiag from tensorflow.python.ops.linalg.linalg import LinearOperatorCirculant from tensorflow.python.ops.linalg.linalg import LinearOperatorCirculant2D from tensorflow.python.ops.linalg.linalg import LinearOperatorCirculant3D from tensorflow.python.ops.linalg.linalg import 
LinearOperatorComposition from tensorflow.python.ops.linalg.linalg import LinearOperatorDiag from tensorflow.python.ops.linalg.linalg import LinearOperatorFullMatrix from tensorflow.python.ops.linalg.linalg import LinearOperatorIdentity from tensorflow.python.ops.linalg.linalg import LinearOperatorKronecker from tensorflow.python.ops.linalg.linalg import LinearOperatorLowRankUpdate from tensorflow.python.ops.linalg.linalg import LinearOperatorLowerTriangular from tensorflow.python.ops.linalg.linalg import LinearOperatorScaledIdentity from tensorflow.python.ops.linalg.linalg import LinearOperatorZeros from tensorflow.python.ops.linalg.linalg import adjoint from tensorflow.python.ops.linalg.linalg import logdet from tensorflow.python.ops.linalg.linalg import matrix_exponential as expm from tensorflow.python.ops.nn import l2_normalize del print_function
51.821429
83
0.855617
383
2,902
6.373368
0.224543
0.30807
0.376895
0.298238
0.595658
0.424007
0.285539
0
0
0
0
0.001153
0.103722
2,902
55
84
52.763636
0.937332
0.054101
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
0.979167
0
0.979167
0.041667
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
cb3dfb6191946d73c05959de5c99b8ee9660001a
178
py
Python
src/sqlIntuitive/exceptions/parameterExceptions/subexceptions/__init__.py
einfachIrgendwer0815/SqlIntuitive
11a0548ac2d6cfce295952bbf0f09a4faa4c42af
[ "MIT" ]
6
2021-09-10T10:34:47.000Z
2022-03-09T13:50:39.000Z
src/sqlIntuitive/exceptions/parameterExceptions/subexceptions/__init__.py
einfachIrgendwer0815/SqlIntuitive
11a0548ac2d6cfce295952bbf0f09a4faa4c42af
[ "MIT" ]
1
2021-11-25T07:10:16.000Z
2021-11-26T12:18:14.000Z
src/sqlIntuitive/exceptions/parameterExceptions/subexceptions/__init__.py
einfachIrgendwer0815/SqlIntuitive
11a0548ac2d6cfce295952bbf0f09a4faa4c42af
[ "MIT" ]
null
null
null
from sqlIntuitive.exceptions.parameterExceptions.subexceptions.invalidConstraintExceptions import * from sqlIntuitive.exceptions.parameterExceptions.subexceptions.other import *
59.333333
99
0.898876
14
178
11.428571
0.571429
0.2
0.325
0.5625
0.725
0
0
0
0
0
0
0
0.044944
178
2
100
89
0.941176
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
cb8ab5949ae70e4dece989275017f23b82f3adbc
171
py
Python
physDBD/gauss/__init__.py
smrfeld/phys_dbd
38743d753a8a0fe9259453da6fce7260ec175364
[ "MIT" ]
null
null
null
physDBD/gauss/__init__.py
smrfeld/phys_dbd
38743d753a8a0fe9259453da6fce7260ec175364
[ "MIT" ]
null
null
null
physDBD/gauss/__init__.py
smrfeld/phys_dbd
38743d753a8a0fe9259453da6fce7260ec175364
[ "MIT" ]
null
null
null
from .dparams0_traj import * from .dparams0 import * from .net import * from .params_traj import * from .params import * from .params0_traj import * from .params0 import *
24.428571
28
0.760234
24
171
5.291667
0.291667
0.472441
0.330709
0
0
0
0
0
0
0
0
0.027778
0.157895
171
7
29
24.428571
0.854167
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
cbdff8a6104a7ffe10627c3836cbce07ec0bb989
1,556
py
Python
dpa_chile/models.py
epineda/django-dpa-chile
aeb00b1d113d6e7bc896426552e4507afd166ef3
[ "MIT" ]
1
2019-03-08T03:20:55.000Z
2019-03-08T03:20:55.000Z
dpa_chile/models.py
epineda/django-dpa-chile
aeb00b1d113d6e7bc896426552e4507afd166ef3
[ "MIT" ]
null
null
null
dpa_chile/models.py
epineda/django-dpa-chile
aeb00b1d113d6e7bc896426552e4507afd166ef3
[ "MIT" ]
4
2019-03-08T03:34:32.000Z
2021-04-21T13:57:28.000Z
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class Region(models.Model): """ Region object """ codigo = models.CharField(max_length=10, primary_key=True) tipo = models.CharField(max_length=10) nombre = models.CharField(max_length=255) lat = models.CharField(max_length=50) lng = models.CharField(max_length=50) url = models.URLField(max_length=200) def __str__(self): return self.nombre @python_2_unicode_compatible class Provincia(models.Model): """ Provincia object """ codigo = models.CharField(max_length=10, primary_key=True) tipo = models.CharField(max_length=10) nombre = models.CharField(max_length=255) lat = models.CharField(max_length=50) lng = models.CharField(max_length=50) url = models.URLField(max_length=200) region = models.ForeignKey('Region') def __str__(self): return self.nombre @python_2_unicode_compatible class Comuna(models.Model): """ Comuna object """ codigo = models.CharField(max_length=10, primary_key=True) tipo = models.CharField(max_length=10) nombre = models.CharField(max_length=255) lat = models.CharField(max_length=50) lng = models.CharField(max_length=50) url = models.URLField(max_length=200) region = models.ForeignKey('Region') provincia = models.ForeignKey('Provincia') def __str__(self): return self.nombre
27.785714
62
0.709512
199
1,556
5.296482
0.221106
0.1537
0.256167
0.341556
0.78463
0.757116
0.732448
0.732448
0.732448
0.732448
0
0.036863
0.180591
1,556
55
63
28.290909
0.789804
0.043059
0
0.805556
0
0
0.014553
0
0
0
0
0
0
1
0.083333
false
0
0.083333
0.083333
0.916667
0
0
0
0
null
0
1
1
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
9
6985e9241e1e8f0bddaeed11e0fa925f03e9e79b
79
py
Python
app/models/__init__.py
eddy0/todolist
e0b01961271120631eb53dc5364f0fbf15db5d2f
[ "Apache-2.0" ]
null
null
null
app/models/__init__.py
eddy0/todolist
e0b01961271120631eb53dc5364f0fbf15db5d2f
[ "Apache-2.0" ]
null
null
null
app/models/__init__.py
eddy0/todolist
e0b01961271120631eb53dc5364f0fbf15db5d2f
[ "Apache-2.0" ]
null
null
null
from app.models.item_model import Item from app.models.user_model import User
19.75
38
0.835443
14
79
4.571429
0.5
0.21875
0.40625
0
0
0
0
0
0
0
0
0
0.113924
79
3
39
26.333333
0.914286
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
6992ca99d13da8801d97284f7680e10b4e4644b9
3,011
py
Python
Lab3/q2_a.py
ViniciusRCortez/Monitoria-de-metodos-numericos-com-python
85678fe8907752533d0dc97dc83550411ba079f0
[ "MIT" ]
null
null
null
Lab3/q2_a.py
ViniciusRCortez/Monitoria-de-metodos-numericos-com-python
85678fe8907752533d0dc97dc83550411ba079f0
[ "MIT" ]
null
null
null
Lab3/q2_a.py
ViniciusRCortez/Monitoria-de-metodos-numericos-com-python
85678fe8907752533d0dc97dc83550411ba079f0
[ "MIT" ]
null
null
null
""" Objetivo: Resolver questão 2a do terceiro laboratorio. é exatamente o mesmo codigo da questão anterior porém mudando a função a ser passada e o palpite """ import matplotlib.pyplot as plt import numpy as np from Newton_Raphson import newton_raphson #PARA X0 = 1 y = lambda x: 180*x**3 - 117*x**2 - 80*x + 52 # f(x) = 180x^3 - 117x^2 - 80x + 52 y_linha = lambda x: 540*x**2 - 234*x - 80 # f'(x) = 540x^2 - 234x - 80 ini = 1 # ponto inicial #Verificar o arquivo Nexton-Raphson.py para entender o funcionamento dessa função resp = newton_raphson(ini, y, y_linha, 'q2') print(f'Para essa X0 = 1 levamos {resp["iterações"] - 1} iterações e temos como valor de f(x) = {resp["modulo"]}') print('Abaixo seguem os graficos dos valores de f(x) e dos erros, respectivamente:') # Grafico com os valores de f(x) x_a = np.linspace(-1.2, 1.2, 30) # valores escolhidos para melhor visualização do grafico y_a = [y(i) for i in x_a] plt.style.use('ggplot') plt.figure(figsize=(7, 5)) plt.title(f'F(x) por X\ncom X0 = 1') plt.xlabel('Valores de x') plt.ylabel('Valores de f(x)') # função com valores entre 0.4 e 0.8 plt.plot(x_a, y_a, label='Função exata') # Nossa aproximação Newton-Raphson por iteração plt.plot(resp['valores'], resp['valores_função'], label='Valores de Newton-Raphson por iteração') plt.tight_layout() plt.legend(loc='best') plt.show() # Grafico com os valores dos erros plt.style.use('ggplot') plt.figure(figsize=(7, 5)) plt.title('Erro por Iterações') plt.xlabel('Numero de iterações') plt.ylabel('Valores dos erros') plt.plot([i for i in range(1, resp['iterações'])], resp['erros']) plt.tight_layout() plt.show() #PARA X0 = -1 y = lambda x: 180*x**3 - 117*x**2 - 80*x + 52 # f(x) = 180x^3 - 117x^2 - 80x + 52 y_linha = lambda x: 540*x**2 - 234*x - 80 # f'(x) = 540x^2 - 234x - 80 ini = -1 # ponto inicial #Verificar o arquivo Nexton-Raphson.py para entender o funcionamento dessa função resp = newton_raphson(ini, y, y_linha, 'q2') print(f'Para essa X0 = -1 levamos {resp["iterações"] - 1} 
iterações e temos como valor de f(x) = {resp["modulo"]}') print('Abaixo seguem os graficos dos valores de f(x) e dos erros, respectivamente:') # Grafico com os valores de f(x) x_a = np.linspace(-1.2, 1.2, 30) # valores escolhidos para melhor visualização do grafico y_a = [y(i) for i in x_a] plt.style.use('ggplot') plt.figure(figsize=(7, 5)) plt.title(f'F(x) por X\ncom X0 = -1') plt.xlabel('Valores de x') plt.ylabel('Valores de f(x)') # função com valores entre 0.4 e 0.8 plt.plot(x_a, y_a, label='Função exata') # Nossa aproximação Newton-Raphson por iteração plt.plot(resp['valores'], resp['valores_função'], label='Valores de Newton-Raphson por iteração') plt.tight_layout() plt.legend(loc='best') plt.show() # Grafico com os valores dos erros plt.style.use('ggplot') plt.figure(figsize=(7, 5)) plt.title('Erro por Iterações') plt.xlabel('Numero de iterações') plt.ylabel('Valores dos erros') plt.plot([i for i in range(1, resp['iterações'])], resp['erros']) plt.tight_layout() plt.show()
38.602564
115
0.698439
546
3,011
3.807692
0.225275
0.013468
0.015392
0.031746
0.902357
0.902357
0.902357
0.902357
0.902357
0.902357
0
0.051751
0.146463
3,011
77
116
39.103896
0.757198
0.295251
0
0.836364
0
0.036364
0.368496
0
0
0
0
0
0
1
0
false
0
0.054545
0
0.054545
0.072727
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
699fe19a53dcca152156ecb428694f3408e21869
4,637
py
Python
fittingjob/fittingjob/test_datadog.py
cyberagent-oss/intelligent-hpa
3b9db9101a885a3b7c54e0e237ad343bde17e9f4
[ "Apache-2.0" ]
27
2020-06-13T05:17:05.000Z
2022-03-02T05:27:47.000Z
fittingjob/fittingjob/test_datadog.py
cyberagent-oss/intelligent-hpa
3b9db9101a885a3b7c54e0e237ad343bde17e9f4
[ "Apache-2.0" ]
2
2020-06-14T05:54:38.000Z
2021-04-30T21:27:13.000Z
fittingjob/fittingjob/test_datadog.py
cyberagent-oss/intelligent-hpa
3b9db9101a885a3b7c54e0e237ad343bde17e9f4
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3 from datetime import datetime import pytest from fittingjob import datadog @pytest.mark.parametrize( 'tags, expected', [ ( {'key': 'value'}, 'key:value' ), ( {'key1': 'value1', 'key2': 'value2', 'key3': 'value3'}, 'key1:value1,key2:value2,key3:value3' ), ( {}, '' ) ] ) def test_tags_string(tags, expected): assert datadog.tags_string(tags) == expected @pytest.mark.parametrize( 'end, days, hours, minutes, expected', [ ( datetime(year=2019, month=12, day=25), 5, 0, 0, [ { 'start': datetime(year=2019, month=12, day=20), 'end': datetime(year=2019, month=12, day=21) }, { 'start': datetime(year=2019, month=12, day=21), 'end': datetime(year=2019, month=12, day=22) }, { 'start': datetime(year=2019, month=12, day=22), 'end': datetime(year=2019, month=12, day=23) }, { 'start': datetime(year=2019, month=12, day=23), 'end': datetime(year=2019, month=12, day=24) }, { 'start': datetime(year=2019, month=12, day=24), 'end': datetime(year=2019, month=12, day=25) }, ] ), ( datetime(year=2019, month=12, day=25, hour=3), 5, 12, 0, [ { 'start': datetime(year=2019, month=12, day=19, hour=15), 'end': datetime(year=2019, month=12, day=20, hour=15) }, { 'start': datetime(year=2019, month=12, day=20, hour=15), 'end': datetime(year=2019, month=12, day=21, hour=15) }, { 'start': datetime(year=2019, month=12, day=21, hour=15), 'end': datetime(year=2019, month=12, day=22, hour=15) }, { 'start': datetime(year=2019, month=12, day=22, hour=15), 'end': datetime(year=2019, month=12, day=23, hour=15) }, { 'start': datetime(year=2019, month=12, day=23, hour=15), 'end': datetime(year=2019, month=12, day=24, hour=15) }, { 'start': datetime(year=2019, month=12, day=24, hour=15), 'end': datetime(year=2019, month=12, day=25, hour=3) }, ] ), ( datetime(year=2019, month=12, day=25), 5, 36, 10, [ { 'start': datetime(year=2019, month=12, day=18, hour=11, minute=50), 'end': datetime(year=2019, month=12, day=19, hour=11, minute=50) }, { 'start': datetime(year=2019, month=12, day=19, 
hour=11, minute=50), 'end': datetime(year=2019, month=12, day=20, hour=11, minute=50) }, { 'start': datetime(year=2019, month=12, day=20, hour=11, minute=50), 'end': datetime(year=2019, month=12, day=21, hour=11, minute=50) }, { 'start': datetime(year=2019, month=12, day=21, hour=11, minute=50), 'end': datetime(year=2019, month=12, day=22, hour=11, minute=50) }, { 'start': datetime(year=2019, month=12, day=22, hour=11, minute=50), 'end': datetime(year=2019, month=12, day=23, hour=11, minute=50) }, { 'start': datetime(year=2019, month=12, day=23, hour=11, minute=50), 'end': datetime(year=2019, month=12, day=24, hour=11, minute=50) }, { 'start': datetime(year=2019, month=12, day=24, hour=11, minute=50), 'end': datetime(year=2019, month=12, day=25) }, ] ) ] ) def test_separate_date_range_per_days(end, days, hours, minutes, expected): assert datadog.separate_date_range_per_days( end, days, hours, minutes) == expected
35.128788
87
0.418805
475
4,637
4.063158
0.122105
0.242487
0.323316
0.424352
0.873057
0.859067
0.827979
0.811917
0.61399
0.36114
0
0.158602
0.43843
4,637
131
88
35.396947
0.582565
0.004529
0
0.129032
0
0
0.059588
0.007584
0
0
0
0
0.016129
1
0.016129
false
0
0.024194
0
0.040323
0
0
0
0
null
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
69ad5864332d2ea6177ae5bd31905f17a9391f01
108
py
Python
PaGraph/partition/__init__.py
lzhangbv/PaGraph
2686a4e2ffcba7de3af1e6bb015b8a4d211160b8
[ "MIT" ]
13
2021-06-25T23:55:12.000Z
2022-02-16T22:40:18.000Z
PaGraph/partition/__init__.py
lzhangbv/PaGraph
2686a4e2ffcba7de3af1e6bb015b8a4d211160b8
[ "MIT" ]
null
null
null
PaGraph/partition/__init__.py
lzhangbv/PaGraph
2686a4e2ffcba7de3af1e6bb015b8a4d211160b8
[ "MIT" ]
6
2021-05-15T13:51:43.000Z
2022-03-17T07:06:30.000Z
from .partition import kl_2partition from .refine import build_train_graph from .refine import wrap_neighbor
36
37
0.87037
16
108
5.625
0.6875
0.222222
0.355556
0
0
0
0
0
0
0
0
0.010309
0.101852
108
3
38
36
0.917526
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
69e653292be8a77869622bf4d3a708932f857957
2,526
py
Python
tests/test_mission/test_channel_tracker.py
matan1008/srsran-controller
8389a78976efb7dfe3ef5dc17f5ac14adcae732c
[ "MIT" ]
null
null
null
tests/test_mission/test_channel_tracker.py
matan1008/srsran-controller
8389a78976efb7dfe3ef5dc17f5ac14adcae732c
[ "MIT" ]
null
null
null
tests/test_mission/test_channel_tracker.py
matan1008/srsran-controller
8389a78976efb7dfe3ef5dc17f5ac14adcae732c
[ "MIT" ]
null
null
null
from srsran_controller.mission.channel_tracker import ChannelTracker, RA_RESPONSE_NAME, ATTACH_REQUEST_NAME, \ ATTACH_ACCEPT_NAME def test_create_channel(): tracker = ChannelTracker() tracker.handle_uu_event({'c-rnti': 20, 'ta': 5, 'event': RA_RESPONSE_NAME}) event = {'rnti': 20} tracker.enrich_event(event) assert event == {'rnti': 20, 'ta': 5} def test_attach_request(): tracker = ChannelTracker() tracker.handle_uu_event({'c-rnti': 20, 'ta': 5, 'event': RA_RESPONSE_NAME}) tracker.handle_uu_event({'rnti': 20, 'imsi': '001010123456789', 'event': ATTACH_REQUEST_NAME}) event = {'rnti': 20} tracker.enrich_event(event) assert event == {'rnti': 20, 'ta': 5, 'imsi': '001010123456789'} def test_attach_accept(): tracker = ChannelTracker() tracker.handle_uu_event({'c-rnti': 20, 'ta': 3, 'event': RA_RESPONSE_NAME}) tracker.handle_uu_event({'rnti': 20, 'imsi': '001010123456789', 'event': ATTACH_REQUEST_NAME}) tracker.handle_uu_event({'rnti': 20, 'tmsi': '0x53d764bc', 'event': ATTACH_ACCEPT_NAME, 'ip': '172.16.0.2'}) event = {'rnti': 20} tracker.enrich_event(event) assert event == {'rnti': 20, 'ta': 3, 'imsi': '001010123456789', 'ip': '172.16.0.2'} def test_imsi_to_ip(): tracker = ChannelTracker() # Test receiving the correct IP. tracker.handle_uu_event({'c-rnti': 20, 'ta': 3, 'event': RA_RESPONSE_NAME}) tracker.handle_uu_event({'rnti': 20, 'imsi': '001010123456789', 'event': ATTACH_REQUEST_NAME}) tracker.handle_uu_event({'rnti': 20, 'tmsi': '0x53d764bc', 'event': ATTACH_ACCEPT_NAME, 'ip': '172.16.0.2'}) assert tracker.imsi_to_ip('001010123456789') == '172.16.0.2' # Test receiving the correct IP after other subsribers attach. 
tracker.handle_uu_event({'c-rnti': 21, 'ta': 3, 'event': RA_RESPONSE_NAME}) tracker.handle_uu_event({'rnti': 21, 'imsi': '001010123456788', 'event': ATTACH_REQUEST_NAME}) tracker.handle_uu_event({'rnti': 21, 'tmsi': '0x53d764bd', 'event': ATTACH_ACCEPT_NAME, 'ip': '172.16.0.3'}) assert tracker.imsi_to_ip('001010123456789') == '172.16.0.2' # Test receiving the correct IP after assigning new IP. tracker.handle_uu_event({'c-rnti': 24, 'ta': 3, 'event': RA_RESPONSE_NAME}) tracker.handle_uu_event({'rnti': 24, 'imsi': '001010123456789', 'event': ATTACH_REQUEST_NAME}) tracker.handle_uu_event({'rnti': 24, 'tmsi': '0x53d764bc', 'event': ATTACH_ACCEPT_NAME, 'ip': '172.16.0.4'}) assert tracker.imsi_to_ip('001010123456789') == '172.16.0.4'
51.55102
112
0.680523
349
2,526
4.679083
0.146132
0.119412
0.137783
0.183711
0.807103
0.787508
0.772198
0.749541
0.731782
0.677893
0
0.129374
0.140143
2,526
48
113
52.625
0.622468
0.057403
0
0.567568
0
0
0.215818
0
0
0
0.016828
0
0.162162
1
0.108108
false
0
0.027027
0
0.135135
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
69e9b0d0cfd3b4ba1ecd1565324b82b3aec20c53
109
py
Python
src/reset_hardware.py
foxdb/raspi-chill
f699c584fc40e15e16faa2c9eee24365fea661ba
[ "MIT" ]
1
2021-08-12T11:24:43.000Z
2021-08-12T11:24:43.000Z
src/reset_hardware.py
foxdb/raspi-chill
f699c584fc40e15e16faa2c9eee24365fea661ba
[ "MIT" ]
2
2018-10-15T13:11:48.000Z
2018-11-11T22:30:11.000Z
src/reset_hardware.py
foxdb/raspi-chill
f699c584fc40e15e16faa2c9eee24365fea661ba
[ "MIT" ]
null
null
null
from buzzer import turn_buzzer_off from cooler import turn_cooling_off turn_cooling_off() turn_buzzer_off()
18.166667
35
0.862385
18
109
4.777778
0.388889
0.232558
0.302326
0.418605
0
0
0
0
0
0
0
0
0.100917
109
5
36
21.8
0.877551
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
69edfd510cfd9f2799c4eaed33a5649eec5a3ce6
15,808
py
Python
convertCallData.py
supernovahs/multisig
8de1362d84c0f61cb1efc00319c9362eb4379f37
[ "MIT" ]
null
null
null
convertCallData.py
supernovahs/multisig
8de1362d84c0f61cb1efc00319c9362eb4379f37
[ "MIT" ]
null
null
null
convertCallData.py
supernovahs/multisig
8de1362d84c0f61cb1efc00319c9362eb4379f37
[ "MIT" ]
null
null
null
import traceback import sys from functools import lru_cache from web3 import Web3 from web3.auto import w3 from web3.contract import Contract from web3._utils.events import get_event_data from web3._utils.abi import exclude_indexed_event_inputs, get_abi_input_names, get_indexed_event_inputs, normalize_event_input_types from web3.exceptions import MismatchedABI, LogTopicError from web3.types import ABIEvent from eth_utils import event_abi_to_log_topic, to_hex from hexbytes import HexBytes import json import re def decode_tuple(t, target_field): output = dict() for i in range(len(t)): if isinstance(t[i], (bytes, bytearray)): output[target_field[i]['name']] = to_hex(t[i]) elif isinstance(t[i], (tuple)): output[target_field[i]['name']] = decode_tuple(t[i], target_field[i]['components']) else: output[target_field[i]['name']] = t[i] return output def decode_list_tuple(l, target_field): output = l for i in range(len(l)): output[i] = decode_tuple(l[i], target_field) return output def decode_list(l): output = l for i in range(len(l)): if isinstance(l[i], (bytes, bytearray)): output[i] = to_hex(l[i]) else: output[i] = l[i] return output def convert_to_hex(arg, target_schema): """ utility function to convert byte codes into human readable and json serializable data structures """ output = dict() for k in arg: if isinstance(arg[k], (bytes, bytearray)): output[k] = to_hex(arg[k]) elif isinstance(arg[k], (list)) and len(arg[k]) > 0: target = [a for a in target_schema if 'name' in a and a['name'] == k][0] if target['type'] == 'tuple[]': target_field = target['components'] output[k] = decode_list_tuple(arg[k], target_field) else: output[k] = decode_list(arg[k]) elif isinstance(arg[k], (tuple)): target_field = [a['components'] for a in target_schema if 'name' in a and a['name'] == k][0] output[k] = decode_tuple(arg[k], target_field) else: output[k] = arg[k] return output @lru_cache(maxsize=None) def _get_contract(address, abi): """ This helps speed up execution of decoding across a 
large dataset by caching the contract object It assumes that we are decoding a small set, on the order of thousands, of target smart contracts """ if isinstance(abi, (str)): abi = json.loads(abi) contract = w3.eth.contract(address=Web3.toChecksumAddress(address), abi=abi) return (contract, abi) def decode_tx(address, input_data, abi): if abi is not None: try: (contract, abi) = _get_contract(address, abi) func_obj, func_params = contract.decode_function_input(input_data) target_schema = [a['inputs'] for a in abi if 'name' in a and a['name'] == func_obj.fn_name][0] decoded_func_params = convert_to_hex(func_params, target_schema) return (func_obj.fn_name, json.dumps(decoded_func_params), json.dumps(target_schema)) except: e = sys.exc_info()[0] return ('decode error', repr(e), None) else: return ('no matching abi', None, None) sample_abi = '[{"inputs":[{"internalType":"address","name":"_factory","type":"address"},{"internalType":"address","name":"_WETH","type":"address"}],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[],"name":"WETH","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"tokenA","type":"address"},{"internalType":"address","name":"tokenB","type":"address"},{"internalType":"uint256","name":"amountADesired","type":"uint256"},{"internalType":"uint256","name":"amountBDesired","type":"uint256"},{"internalType":"uint256","name":"amountAMin","type":"uint256"},{"internalType":"uint256","name":"amountBMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"addLiquidity","outputs":[{"internalType":"uint256","name":"amountA","type":"uint256"},{"internalType":"uint256","name":"amountB","type":"uint256"},{"internalType":"uint256","name":"liquidity","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":
"token","type":"address"},{"internalType":"uint256","name":"amountTokenDesired","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"addLiquidityETH","outputs":[{"internalType":"uint256","name":"amountToken","type":"uint256"},{"internalType":"uint256","name":"amountETH","type":"uint256"},{"internalType":"uint256","name":"liquidity","type":"uint256"}],"stateMutability":"payable","type":"function"},{"inputs":[],"name":"factory","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"uint256","name":"reserveIn","type":"uint256"},{"internalType":"uint256","name":"reserveOut","type":"uint256"}],"name":"getAmountIn","outputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"reserveIn","type":"uint256"},{"internalType":"uint256","name":"reserveOut","type":"uint256"}],"name":"getAmountOut","outputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"}],"name":"getAmountsIn","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"}],"name":"getAmountsOut","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"view","type":"fu
nction"},{"inputs":[{"internalType":"uint256","name":"amountA","type":"uint256"},{"internalType":"uint256","name":"reserveA","type":"uint256"},{"internalType":"uint256","name":"reserveB","type":"uint256"}],"name":"quote","outputs":[{"internalType":"uint256","name":"amountB","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"address","name":"tokenA","type":"address"},{"internalType":"address","name":"tokenB","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountAMin","type":"uint256"},{"internalType":"uint256","name":"amountBMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"removeLiquidity","outputs":[{"internalType":"uint256","name":"amountA","type":"uint256"},{"internalType":"uint256","name":"amountB","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"removeLiquidityETH","outputs":[{"internalType":"uint256","name":"amountToken","type":"uint256"},{"internalType":"uint256","name":"amountETH","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"removeLiquidit
yETHSupportingFeeOnTransferTokens","outputs":[{"internalType":"uint256","name":"amountETH","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"bool","name":"approveMax","type":"bool"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"removeLiquidityETHWithPermit","outputs":[{"internalType":"uint256","name":"amountToken","type":"uint256"},{"internalType":"uint256","name":"amountETH","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"bool","name":"approveMax","type":"bool"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"removeLiquidityETHWithPermitSupportingFeeOnTransferTokens","outputs":[{"internalType":"uint256","name":"amountETH","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"tokenA","type":"address"},{"internalType":"address","name":"tokenB","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":
"uint256","name":"amountAMin","type":"uint256"},{"internalType":"uint256","name":"amountBMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"bool","name":"approveMax","type":"bool"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"removeLiquidityWithPermit","outputs":[{"internalType":"uint256","name":"amountA","type":"uint256"},{"internalType":"uint256","name":"amountB","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapETHForExactTokens","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactETHForTokens","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactETHForTokensSupportingFeeOnTransferTokens","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"amountOutMin","type":"u
int256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactTokensForETH","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactTokensForETHSupportingFeeOnTransferTokens","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactTokensForTokens","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactTokensForTokensSupportingFeeOnTransferTokens","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"uint256","name":"amountInMax","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256"
,"name":"deadline","type":"uint256"}],"name":"swapTokensForExactETH","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"uint256","name":"amountInMax","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapTokensForExactTokens","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"nonpayable","type":"function"},{"stateMutability":"payable","type":"receive"}]' output = decode_tx('0x7a250d5630b4cf539739df2c5dacb4c659f2488d', '0x38ed1739000000000000000000000000000000000000000000000000000000009502f900000000000000000000000000000000000000000000a07e38bf71936cbe39594100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000003c02cebb49f6e8f1fc96158099ffa064bbfee38b00000000000000000000000000000000000000000000000000000000616e11230000000000000000000000000000000000000000000000000000000000000003000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000528b3e98c63ce21c6f680b713918e0f89dfae555', sample_abi) print('function called: ', output[0]) print('arguments: ', json.dumps(json.loads(output[1]), indent=2)) view rawdecode_transaction.py hosted with ❤ by GitHub
164.666667
11,890
0.692561
1,610
15,808
6.747826
0.130435
0.113402
0.201123
0.102172
0.699558
0.678203
0.668354
0.661727
0.647183
0.630707
0
0.075799
0.038588
15,808
96
11,891
164.666667
0.638966
0
0
0.189873
0
0.012658
0.816128
0.807218
0
0
0.040545
0
0
0
null
null
0
0.177215
null
null
0.025316
0
0
0
null
0
1
0
0
0
0
0
0
1
0
0
0
0
0
1
1
0
0
0
0
0
0
1
1
null
0
0
0
0
1
0
0
0
0
0
0
0
0
7
0e0bc40fd5d6f9970419fa055801ba082976dcb8
17,454
py
Python
bloscpickle/test.py
robbmcleod/bloscpickle
8ab0a92a82de4bd4f699d0412973fac3321ba88e
[ "BSD-2-Clause" ]
1
2021-12-30T08:16:11.000Z
2021-12-30T08:16:11.000Z
bloscpickle/test.py
robbmcleod/bloscpickle
8ab0a92a82de4bd4f699d0412973fac3321ba88e
[ "BSD-2-Clause" ]
null
null
null
bloscpickle/test.py
robbmcleod/bloscpickle
8ab0a92a82de4bd4f699d0412973fac3321ba88e
[ "BSD-2-Clause" ]
null
null
null
# coding=UTF-8 """ Created on Wed Dec 28 11:47:37 2016 @author: Robert A. McLeod """ import bloscpickle import json import ujson import rapidjson import pickle import marshal import msgpack import os, os.path import uuid from time import time MB = 2**20 from itertools import count; COUNTER = count() import matplotlib.pyplot as plt import numpy as np ####### MODULE TESTS ####### #from memory_profiler import profile #@profile def testUUID(): """ UUIDs are more or less random ascii codes, so they represent a difficult target for data compression. """ write = {} read = {} sizes = {} testDict = {} testDict['name'] = "Foo" testDict['id'] = next(COUNTER) testDict['uuCount'] = 2**18 testDict['uuids'] = [str(uuid.uuid4()) for I in range( testDict['uuCount'] )] def execTest( testDict, pickler, useBlosc=False, compressor='zstd', clevel=1, shuffle=0, **pickler_kwargs ): if useBlosc: picklerName = 'blosc_{}{}_'.format(compressor,clevel) + pickler.__name__ filename = "testfile." + picklerName with open( filename, 'wb' ) as stream: t0 = time() bloscpickle.dump(testDict, stream, pickler= pickler, compressor=compressor, clevel=clevel, shuffle=shuffle, **pickler_kwargs ) write[picklerName] = time() - t0 with open( filename, 'rb' ) as stream: t1 = time() outDict = bloscpickle.load( stream, pickler=pickler ) read[picklerName] = time() - t1 if pickler is msgpack: print( "WARNING: {} failed in/out assert test".format(picklerName) ) else: assert( testDict == outDict ) else: picklerName = pickler.__name__ filename = "testfile." 
+ picklerName if pickler in (pickle,marshal,msgpack): with open( filename, 'wb' ) as stream: t0 = time() pickler.dump( testDict, stream, **pickler_kwargs ) write[picklerName] = time() - t0 with open( filename, 'rb' ) as stream: t1 = time() outDict = pickler.load( stream ) read[picklerName] = time() - t1 try: assert( testDict == outDict ) except: print( "WARNING: {} failed in/out assert test".format(picklerName) ) else: with open( filename, 'w' ) as stream: t0 = time() pickler.dump( testDict, stream, **pickler_kwargs ) write[picklerName] = time() - t0 with open( filename, 'r' ) as stream: t1 = time() outDict = pickler.load( stream ) read[picklerName] = time() - t1 try: assert( testDict == outDict ) except: print( "WARNING: {} failed in/out assert test".format(picklerName) ) sizes[picklerName] = os.path.getsize( filename ) / MB os.remove( filename ) print( "{}:: write {:.2e} s, read {:.2e} s, size: {:.3f} MB"\ .format( picklerName, write[picklerName], read[picklerName], sizes[picklerName] ) ) return write[picklerName], sizes[picklerName] execTest( testDict, pickle ) execTest( testDict, pickle, useBlosc=True, compressor='zstd', clevel=1 ) execTest( testDict, pickle, useBlosc=True, compressor='lz4', clevel=9 ) execTest( testDict, marshal ) execTest( testDict, marshal, useBlosc=True, compressor='zstd', clevel=1 ) execTest( testDict, marshal, useBlosc=True, compressor='lz4', clevel=9 ) execTest( testDict, json, ensure_ascii=False ) execTest( testDict, json, useBlosc=True, compressor='zstd', clevel=1, ensure_ascii=False ) execTest( testDict, json, useBlosc=True, compressor='lz4', clevel=9, ensure_ascii=False ) execTest( testDict, rapidjson ) execTest( testDict, rapidjson, useBlosc=True, compressor='zstd', clevel=1 ) execTest( testDict, rapidjson, useBlosc=True, compressor='lz4', clevel=9 ) execTest( testDict, ujson, ensure_ascii=False ) execTest( testDict, ujson, useBlosc=True, compressor='zstd', clevel=1, ensure_ascii=False ) execTest( testDict, ujson, useBlosc=True, 
compressor='lz4', clevel=9, ensure_ascii=False ) execTest( testDict, msgpack, use_bin_type=False ) execTest( testDict, msgpack, useBlosc=True, compressor='zstd', clevel=1, use_bin_type=False ) execTest( testDict, msgpack, useBlosc=True, compressor='lz4', clevel=9, use_bin_type=False ) uncompressed_writes = [write['pickle'], write['marshal'], write['json'], write['rapidjson'], write['ujson'], write['msgpack'] ] zstd_writes = [write['blosc_zstd1_pickle'],write['blosc_zstd1_marshal'], write['blosc_zstd1_json'], write['blosc_zstd1_rapidjson'], write['blosc_zstd1_ujson'], write['blosc_zstd1_msgpack'] ] lz4_writes = [write['blosc_lz49_pickle'],write['blosc_lz49_marshal'], write['blosc_lz49_json'], write['blosc_zstd1_rapidjson'], write['blosc_lz49_ujson'], write['blosc_lz49_msgpack'] ] indices = np.arange(6) bwidth = 0.25 fig, ax = plt.subplots( figsize=(10,8) ) bars_uncomp = ax.bar( indices, uncompressed_writes, bwidth, color='steelblue' ) bars_zstd = ax.bar( indices+bwidth, zstd_writes, bwidth, color='orange' ) bars_lz4 = ax.bar( indices+2*bwidth, lz4_writes, bwidth, color='purple' ) ax.set_ylabel( "Serialization write to disk time (s)" ) ax.set_xticks( indices + 0.33 ) ax.set_xticklabels( ('pickle','marshal','json','rapidjson','ujson','msgpack') ) ax.legend( (bars_uncomp,bars_zstd,bars_lz4), ('uncompressed', 'zstd','lz4'), loc='best' ) plt.savefig( "bloscpickle_uuid_writerate.png" ) uncompressed_reads = [read['pickle'], read['marshal'], read['json'], read['rapidjson'], read['ujson'], read['msgpack'] ] zstd_reads = [read['blosc_zstd1_pickle'],read['blosc_zstd1_marshal'], read['blosc_zstd1_json'], read['blosc_zstd1_rapidjson'], read['blosc_zstd1_ujson'], read['blosc_zstd1_msgpack'] ] lz4_reads = [read['blosc_lz49_pickle'],read['blosc_lz49_marshal'], read['blosc_lz49_json'], read['blosc_lz49_rapidjson'], read['blosc_lz49_ujson'], read['blosc_lz49_msgpack'] ] fig, ax = plt.subplots( figsize=(10,8) ) bars_uncomp = ax.bar( indices, uncompressed_reads, bwidth, 
color='steelblue' ) bars_zstd = ax.bar( indices+bwidth, zstd_reads, bwidth, color='orange' ) bars_lz4 = ax.bar( indices+2*bwidth, lz4_reads, bwidth, color='purple' ) ax.set_ylabel( "Serialization read from disk time (s)" ) #ax.set_title( "German dictionary with 326980 words" ) ax.set_xticks( indices + 0.33 ) ax.set_xticklabels( ('pickle','marshal','json', 'rapidjson', 'ujson','msgpack') ) ax.legend( (bars_uncomp,bars_zstd,bars_lz4), ('uncompressed', 'zstd','lz4'), loc='best' ) plt.savefig( "bloscpickle_uuid_readrate.png" ) uncompressed_sizes = [sizes['pickle'], sizes['marshal'], sizes['json'], sizes['rapidjson'], sizes['ujson'], sizes['msgpack'] ] zstd_sizes = [sizes['blosc_zstd1_pickle'],sizes['blosc_zstd1_marshal'], sizes['blosc_zstd1_json'], sizes['blosc_zstd1_rapidjson'], sizes['blosc_zstd1_ujson'], sizes['blosc_zstd1_msgpack'] ] lz4_sizes = [sizes['blosc_lz49_pickle'],sizes['blosc_lz49_marshal'], sizes['blosc_lz49_json'], sizes['blosc_lz49_rapidjson'], sizes['blosc_lz49_ujson'], sizes['blosc_lz49_msgpack'] ] fig2, ax2 = plt.subplots( figsize=(10,8) ) bars_uncomp2 = ax2.bar( indices, uncompressed_sizes, bwidth, color='steelblue' ) bars_zstd2 = ax2.bar( indices+bwidth, zstd_sizes, bwidth, color='orange' ) bars_lz42 = ax2.bar( indices+2*bwidth, lz4_sizes, bwidth, color='purple' ) ax2.set_ylabel( "Disk usage (MB)" ) ax2.set_xticks( indices + 0.33 ) ax2.set_xticklabels( ('pickle','marshal','json','rapidjson','ujson','msgpack') ) ax2.legend( (bars_uncomp2,bars_zstd2,bars_lz42), ('uncompressed', 'zstd','lz4'), loc='best' ) plt.savefig( "bloscpickle_uuid_disksize.png" ) def testJSON(): """ Compressing structured text dictionaries is a test case where we expect compression to have a big impact. This requires a 'sample.json' file which is not included in the distribution. I recommend the following tool for generating sample JSON data: http://www.json-generator.com/ It can be trivally modified to generate 10'000 entries. 
""" write = {} read = {} sizes = {} with open( "sample.json", 'r' ) as jh: testDict = ujson.load( jh ) def execTest( testDict, pickler, useBlosc=False, compressor='zstd', clevel=1, shuffle=0, **pickler_kwargs ): if useBlosc: picklerName = 'blosc_{}{}_'.format(compressor,clevel) + pickler.__name__ filename = "testfile." + picklerName with open( filename, 'wb' ) as stream: t0 = time() bloscpickle.dump(testDict, stream, pickler= pickler, compressor=compressor, clevel=clevel, shuffle=shuffle, **pickler_kwargs ) write[picklerName] = time() - t0 with open( filename, 'rb' ) as stream: t1 = time() outDict = bloscpickle.load( stream, pickler=pickler ) read[picklerName] = time() - t1 try: assert( testDict == outDict ) except AssertionError: print( "WARNING: {} failed in/out assert test".format(picklerName) ) else: picklerName = pickler.__name__ filename = "testfile." + picklerName if pickler in (pickle,marshal,msgpack): with open( filename, 'wb' ) as stream: t0 = time() pickler.dump( testDict, stream, **pickler_kwargs ) write[picklerName] = time() - t0 with open( filename, 'rb' ) as stream: t1 = time() outDict = pickler.load( stream ) read[picklerName] = time() - t1 try: assert( testDict == outDict ) except AssertionError: print( "WARNING: {} failed in/out assert test".format(picklerName) ) else: with open( filename, 'w' ) as stream: t0 = time() pickler.dump( testDict, stream, **pickler_kwargs ) write[picklerName] = time() - t0 with open( filename, 'r' ) as stream: t1 = time() outDict = pickler.load( stream ) read[picklerName] = time() - t1 try: assert( testDict == outDict ) except AssertionError: print( "WARNING: {} failed in/out assert test".format(picklerName) ) sizes[picklerName] = os.path.getsize( filename ) / MB os.remove( filename ) print( "{}:: write {:.2e} s, read {:.2e} s, size: {:.3f} MB"\ .format( picklerName, write[picklerName], read[picklerName], sizes[picklerName] ) ) return outDict execTest( testDict, pickle ) execTest( testDict, pickle, useBlosc=True, 
compressor='zstd', clevel=1 ) execTest( testDict, pickle, useBlosc=True, compressor='lz4', clevel=9 ) execTest( testDict, marshal ) execTest( testDict, marshal, useBlosc=True, compressor='zstd', clevel=1 ) execTest( testDict, marshal, useBlosc=True, compressor='lz4', clevel=9 ) execTest( testDict, json, ensure_ascii=False ) execTest( testDict, json, useBlosc=True, compressor='zstd', clevel=1, ensure_ascii=False ) execTest( testDict, json, useBlosc=True, compressor='lz4', clevel=9, ensure_ascii=False ) execTest( testDict, rapidjson ) execTest( testDict, rapidjson, useBlosc=True, compressor='zstd', clevel=1 ) execTest( testDict, rapidjson, useBlosc=True, compressor='lz4', clevel=9 ) # Testing here to try and find what broke on read/write for UltraJSON. # Probably floating point precision outDict = execTest( testDict, ujson, encode_html_chars=True ) execTest( testDict, ujson, useBlosc=True, compressor='zstd', clevel=1, encode_html_chars=True ) execTest( testDict, ujson, useBlosc=True, compressor='lz4', clevel=9, encode_html_chars=True ) execTest( testDict, msgpack, use_bin_type=False ) execTest( testDict, msgpack, useBlosc=True, compressor='zstd', clevel=1, use_bin_type=False ) execTest( testDict, msgpack, useBlosc=True, compressor='lz4', clevel=9, use_bin_type=False ) uncompressed_writes = [write['pickle'], write['marshal'], write['json'], write['rapidjson'], write['ujson'], write['msgpack'] ] zstd_writes = [write['blosc_zstd1_pickle'],write['blosc_zstd1_marshal'], write['blosc_zstd1_json'], write['blosc_zstd1_rapidjson'], write['blosc_zstd1_ujson'], write['blosc_zstd1_msgpack'] ] lz4_writes = [write['blosc_lz49_pickle'],write['blosc_lz49_marshal'], write['blosc_lz49_json'], write['blosc_zstd1_rapidjson'], write['blosc_lz49_ujson'], write['blosc_lz49_msgpack'] ] indices = np.arange(6) bwidth = 0.25 fig, ax = plt.subplots( figsize=(10,8) ) bars_uncomp = ax.bar( indices, uncompressed_writes, bwidth, color='steelblue' ) bars_zstd = ax.bar( indices+bwidth, zstd_writes, 
bwidth, color='orange' ) bars_lz4 = ax.bar( indices+2*bwidth, lz4_writes, bwidth, color='purple' ) ax.set_ylabel( "Serialization write to disk time (s)" ) ax.set_xticks( indices + 0.33 ) ax.set_xticklabels( ('pickle','marshal','json', 'rapidjson', 'ujson','msgpack') ) ax.legend( (bars_uncomp,bars_zstd,bars_lz4), ('uncompressed', 'zstd','lz4'), loc='best' ) plt.savefig( "bloscpickle_jsongen_writerate.png" ) uncompressed_reads = [read['pickle'], read['marshal'], read['json'], read['rapidjson'], read['ujson'], read['msgpack'] ] zstd_reads = [read['blosc_zstd1_pickle'],read['blosc_zstd1_marshal'], read['blosc_zstd1_json'], read['blosc_zstd1_rapidjson'], read['blosc_zstd1_ujson'], read['blosc_zstd1_msgpack'] ] lz4_reads = [read['blosc_lz49_pickle'],read['blosc_lz49_marshal'], read['blosc_lz49_json'], read['blosc_lz49_rapidjson'], read['blosc_lz49_ujson'], read['blosc_lz49_msgpack'] ] fig, ax = plt.subplots( figsize=(10,8) ) bars_uncomp = ax.bar( indices, uncompressed_reads, bwidth, color='steelblue' ) bars_zstd = ax.bar( indices+bwidth, zstd_reads, bwidth, color='orange' ) bars_lz4 = ax.bar( indices+2*bwidth, lz4_reads, bwidth, color='purple' ) ax.set_ylabel( "Serialization read from disk time (s)" ) #ax.set_title( "German dictionary with 326980 words" ) ax.set_xticks( indices + 0.33 ) ax.set_xticklabels( ('pickle','marshal','json', 'rapidjson', 'ujson','msgpack') ) ax.legend( (bars_uncomp,bars_zstd,bars_lz4), ('uncompressed', 'zstd','lz4'), loc='best' ) plt.savefig( "bloscpickle_jsongen_readrate.png" ) uncompressed_sizes = [sizes['pickle'], sizes['marshal'], sizes['json'], sizes['rapidjson'], sizes['ujson'], sizes['msgpack'] ] zstd_sizes = [sizes['blosc_zstd1_pickle'],sizes['blosc_zstd1_marshal'], sizes['blosc_zstd1_json'], sizes['blosc_zstd1_rapidjson'], sizes['blosc_zstd1_ujson'], sizes['blosc_zstd1_msgpack'] ] lz4_sizes = [sizes['blosc_lz49_pickle'],sizes['blosc_lz49_marshal'], sizes['blosc_lz49_json'], sizes['blosc_lz49_rapidjson'], sizes['blosc_lz49_ujson'], 
sizes['blosc_lz49_msgpack'] ] fig2, ax2 = plt.subplots( figsize=(10,8) ) bars_uncomp2 = ax2.bar( indices, uncompressed_sizes, bwidth, color='steelblue' ) bars_zstd2 = ax2.bar( indices+bwidth, zstd_sizes, bwidth, color='orange' ) bars_lz42 = ax2.bar( indices+2*bwidth, lz4_sizes, bwidth, color='purple' ) ax2.set_ylabel( "Disk usage (MB)" ) ax2.set_xticks( indices + 0.33 ) ax2.set_xticklabels( ('pickle','marshal','json','rapidjson','ujson','msgpack') ) ax2.legend( (bars_uncomp2,bars_zstd2,bars_lz42), ('uncompressed', 'zstd','lz4'), loc='best' ) plt.savefig( "bloscpickle_jsongen_disksize.png" ) pass if __name__ == "__main__": testUUID() testJSON()
49.726496
111
0.594133
1,896
17,454
5.304852
0.125
0.060449
0.052496
0.02923
0.88785
0.88785
0.885961
0.885961
0.884371
0.877013
0
0.025513
0.274665
17,454
350
112
49.868571
0.768957
0.045606
0
0.842697
0
0
0.179413
0.021771
0
0
0
0
0.05618
1
0.014981
false
0.003745
0.048689
0
0.071161
0.029963
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
38638e3e88a2434d1aef63d8a623b63d27b64afb
162
py
Python
saleor/graphql/payment/utils.py
fairhopeweb/saleor
9ac6c22652d46ba65a5b894da5f1ba5bec48c019
[ "CC-BY-4.0" ]
15,337
2015-01-12T02:11:52.000Z
2021-10-05T19:19:29.000Z
saleor/graphql/payment/utils.py
fairhopeweb/saleor
9ac6c22652d46ba65a5b894da5f1ba5bec48c019
[ "CC-BY-4.0" ]
7,486
2015-02-11T10:52:13.000Z
2021-10-06T09:37:15.000Z
saleor/graphql/payment/utils.py
fairhopeweb/saleor
9ac6c22652d46ba65a5b894da5f1ba5bec48c019
[ "CC-BY-4.0" ]
5,864
2015-01-16T14:52:54.000Z
2021-10-05T23:01:15.000Z
from typing import List def metadata_contains_empty_key(metadata_list: List[dict]) -> bool: return not all([data["key"].strip() for data in metadata_list])
27
67
0.746914
25
162
4.64
0.72
0.206897
0
0
0
0
0
0
0
0
0
0
0.135802
162
5
68
32.4
0.828571
0
0
0
0
0
0.018519
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
7
38a39313338477f46367bc7b16653313029d8a99
11,617
py
Python
@muuTera$u.py
Tripl0Color/AmuteraSu-Pattern
c78e73481b54ef20cba6a5657cb5f05c74682cbf
[ "Unlicense" ]
null
null
null
@muuTera$u.py
Tripl0Color/AmuteraSu-Pattern
c78e73481b54ef20cba6a5657cb5f05c74682cbf
[ "Unlicense" ]
null
null
null
@muuTera$u.py
Tripl0Color/AmuteraSu-Pattern
c78e73481b54ef20cba6a5657cb5f05c74682cbf
[ "Unlicense" ]
null
null
null
#Use on py flask print ("""$$$$$$$$\ $$\ $$\ $$$$$$\ $$\ \__$$ __| \__| $$ | $$ __$$\ $$ | $$ | $$$$$$\ $$\ $$$$$$\ $$ | $$ / \__| $$$$$$\ $$ | $$$$$$\ $$$$$$\ $$ |$$ __$$\ $$ |$$ __$$\ $$ | $$ | $$ __$$\ $$ |$$ __$$\ $$ __$$\ $$ |$$ | \__|$$ |$$ / $$ |$$ | $$ | $$ / $$ |$$ |$$ / $$ |$$ | \__| $$ |$$ | $$ |$$ | $$ |$$ | $$ | $$\ $$ | $$ |$$ |$$ | $$ |$$ | $$ |$$ | $$ |$$$$$$$ |$$ | \$$$$$$ |\$$$$$$ |$$ |\$$$$$$ |$$ | \__|\__| \__|$$ ____/ \__|$$$$$$\\______/ \______/ \__| \______/ \__| $$ | \______| $$ | \__| """) from flask import Flask, request, json import time import vk_api import random vk = vk_api.VkApi(token="paste your token vk group here") #vk group token here msg = "msg here" #Your ded message app = Flask(__name__) @app.route('/', methods = ["POST"]) def main(): data = json.loads(request.data) if data["type"] == "confirmation": return "paste code vk here" #code vk here elif data["type"] == "message_new": object = data["object"] id = object["peer_id"] body = object["text"] if body.lower() == "hello": vk.method("messages.send", {"peer_id": id, "message": "hi", "random_id": random.randint(1, 2147483647)}) else: vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", 
{"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, 
"message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, 
"random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": 
random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) vk.method("messages.send", {"peer_id": id, "message": msg, "random_id": random.randint(1, 2147483647)}) return "ok"
94.447154
121
0.56512
1,362
11,617
4.640969
0.039648
0.081633
0.215156
0.268945
0.940832
0.940832
0.93577
0.93577
0.930233
0.930233
0
0.103797
0.224585
11,617
122
122
95.221311
0.597913
0.005337
0
0.730435
0
0.034783
0.355849
0
0
0
0
0
0
1
0.008696
false
0
0.034783
0
0.06087
0.008696
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
38e934436fdf9c4e8e317eea5f7d04ff624ca858
93,243
py
Python
msgraph-cli-extensions/beta/bookings_beta/azext_bookings_beta/vendored_sdks/bookings/operations/_booking_businesses_operations.py
thewahome/msgraph-cli
33127d9efa23a0e5f5303c93242fbdbb73348671
[ "MIT" ]
null
null
null
msgraph-cli-extensions/beta/bookings_beta/azext_bookings_beta/vendored_sdks/bookings/operations/_booking_businesses_operations.py
thewahome/msgraph-cli
33127d9efa23a0e5f5303c93242fbdbb73348671
[ "MIT" ]
null
null
null
msgraph-cli-extensions/beta/bookings_beta/azext_bookings_beta/vendored_sdks/bookings/operations/_booking_businesses_operations.py
thewahome/msgraph-cli
33127d9efa23a0e5f5303c93242fbdbb73348671
[ "MIT" ]
null
null
null
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] class BookingBusinessesOperations(object): """BookingBusinessesOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~bookings.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. 
""" models = models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config def list_appointments( self, booking_business_id, # type: str orderby=None, # type: Optional[List[Union[str, "models.Enum13"]]] select=None, # type: Optional[List[Union[str, "models.Enum14"]]] expand=None, # type: Optional[List[str]] **kwargs # type: Any ): # type: (...) -> Iterable["models.CollectionOfBookingAppointment"] """Get appointments from bookingBusinesses. Get appointments from bookingBusinesses. :param booking_business_id: key: id of bookingBusiness. :type booking_business_id: str :param orderby: Order items by property values. :type orderby: list[str or ~bookings.models.Enum13] :param select: Select properties to be returned. :type select: list[str or ~bookings.models.Enum14] :param expand: Expand related entities. :type expand: list[str] :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either CollectionOfBookingAppointment or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~bookings.models.CollectionOfBookingAppointment] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfBookingAppointment"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_appointments.metadata['url'] # type: ignore path_format_arguments = { 'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'), } url = self._client.format_url(url, 
**path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if self._config.top is not None: query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0) if self._config.skip is not None: query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0) if self._config.search is not None: query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str') if self._config.filter is not None: query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str') if self._config.count is not None: query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool') if orderby is not None: query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',') if select is not None: query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',') if expand is not None: query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('CollectionOfBookingAppointment', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.odata_next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize(models.OdataError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list_appointments.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/appointments'} # type: ignore def create_appointments( self, booking_business_id, # type: str body, # type: "models.MicrosoftGraphBookingAppointment" **kwargs # type: Any ): # type: (...) -> "models.MicrosoftGraphBookingAppointment" """Create new navigation property to appointments for bookingBusinesses. Create new navigation property to appointments for bookingBusinesses. :param booking_business_id: key: id of bookingBusiness. :type booking_business_id: str :param body: New navigation property. :type body: ~bookings.models.MicrosoftGraphBookingAppointment :keyword callable cls: A custom type or function that will be passed the direct response :return: MicrosoftGraphBookingAppointment, or the result of cls(response) :rtype: ~bookings.models.MicrosoftGraphBookingAppointment :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphBookingAppointment"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.create_appointments.metadata['url'] # type: ignore path_format_arguments = { 'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, 
Any] body_content = self._serialize.body(body, 'MicrosoftGraphBookingAppointment') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('MicrosoftGraphBookingAppointment', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_appointments.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/appointments'} # type: ignore def get_appointments( self, booking_business_id, # type: str booking_appointment_id, # type: str select=None, # type: Optional[List[Union[str, "models.Enum15"]]] expand=None, # type: Optional[List[str]] **kwargs # type: Any ): # type: (...) -> "models.MicrosoftGraphBookingAppointment" """Get appointments from bookingBusinesses. Get appointments from bookingBusinesses. :param booking_business_id: key: id of bookingBusiness. :type booking_business_id: str :param booking_appointment_id: key: id of bookingAppointment. :type booking_appointment_id: str :param select: Select properties to be returned. :type select: list[str or ~bookings.models.Enum15] :param expand: Expand related entities. 
:type expand: list[str] :keyword callable cls: A custom type or function that will be passed the direct response :return: MicrosoftGraphBookingAppointment, or the result of cls(response) :rtype: ~bookings.models.MicrosoftGraphBookingAppointment :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphBookingAppointment"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" # Construct URL url = self.get_appointments.metadata['url'] # type: ignore path_format_arguments = { 'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'), 'bookingAppointment-id': self._serialize.url("booking_appointment_id", booking_appointment_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if select is not None: query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',') if expand is not None: query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('MicrosoftGraphBookingAppointment', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized 
get_appointments.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/appointments/{bookingAppointment-id}'} # type: ignore def update_appointments( self, booking_business_id, # type: str booking_appointment_id, # type: str body, # type: "models.MicrosoftGraphBookingAppointment" **kwargs # type: Any ): # type: (...) -> None """Update the navigation property appointments in bookingBusinesses. Update the navigation property appointments in bookingBusinesses. :param booking_business_id: key: id of bookingBusiness. :type booking_business_id: str :param booking_appointment_id: key: id of bookingAppointment. :type booking_appointment_id: str :param body: New navigation property values. :type body: ~bookings.models.MicrosoftGraphBookingAppointment :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update_appointments.metadata['url'] # type: ignore path_format_arguments = { 'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'), 'bookingAppointment-id': self._serialize.url("booking_appointment_id", booking_appointment_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = 
self._serialize.body(body, 'MicrosoftGraphBookingAppointment') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) update_appointments.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/appointments/{bookingAppointment-id}'} # type: ignore def delete_appointments( self, booking_business_id, # type: str booking_appointment_id, # type: str if_match=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> None """Delete navigation property appointments for bookingBusinesses. Delete navigation property appointments for bookingBusinesses. :param booking_business_id: key: id of bookingBusiness. :type booking_business_id: str :param booking_appointment_id: key: id of bookingAppointment. :type booking_appointment_id: str :param if_match: ETag. 
:type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" # Construct URL url = self.delete_appointments.metadata['url'] # type: ignore path_format_arguments = { 'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'), 'bookingAppointment-id': self._serialize.url("booking_appointment_id", booking_appointment_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) delete_appointments.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/appointments/{bookingAppointment-id}'} # type: ignore def list_calendar_view( self, booking_business_id, # type: str start, # type: str end, # type: str orderby=None, # type: Optional[List[Union[str, "models.Enum16"]]] select=None, # type: Optional[List[Union[str, 
"models.Get9ItemsItem"]]] expand=None, # type: Optional[List[str]] **kwargs # type: Any ): # type: (...) -> Iterable["models.CollectionOfBookingAppointment0"] """Get calendarView from bookingBusinesses. Get calendarView from bookingBusinesses. :param booking_business_id: key: id of bookingBusiness. :type booking_business_id: str :param start: The start date and time of the time range, represented in ISO 8601 format. For example, 2019-11-08T19:00:00-08:00. :type start: str :param end: The end date and time of the time range, represented in ISO 8601 format. For example, 2019-11-08T20:00:00-08:00. :type end: str :param orderby: Order items by property values. :type orderby: list[str or ~bookings.models.Enum16] :param select: Select properties to be returned. :type select: list[str or ~bookings.models.Get9ItemsItem] :param expand: Expand related entities. :type expand: list[str] :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either CollectionOfBookingAppointment0 or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~bookings.models.CollectionOfBookingAppointment0] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfBookingAppointment0"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_calendar_view.metadata['url'] # type: ignore path_format_arguments = { 'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: 
Dict[str, Any] query_parameters['start'] = self._serialize.query("start", start, 'str') query_parameters['end'] = self._serialize.query("end", end, 'str') if self._config.top is not None: query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0) if self._config.skip is not None: query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0) if self._config.search is not None: query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str') if self._config.filter is not None: query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str') if self._config.count is not None: query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool') if orderby is not None: query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',') if select is not None: query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',') if expand is not None: query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('CollectionOfBookingAppointment0', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.odata_next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize(models.OdataError, response) map_error(status_code=response.status_code, 
response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_calendar_view.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/calendarView'}  # type: ignore

    def create_calendar_view(
        self,
        booking_business_id,  # type: str
        body,  # type: "models.MicrosoftGraphBookingAppointment"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.MicrosoftGraphBookingAppointment"
        """Create new navigation property to calendarView for bookingBusinesses.

        Create new navigation property to calendarView for bookingBusinesses.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :param body: New navigation property.
        :type body: ~bookings.models.MicrosoftGraphBookingAppointment
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MicrosoftGraphBookingAppointment, or the result of cls(response)
        :rtype: ~bookings.models.MicrosoftGraphBookingAppointment
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphBookingAppointment"]
        # Map auth/conflict status codes to specific azure-core exceptions; callers may extend via 'error_map'.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_calendar_view.metadata['url']  # type: ignore
        path_format_arguments = {
            'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the appointment model into the POST body and send it through the pipeline.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'MicrosoftGraphBookingAppointment')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 201 Created is a success for this operation; anything else raises with OData error detail.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('MicrosoftGraphBookingAppointment', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_calendar_view.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/calendarView'}  # type: ignore

    def get_calendar_view(
        self,
        booking_business_id,  # type: str
        booking_appointment_id,  # type: str
        start,  # type: str
        end,  # type: str
        select=None,  # type: Optional[List[Union[str, "models.Get4ItemsItem"]]]
        expand=None,  # type: Optional[List[str]]
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.MicrosoftGraphBookingAppointment"
        """Get calendarView from bookingBusinesses.

        Get calendarView from bookingBusinesses.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :param booking_appointment_id: key: id of bookingAppointment.
        :type booking_appointment_id: str
        :param start: The start date and time of the time range, represented in ISO 8601 format. For
         example, 2019-11-08T19:00:00-08:00.
        :type start: str
        :param end: The end date and time of the time range, represented in ISO 8601 format. For
         example, 2019-11-08T20:00:00-08:00.
        :type end: str
        :param select: Select properties to be returned.
        :type select: list[str or ~bookings.models.Get4ItemsItem]
        :param expand: Expand related entities.
        :type expand: list[str]
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MicrosoftGraphBookingAppointment, or the result of cls(response)
        :rtype: ~bookings.models.MicrosoftGraphBookingAppointment
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphBookingAppointment"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        # Construct URL
        url = self.get_calendar_view.metadata['url']  # type: ignore
        path_format_arguments = {
            'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
            'bookingAppointment-id': self._serialize.url("booking_appointment_id", booking_appointment_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        # 'start' and 'end' are required query parameters; $select/$expand are sent only when provided.
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['start'] = self._serialize.query("start", start, 'str')
        query_parameters['end'] = self._serialize.query("end", end, 'str')
        if select is not None:
            query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('MicrosoftGraphBookingAppointment', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_calendar_view.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/calendarView/{bookingAppointment-id}'}  # type: ignore

    def update_calendar_view(
        self,
        booking_business_id,  # type: str
        booking_appointment_id,  # type: str
        body,  # type: "models.MicrosoftGraphBookingAppointment"
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Update the navigation property calendarView in bookingBusinesses.

        Update the navigation property calendarView in bookingBusinesses.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :param booking_appointment_id: key: id of bookingAppointment.
        :type booking_appointment_id: str
        :param body: New navigation property values.
        :type body: ~bookings.models.MicrosoftGraphBookingAppointment
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.update_calendar_view.metadata['url']  # type: ignore
        path_format_arguments = {
            'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
            'bookingAppointment-id': self._serialize.url("booking_appointment_id", booking_appointment_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # PATCH the serialized model; the service replies 204 No Content on success.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'MicrosoftGraphBookingAppointment')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    update_calendar_view.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/calendarView/{bookingAppointment-id}'}  # type: ignore

    def delete_calendar_view(
        self,
        booking_business_id,  # type: str
        booking_appointment_id,  # type: str
        if_match=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Delete navigation property calendarView for bookingBusinesses.

        Delete navigation property calendarView for bookingBusinesses.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :param booking_appointment_id: key: id of bookingAppointment.
        :type booking_appointment_id: str
        :param if_match: ETag.
        :type if_match: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        # Construct URL
        url = self.delete_calendar_view.metadata['url']  # type: ignore
        path_format_arguments = {
            'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
            'bookingAppointment-id': self._serialize.url("booking_appointment_id", booking_appointment_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        # If-Match carries the caller-supplied ETag for optimistic concurrency; omitted when None.
        header_parameters = {}  # type: Dict[str, Any]
        if if_match is not None:
            header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete_calendar_view.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/calendarView/{bookingAppointment-id}'}  # type: ignore

    def list_customers(
        self,
        booking_business_id,  # type: str
        orderby=None,  # type: Optional[List[Union[str, "models.Enum19"]]]
        select=None,  # type: Optional[List[Union[str, "models.Enum20"]]]
        expand=None,  # type: Optional[List[str]]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.CollectionOfBookingCustomer"]
        """Get customers from bookingBusinesses.

        Get customers from bookingBusinesses.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :param orderby: Order items by property values.
        :type orderby: list[str or ~bookings.models.Enum19]
        :param select: Select properties to be returned.
        :type select: list[str or ~bookings.models.Enum20]
        :param expand: Expand related entities.
        :type expand: list[str]
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either CollectionOfBookingCustomer or the result of
         cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~bookings.models.CollectionOfBookingCustomer]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CollectionOfBookingCustomer"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the initial request from metadata + query options, or follow the
            # service-provided nextLink verbatim (it already embeds the query string).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_customers.metadata['url']  # type: ignore
                path_format_arguments = {
                    'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                if self._config.top is not None:
                    query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
                if self._config.skip is not None:
                    query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
                if self._config.search is not None:
                    query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
                if self._config.filter is not None:
                    query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
                if self._config.count is not None:
                    query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
                if orderby is not None:
                    query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
                if select is not None:
                    query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
                if expand is not None:
                    query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Return (continuation token, page items) as required by ItemPaged.
            deserialized = self._deserialize('CollectionOfBookingCustomer', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.odata_next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.OdataError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_customers.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/customers'}  # type: ignore

    def create_customers(
        self,
        booking_business_id,  # type: str
        body,  # type: "models.MicrosoftGraphBookingCustomer"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.MicrosoftGraphBookingCustomer"
        """Create new navigation property to customers for bookingBusinesses.

        Create new navigation property to customers for bookingBusinesses.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :param body: New navigation property.
        :type body: ~bookings.models.MicrosoftGraphBookingCustomer
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MicrosoftGraphBookingCustomer, or the result of cls(response)
        :rtype: ~bookings.models.MicrosoftGraphBookingCustomer
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphBookingCustomer"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_customers.metadata['url']  # type: ignore
        path_format_arguments = {
            'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'MicrosoftGraphBookingCustomer')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 201 Created is the only success status for creation.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('MicrosoftGraphBookingCustomer', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_customers.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/customers'}  # type: ignore

    def get_customers(
        self,
        booking_business_id,  # type: str
        booking_customer_id,  # type: str
        select=None,  # type: Optional[List[Union[str, "models.Enum21"]]]
        expand=None,  # type: Optional[List[str]]
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.MicrosoftGraphBookingCustomer"
        """Get customers from bookingBusinesses.

        Get customers from bookingBusinesses.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :param booking_customer_id: key: id of bookingCustomer.
        :type booking_customer_id: str
        :param select: Select properties to be returned.
        :type select: list[str or ~bookings.models.Enum21]
        :param expand: Expand related entities.
        :type expand: list[str]
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MicrosoftGraphBookingCustomer, or the result of cls(response)
        :rtype: ~bookings.models.MicrosoftGraphBookingCustomer
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphBookingCustomer"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        # Construct URL
        url = self.get_customers.metadata['url']  # type: ignore
        path_format_arguments = {
            'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
            'bookingCustomer-id': self._serialize.url("booking_customer_id", booking_customer_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if select is not None:
            query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('MicrosoftGraphBookingCustomer', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_customers.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/customers/{bookingCustomer-id}'}  # type: ignore

    def update_customers(
        self,
        booking_business_id,  # type: str
        booking_customer_id,  # type: str
        body,  # type: "models.MicrosoftGraphBookingCustomer"
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Update the navigation property customers in bookingBusinesses.

        Update the navigation property customers in bookingBusinesses.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :param booking_customer_id: key: id of bookingCustomer.
        :type booking_customer_id: str
        :param body: New navigation property values.
        :type body: ~bookings.models.MicrosoftGraphBookingCustomer
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.update_customers.metadata['url']  # type: ignore
        path_format_arguments = {
            'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
            'bookingCustomer-id': self._serialize.url("booking_customer_id", booking_customer_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # PATCH semantics: success is 204 No Content, so nothing is deserialized on the happy path.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'MicrosoftGraphBookingCustomer')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    update_customers.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/customers/{bookingCustomer-id}'}  # type: ignore

    def delete_customers(
        self,
        booking_business_id,  # type: str
        booking_customer_id,  # type: str
        if_match=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Delete navigation property customers for bookingBusinesses.

        Delete navigation property customers for bookingBusinesses.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :param booking_customer_id: key: id of bookingCustomer.
        :type booking_customer_id: str
        :param if_match: ETag.
        :type if_match: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        # Construct URL
        url = self.delete_customers.metadata['url']  # type: ignore
        path_format_arguments = {
            'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
            'bookingCustomer-id': self._serialize.url("booking_customer_id", booking_customer_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        # Optional ETag for conditional delete.
        header_parameters = {}  # type: Dict[str, Any]
        if if_match is not None:
            header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete_customers.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/customers/{bookingCustomer-id}'}  # type: ignore

    def publish(
        self,
        booking_business_id,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Invoke action publish.

        Invoke action publish.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        # Construct URL
        url = self.publish.metadata['url']  # type: ignore
        path_format_arguments = {
            'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # OData action: POST with no body; success is 204 No Content.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    publish.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/microsoft.graph.publish'}  # type: ignore

    def unpublish(
        self,
        booking_business_id,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Invoke action unpublish.

        Invoke action unpublish.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        # Construct URL
        url = self.unpublish.metadata['url']  # type: ignore
        path_format_arguments = {
            'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # OData action: POST with no body; success is 204 No Content.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    unpublish.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/microsoft.graph.unpublish'}  # type: ignore

    def list_services(
        self,
        booking_business_id,  # type: str
        orderby=None,  # type: Optional[List[Union[str, "models.Enum22"]]]
        select=None,  # type: Optional[List[Union[str, "models.Enum23"]]]
        expand=None,  # type: Optional[List[str]]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.CollectionOfBookingService"]
        """Get services from bookingBusinesses.

        Get services from bookingBusinesses.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :param orderby: Order items by property values.
        :type orderby: list[str or ~bookings.models.Enum22]
        :param select: Select properties to be returned.
        :type select: list[str or ~bookings.models.Enum23]
        :param expand: Expand related entities.
        :type expand: list[str]
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either CollectionOfBookingService or the result of
         cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~bookings.models.CollectionOfBookingService]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.CollectionOfBookingService"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the initial request from metadata + query options, or follow the
            # service-provided nextLink verbatim (it already embeds the query string).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_services.metadata['url']  # type: ignore
                path_format_arguments = {
                    'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                if self._config.top is not None:
                    query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
                if self._config.skip is not None:
                    query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
                if self._config.search is not None:
                    query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
                if self._config.filter is not None:
                    query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
                if self._config.count is not None:
                    query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
                if orderby is not None:
                    query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
                if select is not None:
                    query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
                if expand is not None:
                    query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Return (continuation token, page items) as required by ItemPaged.
            deserialized = self._deserialize('CollectionOfBookingService', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.odata_next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(models.OdataError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_services.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/services'}  # type: ignore

    def create_services(
        self,
        booking_business_id,  # type: str
        body,  # type: "models.MicrosoftGraphBookingService"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.MicrosoftGraphBookingService"
        """Create new navigation property to services for bookingBusinesses.

        Create new navigation property to services for bookingBusinesses.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :param body: New navigation property.
        :type body: ~bookings.models.MicrosoftGraphBookingService
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MicrosoftGraphBookingService, or the result of cls(response)
        :rtype: ~bookings.models.MicrosoftGraphBookingService
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphBookingService"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_services.metadata['url']  # type: ignore
        path_format_arguments = {
            'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'MicrosoftGraphBookingService')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 201 Created is the only success status for creation.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('MicrosoftGraphBookingService', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_services.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/services'}  # type: ignore

    def get_services(
        self,
        booking_business_id,  # type: str
        booking_service_id,  # type: str
        select=None,  # type: Optional[List[Union[str, "models.Enum24"]]]
        expand=None,  # type: Optional[List[str]]
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.MicrosoftGraphBookingService"
        """Get services from bookingBusinesses.

        Get services from bookingBusinesses.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :param booking_service_id: key: id of bookingService.
        :type booking_service_id: str
        :param select: Select properties to be returned.
        :type select: list[str or ~bookings.models.Enum24]
        :param expand: Expand related entities.
        :type expand: list[str]
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MicrosoftGraphBookingService, or the result of cls(response)
        :rtype: ~bookings.models.MicrosoftGraphBookingService
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphBookingService"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        # Construct URL
        url = self.get_services.metadata['url']  # type: ignore
        path_format_arguments = {
            'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'),
            'bookingService-id': self._serialize.url("booking_service_id", booking_service_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if select is not None:
            query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('MicrosoftGraphBookingService', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_services.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/services/{bookingService-id}'}  # type: ignore

    def update_services(
        self,
        booking_business_id,  # type: str
        booking_service_id,  # type: str
        body,  # type: "models.MicrosoftGraphBookingService"
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Update the navigation property services in bookingBusinesses.

        Update the navigation property services in bookingBusinesses.

        :param booking_business_id: key: id of bookingBusiness.
        :type booking_business_id: str
        :param booking_service_id: key: id of bookingService.
        :type booking_service_id: str
        :param body: New navigation property values.
:type body: ~bookings.models.MicrosoftGraphBookingService :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update_services.metadata['url'] # type: ignore path_format_arguments = { 'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'), 'bookingService-id': self._serialize.url("booking_service_id", booking_service_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'MicrosoftGraphBookingService') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) update_services.metadata = {'url': 
'/bookingBusinesses/{bookingBusiness-id}/services/{bookingService-id}'} # type: ignore def delete_services( self, booking_business_id, # type: str booking_service_id, # type: str if_match=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> None """Delete navigation property services for bookingBusinesses. Delete navigation property services for bookingBusinesses. :param booking_business_id: key: id of bookingBusiness. :type booking_business_id: str :param booking_service_id: key: id of bookingService. :type booking_service_id: str :param if_match: ETag. :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" # Construct URL url = self.delete_services.metadata['url'] # type: ignore path_format_arguments = { 'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'), 'bookingService-id': self._serialize.url("booking_service_id", booking_service_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, 
error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) delete_services.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/services/{bookingService-id}'} # type: ignore def list_staff_members( self, booking_business_id, # type: str orderby=None, # type: Optional[List[Union[str, "models.Enum25"]]] select=None, # type: Optional[List[Union[str, "models.Enum26"]]] expand=None, # type: Optional[List[str]] **kwargs # type: Any ): # type: (...) -> Iterable["models.CollectionOfBookingStaffMember"] """Get staffMembers from bookingBusinesses. Get staffMembers from bookingBusinesses. :param booking_business_id: key: id of bookingBusiness. :type booking_business_id: str :param orderby: Order items by property values. :type orderby: list[str or ~bookings.models.Enum25] :param select: Select properties to be returned. :type select: list[str or ~bookings.models.Enum26] :param expand: Expand related entities. 
:type expand: list[str] :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either CollectionOfBookingStaffMember or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~bookings.models.CollectionOfBookingStaffMember] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfBookingStaffMember"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_staff_members.metadata['url'] # type: ignore path_format_arguments = { 'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if self._config.top is not None: query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0) if self._config.skip is not None: query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0) if self._config.search is not None: query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str') if self._config.filter is not None: query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str') if self._config.count is not None: query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool') if orderby is not None: query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',') if select is not None: 
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',') if expand is not None: query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('CollectionOfBookingStaffMember', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.odata_next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize(models.OdataError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list_staff_members.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/staffMembers'} # type: ignore def create_staff_members( self, booking_business_id, # type: str body, # type: "models.MicrosoftGraphBookingStaffMember" **kwargs # type: Any ): # type: (...) -> "models.MicrosoftGraphBookingStaffMember" """Create new navigation property to staffMembers for bookingBusinesses. Create new navigation property to staffMembers for bookingBusinesses. :param booking_business_id: key: id of bookingBusiness. :type booking_business_id: str :param body: New navigation property. 
:type body: ~bookings.models.MicrosoftGraphBookingStaffMember :keyword callable cls: A custom type or function that will be passed the direct response :return: MicrosoftGraphBookingStaffMember, or the result of cls(response) :rtype: ~bookings.models.MicrosoftGraphBookingStaffMember :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphBookingStaffMember"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.create_staff_members.metadata['url'] # type: ignore path_format_arguments = { 'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'MicrosoftGraphBookingStaffMember') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('MicrosoftGraphBookingStaffMember', pipeline_response) if cls: return 
cls(pipeline_response, deserialized, {}) return deserialized create_staff_members.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/staffMembers'} # type: ignore def get_staff_members( self, booking_business_id, # type: str booking_staff_member_id, # type: str select=None, # type: Optional[List[Union[str, "models.Enum27"]]] expand=None, # type: Optional[List[str]] **kwargs # type: Any ): # type: (...) -> "models.MicrosoftGraphBookingStaffMember" """Get staffMembers from bookingBusinesses. Get staffMembers from bookingBusinesses. :param booking_business_id: key: id of bookingBusiness. :type booking_business_id: str :param booking_staff_member_id: key: id of bookingStaffMember. :type booking_staff_member_id: str :param select: Select properties to be returned. :type select: list[str or ~bookings.models.Enum27] :param expand: Expand related entities. :type expand: list[str] :keyword callable cls: A custom type or function that will be passed the direct response :return: MicrosoftGraphBookingStaffMember, or the result of cls(response) :rtype: ~bookings.models.MicrosoftGraphBookingStaffMember :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphBookingStaffMember"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" # Construct URL url = self.get_staff_members.metadata['url'] # type: ignore path_format_arguments = { 'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'), 'bookingStaffMember-id': self._serialize.url("booking_staff_member_id", booking_staff_member_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if select is not None: query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',') if expand is not None: 
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('MicrosoftGraphBookingStaffMember', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_staff_members.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/staffMembers/{bookingStaffMember-id}'} # type: ignore def update_staff_members( self, booking_business_id, # type: str booking_staff_member_id, # type: str body, # type: "models.MicrosoftGraphBookingStaffMember" **kwargs # type: Any ): # type: (...) -> None """Update the navigation property staffMembers in bookingBusinesses. Update the navigation property staffMembers in bookingBusinesses. :param booking_business_id: key: id of bookingBusiness. :type booking_business_id: str :param booking_staff_member_id: key: id of bookingStaffMember. :type booking_staff_member_id: str :param body: New navigation property values. 
:type body: ~bookings.models.MicrosoftGraphBookingStaffMember :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update_staff_members.metadata['url'] # type: ignore path_format_arguments = { 'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'), 'bookingStaffMember-id': self._serialize.url("booking_staff_member_id", booking_staff_member_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'MicrosoftGraphBookingStaffMember') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) update_staff_members.metadata = {'url': 
'/bookingBusinesses/{bookingBusiness-id}/staffMembers/{bookingStaffMember-id}'} # type: ignore def delete_staff_members( self, booking_business_id, # type: str booking_staff_member_id, # type: str if_match=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> None """Delete navigation property staffMembers for bookingBusinesses. Delete navigation property staffMembers for bookingBusinesses. :param booking_business_id: key: id of bookingBusiness. :type booking_business_id: str :param booking_staff_member_id: key: id of bookingStaffMember. :type booking_staff_member_id: str :param if_match: ETag. :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" # Construct URL url = self.delete_staff_members.metadata['url'] # type: ignore path_format_arguments = { 'bookingBusiness-id': self._serialize.url("booking_business_id", booking_business_id, 'str'), 'bookingStaffMember-id': self._serialize.url("booking_staff_member_id", booking_staff_member_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [204]: 
map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) delete_staff_members.metadata = {'url': '/bookingBusinesses/{bookingBusiness-id}/staffMembers/{bookingStaffMember-id}'} # type: ignore
48.162707
139
0.653314
9,826
93,243
6.002544
0.028598
0.032841
0.038911
0.016378
0.957308
0.95663
0.948136
0.939455
0.928401
0.925196
0
0.00685
0.242227
93,243
1,935
140
48.187597
0.827901
0.282724
0
0.856769
0
0
0.117259
0.045256
0
0
0
0
0
1
0.037555
false
0
0.00786
0
0.092576
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
c7f745307ede1bf1503e1cd0489477de3f39885f
277
py
Python
poly/common.py
jdanford/poly
4f3a242dbb54fb68375a310af943be759588f459
[ "0BSD" ]
null
null
null
poly/common.py
jdanford/poly
4f3a242dbb54fb68375a310af943be759588f459
[ "0BSD" ]
null
null
null
poly/common.py
jdanford/poly
4f3a242dbb54fb68375a310af943be759588f459
[ "0BSD" ]
null
null
null
class PolyError(Exception): pass SAFE_IDENT = r"[a-zA-Z!@\$%\^&\*=\|<>\?\/][a-zA-Z0-9_!@\$%\^&\*\-\+=\|<>\?\/]*|[\-\+]([a-zA-Z!@\$%\^&\*\-\+=\|<>\?\/][a-zA-Z0-9_!@\$%\^&\*\-\+=\|<>\?\/]*)?" SAFE_SYMBOL = r"#[a-zA-Z!@\$%\^&\*=\|<>\?\/][a-zA-Z0-9_!@\$%\^&\*\-\+=\|<>\?\/]*"
46.166667
156
0.299639
31
277
2.516129
0.419355
0.230769
0.153846
0.192308
0.410256
0.410256
0.410256
0.282051
0
0
0
0.022814
0.050542
277
5
157
55.4
0.273764
0
0
0
0
0.25
0.736462
0.736462
0
0
0
0
0
1
0
false
0.25
0
0
0.25
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
1
0
0
0
0
0
8
2a0aa9c94f65d473518398a5c70f792007bc7e48
4,440
py
Python
tests/test_string_buffer_right.py
robertblackwell/simple_curses
47f2bb81bcbc2087101839372f71a9117f5dc643
[ "MIT" ]
null
null
null
tests/test_string_buffer_right.py
robertblackwell/simple_curses
47f2bb81bcbc2087101839372f71a9117f5dc643
[ "MIT" ]
null
null
null
tests/test_string_buffer_right.py
robertblackwell/simple_curses
47f2bb81bcbc2087101839372f71a9117f5dc643
[ "MIT" ]
null
null
null
import sys import unittest import string_buffer class TestStringBufferRight(unittest.TestCase): def test_right_arrow_01(self): sb = string_buffer.StringBuffer("abc", 5) self.assertEqual(sb.content, "abc") self.assertEqual(sb.display_string, "abc" + sb.EOSPAD) self.assertEqual(sb.cpos_buffer, 3) sb.handle_left() sb.handle_left() sb.handle_left() self.assertEqual(sb.content, "abc") self.assertEqual(sb.display_string, "abc") self.assertEqual(sb.cpos_buffer, 0) self.assertEqual(sb.state, sb.STATE_EDITING) sb.handle_right() self.assertEqual(sb.content, "abc") self.assertEqual(sb.display_string, "abc") self.assertEqual(sb.cpos_buffer, 1) self.assertEqual(sb.state, sb.STATE_EDITING) sb.handle_right() self.assertEqual(sb.content, "abc") self.assertEqual(sb.display_string, "abc") self.assertEqual(sb.cpos_buffer, 2) self.assertEqual(sb.state, sb.STATE_EDITING) sb.handle_right() self.assertEqual(sb.content, "abc") self.assertEqual(sb.display_string, "abc" + sb.EOSPAD) self.assertEqual(sb.cpos_buffer, 3) self.assertEqual(sb.state, sb.STATE_APPENDING) sb.handle_right() self.assertEqual(sb.content, "abc") self.assertEqual(sb.display_string, "abc" + sb.EOSPAD) self.assertEqual(sb.cpos_buffer, 3) self.assertEqual(sb.state, sb.STATE_APPENDING) def test_right_arrow_02(self): sb = string_buffer.StringBuffer("abcdefg", 5) self.assertEqual(sb.content, "abcdefg") self.assertEqual(sb.cpos_buffer, 4) sb.handle_left() sb.handle_left() sb.handle_left() sb.handle_left() sb.handle_left() sb.handle_left() sb.handle_left() sb.handle_left() self.assertEqual(sb.content, "abcdefg") self.assertEqual(sb.display_string, "abcde") self.assertEqual(sb.cpos_buffer, 0) self.assertEqual(sb.state, sb.STATE_EDITING) sb.handle_right() self.assertEqual(sb.content, "abcdefg") self.assertEqual(sb.display_string, "abcde") self.assertEqual(sb.cpos_buffer, 1) self.assertEqual(sb.state, sb.STATE_EDITING) sb.handle_right() self.assertEqual(sb.content, "abcdefg") self.assertEqual(sb.display_string, "abcde") 
self.assertEqual(sb.cpos_buffer, 2) self.assertEqual(sb.state, sb.STATE_EDITING) sb.handle_right() self.assertEqual(sb.content, "abcdefg") self.assertEqual(sb.display_string, "abcde") self.assertEqual(sb.cpos_buffer, 3) self.assertEqual(sb.state, sb.STATE_EDITING) sb.handle_right() self.assertEqual(sb.content, "abcdefg") self.assertEqual(sb.display_string, "abcde") self.assertEqual(sb.cpos_buffer, 4) self.assertEqual(sb.state, sb.STATE_EDITING) sb.handle_right() self.assertEqual(sb.content, "abcdefg") self.assertEqual(sb.display_string, "bcdef") self.assertEqual(sb.cpos_buffer, 4) self.assertEqual(sb.state, sb.STATE_EDITING) sb.handle_right() self.assertEqual(sb.content, "abcdefg") self.assertEqual(sb.display_string, "cdefg") self.assertEqual(sb.cpos_buffer, 4) self.assertEqual(sb.state, sb.STATE_EDITING) sb.handle_right() self.assertEqual(sb.content, "abcdefg") self.assertEqual(sb.display_string, "defg" + sb.EOSPAD) self.assertEqual(sb.cpos_buffer, 4) self.assertEqual(sb.state, sb.STATE_APPENDING) sb.handle_right() self.assertEqual(sb.content, "abcdefg") self.assertEqual(sb.display_string, "defg" + sb.EOSPAD) self.assertEqual(sb.cpos_buffer, 4) self.assertEqual(sb.state, sb.STATE_APPENDING) sb.handle_right() self.assertEqual(sb.content, "abcdefg") self.assertEqual(sb.display_string, "defg" + sb.EOSPAD) self.assertEqual(sb.cpos_buffer, 4) self.assertEqual(sb.state, sb.STATE_APPENDING) sb.handle_right() self.assertEqual(sb.content, "abcdefg") self.assertEqual(sb.display_string, "defg" + sb.EOSPAD) self.assertEqual(sb.cpos_buffer, 4) self.assertEqual(sb.state, sb.STATE_APPENDING) if __name__ == '__main__': unittest.main()
34.6875
63
0.649324
541
4,440
5.157116
0.077634
0.370968
0.42043
0.154839
0.941577
0.919355
0.915412
0.915412
0.898208
0.898208
0
0.006987
0.226351
4,440
127
64
34.96063
0.80524
0
0
0.884615
0
0
0.042568
0
0
0
0
0
0.663462
1
0.019231
false
0
0.028846
0
0.057692
0
0
0
0
null
1
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
10
2a469749b52b2b413e42b92363ee2989065349f9
106
py
Python
Codewars/8kyu/is-it-a-number/Python/test.py
RevansChen/online-judge
ad1b07fee7bd3c49418becccda904e17505f3018
[ "MIT" ]
7
2017-09-20T16:40:39.000Z
2021-08-31T18:15:08.000Z
Codewars/8kyu/is-it-a-number/Python/test.py
RevansChen/online-judge
ad1b07fee7bd3c49418becccda904e17505f3018
[ "MIT" ]
null
null
null
Codewars/8kyu/is-it-a-number/Python/test.py
RevansChen/online-judge
ad1b07fee7bd3c49418becccda904e17505f3018
[ "MIT" ]
null
null
null
# Python - 3.6.0 test.assert_equals(isDigit('s2324'), False) test.assert_equals(isDigit('-234.4'), True)
21.2
43
0.707547
17
106
4.294118
0.764706
0.273973
0.438356
0.630137
0
0
0
0
0
0
0
0.113402
0.084906
106
4
44
26.5
0.639175
0.132075
0
0
0
0
0.122222
0
0
0
0
0
1
1
0
true
0
0
0
0
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
1
0
0
0
0
0
0
7
2a609a64bb8d2da5534f203dd9bec0b73c7725f1
20,582
py
Python
test/unit/testPayment.py
Ghost93/python-sdk
24fffc1d7cfb53e5f8548c6afa05f6706411a8ba
[ "MIT" ]
2
2021-05-07T16:07:55.000Z
2021-12-07T01:52:33.000Z
test/unit/testPayment.py
Ghost93/python-sdk
24fffc1d7cfb53e5f8548c6afa05f6706411a8ba
[ "MIT" ]
5
2018-02-13T16:11:51.000Z
2021-06-25T10:45:36.000Z
test/unit/testPayment.py
Ghost93/python-sdk
24fffc1d7cfb53e5f8548c6afa05f6706411a8ba
[ "MIT" ]
8
2018-06-05T19:20:10.000Z
2021-12-07T02:35:34.000Z
import sys
import os
import unittest

sys.path.append(os.path.abspath('.'))

import paymentrails.configuration
import paymentrails.payment
from mock import MagicMock, Mock, patch
import paymentrails.exceptions.notFoundException
import paymentrails.exceptions.invalidFieldException

# Canned recipient sub-document shared by the find and search payloads below.
# Defined once so the fake return values and the test expectations cannot drift.
_RECIPIENT = {
    "id": "R-91XQ0PJH39U54",
    "referenceId": "U345678912",
    "email": "Johnny@test.com",
    "name": "mark Test",
    "lastName": "Test",
    "firstName": "mark",
    "type": "individual",
    "status": "active",
    "language": "en",
    "complianceStatus": "pending",
    "dob": "null",
    "payoutMethod": "paypal",
    "updatedAt": "2017-05-08T14:49:12.512Z",
    "createdAt": "2017-05-04T16:17:04.378Z",
    "gravatarUrl": "https://s3.amazonaws.com/static.api.paymentrails.com/icon_user.svg",
    "compliance": {"status": "pending", "checkedAt": "null"},
    "payout": {
        "autoswitch": {"limit": 1000, "active": "false"},
        "holdup": {"limit": 1000, "active": "false"},
        "primary": {"method": "paypal", "currency": {"currency": {}}},
        "method": "paypal",
        "accounts": {"paypal": {"address": "testpaypal@example.com"}},
        "methodDisplay": "PayPal",
    },
    "address": {
        "street1": "null",
        "street2": "null",
        "city": "null",
        "postalCode": "null",
        "country": "null",
        "region": "null",
        "phone": "null",
    },
}

# Response returned by fake_find and asserted in test_retrieve_payment.
_FIND_RESPONSE = {
    "ok": "true",
    "payment": {
        "id": "P-91XQ40VT54GQM",
        "recipient": _RECIPIENT,
        "status": "pending",
        "sourceAmount": "900.90",
        "exchangeRate": "1.000000",
        "fees": "0.00",
        "recipientFees": "0.00",
        "targetAmount": "65.00",
        "fxRate": "2.000000",
        "memo": "",
        "processedAt": "null",
        "createdAt": "2017-05-08T18:30:45.012Z",
        "updatedAt": "2017-05-12T18:39:06.061Z",
        "merchantFees": "0.00",
        "compliance": {"status": "pending", "checkedAt": "null"},
        "sourceCurrency": "USD",
        "sourceCurrencyName": "US Dollar",
        "targetCurrency": "USD",
        "targetCurrencyName": "US Dollar",
        "batch": {
            "id": "B-91XQ40VT5HF18",
            "createdAt": "2017-05-08T18:30:44.905Z",
            "updatedAt": "2017-05-12T18:39:06.125Z",
            "sentAt": "null",
            "completedAt": "null",
        },
    },
}

# Response returned by fake_search and asserted in test_list_allPaymentsWithQueries.
_SEARCH_RESPONSE = {
    "ok": "true",
    "payments": [
        {
            "id": "P-912Q8JUA75HNC",
            "recipient": _RECIPIENT,
            "status": "pending",
            "sourceAmount": "15.00",
            "exchangeRate": "1.000000",
            "fees": "0.00",
            "recipientFees": "0.00",
            "targetAmount": "65.00",
            "fxRate": "2.000000",
            "memo": "",
            "processedAt": "null",
            "createdAt": "2017-05-08T17:18:16.948Z",
            "updatedAt": "2017-05-11T19:05:53.172Z",
            "merchantFees": "0.00",
            "compliance": {"status": "pending", "checkedAt": "null"},
            "sourceCurrency": "USD",
            "sourceCurrencyName": "US Dollar",
            "targetCurrency": "USD",
            "targetCurrencyName": "US Dollar",
            "batch": {
                "id": "B-91XQ2ZHXARPJE",
                "createdAt": "2017-05-08T17:18:16.893Z",
                "updatedAt": "2017-05-11T19:05:53.265Z",
                "sentAt": "null",
                "completedAt": "null",
            },
        }
    ],
    "meta": {"page": 1, "pages": 1, "records": 1},
}


def fake_find(paymentId, batchId, term=""):
    """Stand-in for Payment.find: validates both ids, then returns a canned payment.

    Raises InvalidFieldException for None ids and NotFoundException for ids
    that do not carry the expected "P"/"B" prefix.
    """
    if paymentId is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Payment id cannot be None")
    if paymentId[0:1] != "P":
        raise paymentrails.exceptions.notFoundException.NotFoundException("Payment id is invalid")
    if batchId is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Batch id cannot be None")
    if batchId[0:1] != "B":
        raise paymentrails.exceptions.notFoundException.NotFoundException("Batch id is invalid")
    return _FIND_RESPONSE


def fake_update(paymentId, batchId, body):
    """Stand-in for Payment.update: validates ids and body, returns a canned ack."""
    if paymentId is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Payment id cannot be None")
    if batchId is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Batch id cannot be None")
    if batchId[0:1] != "B":
        raise paymentrails.exceptions.notFoundException.NotFoundException("Batch id is invalid")
    if paymentId[0:1] != "P":
        raise paymentrails.exceptions.notFoundException.NotFoundException("Payment id is invalid")
    if body is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Body cannot be None")
    return {"ok": "true", "object": "updated"}


def fake_create(body, batchId):
    """Stand-in for Payment.create: validates batch id and body, returns a canned ack."""
    if batchId is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Batch id cannot be None")
    if body is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Body is invalid")
    if batchId[0:1] != "B":
        raise paymentrails.exceptions.notFoundException.NotFoundException("Batch Id is invalid")
    return {"ok": "true"}


def fake_delete(paymentId, batchId):
    """Stand-in for Payment.delete: validates both ids, returns a canned ack."""
    if paymentId is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Payment id cannot be None")
    if batchId is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Batch id cannot be None")
    if batchId[0:1] != "B":
        raise paymentrails.exceptions.notFoundException.NotFoundException("Batch id is invalid")
    if paymentId[0:1] != "P":
        raise paymentrails.exceptions.notFoundException.NotFoundException("Payment id is invalid")
    return {"ok": "true", "object": "deleted"}


def fake_search(page, pageSize, term):
    """Stand-in for Payment.search: rejects a None term, returns a canned page."""
    if term is None:
        raise paymentrails.exceptions.invalidFieldException.InvalidFieldException("Term cannot be None")
    return _SEARCH_RESPONSE


class TestPayment(unittest.TestCase):
    """Unit tests for the Payment API wrapper, with every HTTP call patched out."""

    public_key = ("public_key")
    private_key = ("private_key")

    @classmethod
    def _configure_keys(cls):
        # Every test starts by installing the dummy API keys; factored out of
        # the 22 call sites that previously repeated both lines verbatim.
        paymentrails.configuration.Configuration.set_public_key(cls.public_key)
        paymentrails.configuration.Configuration.set_private_key(cls.private_key)

    @patch('paymentrails.payment.Payment.find', fake_find)
    def test_retrieve_payment(self):
        self._configure_keys()
        batchId = "B-912Q61G0BRVGC"
        response = paymentrails.payment.Payment.find('P-91XQ0U0B1RW5M', batchId)
        self.assertEqual(response, _FIND_RESPONSE)

    @patch('paymentrails.payment.Payment.find', fake_find)
    def test_retrieve_payment_InvalidPaymentId(self):
        with self.assertRaises(paymentrails.exceptions.notFoundException.NotFoundException):
            self._configure_keys()
            batchId = "B-912Q61G0BRVGC"
            paymentrails.payment.Payment.find('wdwdwd', batchId)

    @patch('paymentrails.payment.Payment.find', fake_find)
    def test_retrieve_payment_InvalidBatchId(self):
        with self.assertRaises(paymentrails.exceptions.notFoundException.NotFoundException):
            self._configure_keys()
            batchId = "wwdwdwd"
            paymentrails.payment.Payment.find('P-91XQ0U0B1RW5M', batchId)

    @patch('paymentrails.payment.Payment.find', fake_find)
    def test_retrieve_payment_None_Batch(self):
        with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
            self._configure_keys()
            paymentrails.payment.Payment.find('P-91XQ0U0B1RW5M', None)

    @patch('paymentrails.payment.Payment.find', fake_find)
    def test_retrieve_payment_None_Payment(self):
        with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
            self._configure_keys()
            paymentrails.payment.Payment.find(None, "B-hghgh")

    @patch('paymentrails.payment.Payment.create', fake_create)
    def test_create_payment(self):
        self._configure_keys()
        body = {"payments": [{"recipient": {"id": "R-91XPYX3V2MM1G"},
                              "sourceAmount": "65", "memo": "", "sourceCurrency": "CAD"}]}
        batchId = "B-912Q61G0BRVGC"
        response = paymentrails.payment.Payment.create(body, batchId)
        self.assertEqual(response, {"ok": "true"})

    @patch('paymentrails.payment.Payment.create', fake_create)
    def test_create_payment_InvalidBatchId(self):
        with self.assertRaises(paymentrails.exceptions.notFoundException.NotFoundException):
            self._configure_keys()
            body = {"payments": [{"recipient": {"id": "R-91XPYX3V2MM1G"},
                                  "sourceAmount": "65", "memo": "", "sourceCurrency": "CAD"}]}
            batchId = "dddd"
            paymentrails.payment.Payment.create(body, batchId)

    @patch('paymentrails.payment.Payment.create', fake_create)
    def test_create_payment_None_body(self):
        with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
            self._configure_keys()
            batchId = "B-dddd"
            paymentrails.payment.Payment.create(None, batchId)

    @patch('paymentrails.payment.Payment.create', fake_create)
    def test_create_payment_None_batch(self):
        with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
            self._configure_keys()
            body = {"payments": [{"recipient": {"id": "R-91XPYX3V2MM1G"},
                                  "sourceAmount": "65", "memo": "", "sourceCurrency": "CAD"}]}
            paymentrails.payment.Payment.create(body, None)

    @patch('paymentrails.payment.Payment.update', fake_update)
    def test_update_payment(self):
        self._configure_keys()
        body = {"sourceAmount": "900.90"}
        batchId = "B-912Q61G0BRVGC"
        response = paymentrails.payment.Payment.update("P-91XQ0U0B1RW5M", batchId, body)
        self.assertEqual(response, {"ok": "true", "object": "updated"})

    @patch('paymentrails.payment.Payment.update', fake_update)
    def test_update_payment_InvalidBatchId(self):
        with self.assertRaises(paymentrails.exceptions.notFoundException.NotFoundException):
            self._configure_keys()
            body = {"sourceAmount": "900.90"}
            batchId = "dddd"
            paymentrails.payment.Payment.update("P-91XQ0U0B1RW5M", batchId, body)

    @patch('paymentrails.payment.Payment.update', fake_update)
    def test_update_payment_InvalidPaymentId(self):
        with self.assertRaises(paymentrails.exceptions.notFoundException.NotFoundException):
            self._configure_keys()
            body = {"sourceAmount": "900.90"}
            batchId = "B-912Q61G0BRVGC"
            paymentrails.payment.Payment.update("ddd", batchId, body)

    @patch('paymentrails.payment.Payment.update', fake_update)
    def test_update_payment_None_body(self):
        with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
            self._configure_keys()
            batchId = "B-dddd"
            paymentrails.payment.Payment.update("P-91XQ0U0B1RW5M", batchId, None)

    @patch('paymentrails.payment.Payment.update', fake_update)
    def test_update_payment_None_payment(self):
        with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
            self._configure_keys()
            body = {"sourceAmount": "900.90"}
            batchId = "dddd"
            paymentrails.payment.Payment.update(None, batchId, body)

    @patch('paymentrails.payment.Payment.update', fake_update)
    def test_update_payment_None_batch(self):
        with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
            self._configure_keys()
            body = {"sourceAmount": "900.90"}
            paymentrails.payment.Payment.update("P-91XQ0U0B1RW5M", None, body)

    @patch('paymentrails.payment.Payment.delete', fake_delete)
    def test_delete_payment(self):
        self._configure_keys()
        batchId = "B-912Q61G0BRVGC"
        response = paymentrails.payment.Payment.delete('P-912Q61G06TT6A', batchId)
        self.assertEqual(response, {"ok": "true", "object": "deleted"})

    @patch('paymentrails.payment.Payment.delete', fake_delete)
    def test_delete_payment_InvalidBatchId(self):
        with self.assertRaises(paymentrails.exceptions.notFoundException.NotFoundException):
            self._configure_keys()
            batchId = "ffff"
            paymentrails.payment.Payment.delete('P-912Q61G06TT6A', batchId)

    @patch('paymentrails.payment.Payment.delete', fake_delete)
    def test_delete_payment_None_payment(self):
        with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
            self._configure_keys()
            batchId = "B-ffff"
            paymentrails.payment.Payment.delete(None, batchId)

    @patch('paymentrails.payment.Payment.delete', fake_delete)
    def test_delete_payment_None_batch(self):
        with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
            self._configure_keys()
            paymentrails.payment.Payment.delete("P-fhfh", None)

    @patch('paymentrails.payment.Payment.delete', fake_delete)
    def test_delete_payment_InvalidPaymentId(self):
        with self.assertRaises(paymentrails.exceptions.notFoundException.NotFoundException):
            self._configure_keys()
            batchId = "B-912Q61G0BRVGC"
            paymentrails.payment.Payment.delete('fffff', batchId)

    @patch('paymentrails.payment.Payment.search', fake_search)
    def test_list_allPaymentsWithQueries(self):
        self._configure_keys()
        response = paymentrails.payment.Payment.search(1, 10, "hnc")
        self.assertEqual(response, _SEARCH_RESPONSE)

    @patch('paymentrails.payment.Payment.search', fake_search)
    def test_list_allPayments_None_body(self):
        with self.assertRaises(paymentrails.exceptions.invalidFieldException.InvalidFieldException):
            self._configure_keys()
            paymentrails.payment.Payment.search(1, 10, None)


if __name__ == '__main__':
    unittest.main()
81.351779
1,564
0.722525
2,173
20,582
6.730327
0.09434
0.028308
0.078222
0.12335
0.953573
0.952274
0.93559
0.923487
0.903453
0.887863
0
0.047145
0.115781
20,582
252
1,565
81.674603
0.75647
0
0
0.669683
0
0
0.303178
0.069187
0
0
0
0
0.099548
1
0.122172
false
0
0.036199
0
0.19457
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
aa7d856091377820ac55301c149cd841d385f82a
89
py
Python
testm.py
sintrb/sinutils
ab74180ed06e19006ce23d45bb9c6056d8231a14
[ "MIT" ]
null
null
null
testm.py
sintrb/sinutils
ab74180ed06e19006ce23d45bb9c6056d8231a14
[ "MIT" ]
null
null
null
testm.py
sintrb/sinutils
ab74180ed06e19006ce23d45bb9c6056d8231a14
[ "MIT" ]
null
null
null
# -*- coding: UTF-8 -*
"""Created on 2021-04-16."""


def test():
    """Print the fixed smoke-test marker string."""
    print('hi 43311')
9.888889
22
0.516854
13
89
3.538462
1
0
0
0
0
0
0
0
0
0
0
0.202899
0.224719
89
8
23
11.125
0.463768
0.483146
0
0
0
0
0.210526
0
0
0
0
0
0
1
0.5
true
0
0
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
1
0
7
aac35425ddc8430c989cd348e418763f50e707fd
9,861
py
Python
egybestAPI.py
unl1kely/egybestAPI
97560aac848ff52416f8805039babfb375d86e3a
[ "MIT" ]
2
2021-02-15T16:50:12.000Z
2022-01-29T00:12:55.000Z
egybestAPI.py
unl1kely/egybestAPI
97560aac848ff52416f8805039babfb375d86e3a
[ "MIT" ]
1
2021-02-13T17:45:37.000Z
2021-02-27T11:49:25.000Z
egybestAPI.py
unl1kely/egybestAPI
97560aac848ff52416f8805039babfb375d86e3a
[ "MIT" ]
1
2021-05-20T22:46:22.000Z
2021-05-20T22:46:22.000Z
from bs4 import BeautifulSoup as bs
from JskPy import encodeUrl, best_match
from requests import get

domainEgyBest = "https://egybest.online"
dataUrlScript = "https://cdn-static.egybest.net/packed/G78uOQIvP42dDsB.js"

# NOTE(review): importing this module performs a blocking network request and
# may call exit(). The reachability probe tests that "200" appears in
# str(Response) (e.g. "<Response [200]>") — fragile, but preserved as-is.
egyResponse = str(get(domainEgyBest))
if "200" not in egyResponse:
    print(f"[!] - Couldn't reach {domainEgyBest}")
    input("Press Enter to exit...")
    exit()


class Serie:
    """Looks up a TV serie on egybest and resolves per-episode watch/download links.

    After construction, `found` is 1/0 and `message` describes the outcome.
    """

    def __init__(self, title):
        self.title = title
        self.search_url = f"{domainEgyBest}/explore/?q={encodeUrl(self.title)}"
        self.search_page = get(self.search_url).text
        self.search_soup = bs(self.search_page, "html.parser")
        self.result_div = self.search_soup.find(id="movies")
        if self.result_div is None:
            self.found = 0
            self.message = f"Incorrect input : {self.title}"
        else:
            self.result_list = self.result_div.find_all("a")
            if len(self.result_list) == 0:
                self.found = 0
                self.message = f"{self.title} - Serie not found."
            else:
                self.title_link = False
                self.series = []
                for a_tag in self.result_list:
                    if "/series/" in a_tag.get("href"):  # is serie
                        self.series.append(a_tag)
                if not self.series:
                    self.found = 0
                    self.message = f'Nothing found by "{self.title}".'
                else:
                    self.titles = [a_tag.find(class_="title").text for a_tag in self.series]
                    self.result_index = best_match(self.title, self.titles)
                    if self.result_index not in range(len(self.titles)):
                        self.found = 0
                        self.message = f"{self.title} - Serie not found."
                    else:
                        self.name = self.titles[self.result_index]
                        self.title_link = self.series[self.result_index].get("href")
                        self.title_page = get(self.title_link).text
                        self.title_soup = bs(self.title_page, "html.parser")
                        # The second "movies_small" block holds the latest episode;
                        # its URL embeds the show id between "episode/" and "-season-".
                        self.lastEpLink = self.title_soup.find_all(class_="movies_small")[1].find("a").get("href")
                        self.end_index = self.lastEpLink.index("-season-")
                        self.start_index = self.lastEpLink.index("episode/") + 8  # len("episode/") = 8
                        self.show_id = self.lastEpLink[self.start_index:self.end_index]
                        self.found = 1
                        self.message = "Found serie : " + self.name

    @staticmethod
    def define(title):
        # Stashes a module-global instance for callers that use the global API.
        global current_
        current_ = Serie(title)

    def watch(self, season, episode, quality="Auto"):
        """Resolve the streaming link for one episode; returns a status message."""
        self.season = season
        self.episode = episode
        self.page_link = "{0}/episode/{1}-season-{2}-ep-{3}".format(domainEgyBest, self.show_id, self.season, self.episode)
        self.page_soup = bs(get(self.page_link).text, "html.parser")
        self.watch_quality = quality
        if self.watch_quality == "Auto":
            # iframe
            self.watch_link = domainEgyBest + self.page_soup.find("iframe").get("src")
        else:
            self.tbody = self.page_soup.find(class_="dls_table")
            self.watch_links = self.tbody.find_all(class_="nop btn b dl _open_window")
            if len(self.watch_links) == 4:
                if self.watch_quality == "240p":
                    print("[!] - 240p not available for this episode. Using 360p instead...")
                    self.watch_quality = "360p"
            # NOTE(review): qualities_dict is not defined anywhere in this file —
            # presumably a quality-label -> link-index map supplied elsewhere; confirm.
            self.vidstream_link = domainEgyBest + self.watch_links[qualities_dict[self.watch_quality]].get("data-url")
            print(self.vidstream_link, 3 * "\n")
            self.vidstream_page = bs(get(self.vidstream_link).text, "html.parser")
            self.vidstream_source = self.vidstream_page.find("source")
            self.watch_link = self.vidstream_source.get("src")
            print("self vidstream source", self.vidstream_source)
        if __name__ != '__main__':
            self.message = f"{self.name} S{self.season}E{self.episode} link copied to clipboard"
        else:
            self.message = self.watch_link
        return self.message

    def download(self, season, episode, quality="720p"):
        """Resolve the download link for one episode (currently disabled)."""
        # Author's short-circuit: download is being reworked, so everything
        # below this return is intentionally unreachable dead code.
        return 0
        self.season = season
        self.episode = episode
        self.page_link = "{0}/episode/{1}-season-{2}-ep-{3}".format(domainEgyBest, self.show_id, self.season, self.episode)
        self.page_soup = bs(get(self.page_link).text, "html.parser")
        self.download_quality = quality
        if self.download_quality == "Auto":
            self.download_quality = "720p"
        self.tbody = self.page_soup.find(class_="dls_table")
        self.download_links = self.tbody.find_all(class_="nop btn g dl _open_window")
        if len(self.download_links) == 4:
            if self.download_quality == "240p":
                print("[!] - 240p not available for this episode. Using 360p instead...")
                self.download_quality = "360p"
        self.vidstream_link = domainEgyBest + self.download_links[qualities_dict[self.download_quality]].get("data-url")
        # print(self.vidstream_link, 3*"\n")
        self.vidstream_page = bs(get(self.vidstream_link).text, "html.parser")
        self.vidstream_bigbuttons = self.vidstream_page.find("p").find_all(class_="bigbutton")
        # print(self.vidstream_bigbuttons)
        self.download_link = self.vidstream_bigbuttons[0].get("href")
        # print(self.vidstream_link)
        if __name__ != '__main__':
            self.message = f"{self.name} S{self.season}E{self.episode} download link copied to clipboard"
        else:
            self.message = self.download_link
        return self.message


# download:g0;watch:b1
class Film:
    """Looks up a movie on egybest and resolves its watch/download links."""

    def __init__(self, title):
        # ==============================
        self.title = title
        # NOTE(review): unlike Serie, this manually %-encodes spaces instead of
        # using encodeUrl — preserved as-is; behavior of encodeUrl is unknown here.
        self.search_url = f"{domainEgyBest}/explore/?q={self.title.replace(' ', '%20')}"
        self.search_page = get(self.search_url).text
        self.search_soup = bs(self.search_page, "html.parser")
        self.result_div = self.search_soup.find(id="movies")
        if self.result_div is None:
            self.found = 0
            self.message = f"Incorrect input : {self.title}"
        else:
            self.result_list = self.result_div.find_all("a")
            if len(self.result_list) == 0:
                self.found = 0
                self.message = f"{self.title} - Film not found."
            else:
                self.title_link = False
                self.films = []
                for a_tag in self.result_list:
                    if "/movie/" in a_tag.get("href"):  # is film
                        self.films.append(a_tag)
                if not self.films:
                    self.found = 0
                    self.message = f'Nothing found by "{self.title}".'
                else:
                    self.titles = [a_tag.find(class_="title").text for a_tag in self.films]
                    self.result_index = best_match(self.title, self.titles)
                    if self.result_index not in range(len(self.titles)):
                        self.found = 0
                        self.message = f"{self.title} - Film not found."
                    else:
                        self.name = self.titles[self.result_index]
                        self.title_link = self.films[self.result_index].get("href")
                        self.title_page = get(self.title_link).text
                        self.title_soup = bs(self.title_page, "html.parser")
                        self.found = 1
                        self.message = "Found film : " + self.name

    @staticmethod
    def define(title):
        # Stashes a module-global instance for callers that use the global API.
        global current_
        current_ = Film(title)

    def watch(self, quality="Auto"):
        """Resolve the streaming link for the movie; returns a status message."""
        self.watch_quality = quality
        if self.watch_quality == "Auto":
            # iframe
            self.watch_link = domainEgyBest + self.title_soup.find("iframe").get("src")
        else:
            self.tbody = self.title_soup.find(class_="dls_table")
            self.watch_links = self.tbody.find_all(class_="nop btn b dl _open_window")
            if len(self.watch_links) == 4:
                if self.watch_quality == "240p":
                    print("[!] - 240p not available for this episode. Using 360p instead...")
                    self.watch_quality = "360p"
            # NOTE(review): qualities_dict undefined in this file — see Serie.watch.
            self.vidstream_link = domainEgyBest + self.watch_links[qualities_dict[self.watch_quality]].get("data-url")
            # print(self.vidstream_link, 3*"\n")
            self.vidstream_page = bs(get(self.vidstream_link).text, "html.parser")
            self.vidstream_source = self.vidstream_page.find("source")
            self.watch_link = self.vidstream_source.get("src")
            # print("self vidstream source", self.vidstream_source)
        if __name__ != '__main__':
            self.message = f"{self.name} video link copied to clipboard"
        else:
            self.message = self.watch_link
        return self.message

    def download(self, quality="720p"):
        """Resolve the download link for the movie (currently disabled)."""
        return 0  # currently working on this — code below is unreachable
        self.download_quality = quality
        if self.download_quality == "Auto":
            self.download_quality = "720p"
        self.tbody = self.title_soup.find(class_="dls_table")
        self.download_links = self.tbody.find_all(class_="nop btn g dl _open_window")
        if len(self.download_links) == 4:
            if self.download_quality == "240p":
                print("[!] - 240p not available for this episode. Using 360p instead...")
                self.download_quality = "360p"
        self.vidstream_link = domainEgyBest + self.download_links[qualities_dict[self.download_quality]].get("data-url")
        # print(self.vidstream_link, 3*"\n")
        self.vidstream_page = bs(get(self.vidstream_link).text, "html.parser")
        self.vidstream_bigbuttons = self.vidstream_page.find("p").find_all(class_="bigbutton")
        # print(self.vidstream_bigbuttons)
        self.download_link = self.vidstream_bigbuttons[0].get("href")
        # print(self.vidstream_link)
        if __name__ != '__main__':
            self.message = f"{self.name} ({self.download_quality}) download link copied to clipboard"
        else:
            self.message = self.download_link
        return self.message
44.219731
118
0.671433
1,382
9,861
4.60275
0.111433
0.05801
0.037416
0.019808
0.886653
0.884138
0.859142
0.859142
0.846408
0.810879
0
0.013724
0.172396
9,861
223
119
44.219731
0.765715
0.039854
0
0.752688
0
0.010753
0.200923
0.030005
0
0
0
0
0
1
0.043011
false
0
0.016129
0
0.102151
0.037634
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
2aee82d0752affb83ba587e5bdd11e28cd06c90e
2,297
py
Python
icarus/scenarios/test/test_workload.py
oascigil/icarus_edge_comp
b7bb9f9b8d0f27b4b01469dcba9cfc0c4949d64b
[ "MIT" ]
5
2021-03-20T09:22:55.000Z
2021-12-20T17:01:33.000Z
icarus/scenarios/test/test_workload.py
oascigil/icarus_edge_comp
b7bb9f9b8d0f27b4b01469dcba9cfc0c4949d64b
[ "MIT" ]
1
2021-12-13T07:40:46.000Z
2021-12-20T16:59:08.000Z
icarus/scenarios/test/test_workload.py
oascigil/icarus_edge_comp
b7bb9f9b8d0f27b4b01469dcba9cfc0c4949d64b
[ "MIT" ]
1
2021-11-25T05:42:20.000Z
2021-11-25T05:42:20.000Z
# -*- coding: utf-8 -*-
import unittest

import icarus.scenarios as workload


class TestYCBS(unittest.TestCase):
    """Tests for the YCSB workload generator (workloads A, B and C)."""

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def _assert_events(self, workload_name, allowed_ops):
        """Generate 1 warmup + 2 measured events and validate every one.

        Factors out the body previously triplicated across test_a/b/c, and
        fixes the original inconsistency where the first event was checked
        through ``event[0]`` instead of the ``ev_1`` alias.
        """
        n_items = 5
        events = list(workload.YCSBWorkload(workload_name, n_items, 1, 2))
        self.assertEqual(len(events), 3)
        item_range = range(1, n_items + 1)
        for i, ev in enumerate(events):
            if i == 0:
                # the single warmup request must not be logged
                self.assertFalse(ev['log'])
            else:
                self.assertTrue(ev['log'])
            self.assertIn(ev['item'], item_range)
            if len(allowed_ops) == 1:
                self.assertEqual(ev['op'], allowed_ops[0])
            else:
                self.assertIn(ev['op'], allowed_ops)

    def test_a(self):
        self._assert_events("A", ["READ", "UPDATE"])

    def test_b(self):
        self._assert_events("B", ["READ", "UPDATE"])

    def test_c(self):
        # Workload C is read-only
        self._assert_events("C", ["READ"])
31.465753
63
0.554201
326
2,297
3.748466
0.144172
0.07365
0.171849
0.125205
0.821604
0.821604
0.821604
0.821604
0.723404
0.691489
0
0.045024
0.265128
2,297
72
64
31.902778
0.67891
0.009142
0
0.737705
0
0
0.068602
0
0
0
0
0
0.491803
1
0.114754
false
0.065574
0.032787
0
0.163934
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
1
0
0
0
0
0
9
631e410fda7af47e590c1b29d5d5fc0b94793246
7,651
py
Python
olc_webportalv2/geneseekr/tests/test_views.py
OLC-Bioinformatics/olc_genomics_portal
d70ec669a3a49106f8290fff5dee089726259a23
[ "MIT" ]
3
2019-01-03T21:22:21.000Z
2019-04-23T15:47:29.000Z
olc_webportalv2/geneseekr/tests/test_views.py
OLC-Bioinformatics/olc_genomics_portal
d70ec669a3a49106f8290fff5dee089726259a23
[ "MIT" ]
49
2019-01-03T18:15:12.000Z
2022-03-11T23:37:20.000Z
olc_webportalv2/geneseekr/tests/test_views.py
OLC-Bioinformatics/olc_webportalv2
d70ec669a3a49106f8290fff5dee089726259a23
[ "MIT" ]
58
2019-01-03T21:21:59.000Z
2021-11-02T18:00:20.000Z
from django.test import TestCase, Client from django.urls import reverse from olc_webportalv2.users.models import User from olc_webportalv2.geneseekr.models import GeneSeekrRequest, Tree, AMRSummary, ProkkaRequest class SampleTestCase(TestCase): @classmethod def setUpTestData(cls): user = User.objects.create(username='TestUser') user.set_password('password') user.save() user1 = User.objects.create(username='Test') user1.set_password('password') user1.save() request_one = GeneSeekrRequest.objects.create(name='123',query_sequence='>seqAAAAAAAA', user=user) request_two = GeneSeekrRequest.objects.create(name='456',query_sequence='>seqAAATTTTA', user=user) tree_one = Tree.objects.create(name='123',user=user) tree_two = Tree.objects.create(name='456',user=user) amr_one = AMRSummary.objects.create(name='123',user=user) amr_two = AMRSummary.objects.create(name='456',user=user) prokka_one = ProkkaRequest.objects.create(name='123',user=user) prokka_two = ProkkaRequest.objects.create(name='456',user=user) # Geneseekr View Tests -----------------------------------------------------------------------------------------------------------------> def test_geneseekr_home_login_required(self): resp = self.client.get(reverse('geneseekr:geneseekr_home')) self.assertEqual(resp.status_code, 302) # Should get 302 redirected if user is not logged in. 
def test_geneseekr_home(self): self.client.login(username='TestUser', password='password') resp = self.client.get(reverse('geneseekr:geneseekr_home')) self.assertEqual(resp.status_code, 200) geneseekr_requests = GeneSeekrRequest.objects.filter() for request in geneseekr_requests: self.assertIn(request.name, resp.content.decode('utf-8')) def test_user_geneseekr_home(self): self.client.login(username='Test', password='password') resp = self.client.get(reverse('geneseekr:geneseekr_home')) self.assertEqual(resp.status_code, 200) geneseekr_requests = GeneSeekrRequest.objects.filter() for request in geneseekr_requests: self.assertNotIn(request.name, resp.content.decode('utf-8')) def test_geneseekr_processing_login_required(self): resp = self.client.get(reverse('geneseekr:geneseekr_processing', kwargs={'geneseekr_request_pk': 1})) self.assertEqual(resp.status_code, 302) # Should get 302 redirected if user is not logged in. def test_geneseekr_processing_404_no_run(self): self.client.login(username='TestUser', password='password') resp = self.client.get(reverse('geneseekr:geneseekr_processing', kwargs={'geneseekr_request_pk': 123})) self.assertEqual(resp.status_code, 404) # Tree View Tests -----------------------------------------------------------------------------------------------------------------> def test_tree_home_login_required(self): resp = self.client.get(reverse('geneseekr:tree_home')) self.assertEqual(resp.status_code, 302) # Should get 302 redirected if user is not logged in. 
def test_tree_home(self): self.client.login(username='TestUser', password='password') resp = self.client.get(reverse('geneseekr:tree_home')) self.assertEqual(resp.status_code, 200) tree_requests = Tree.objects.filter() for request in tree_requests: self.assertIn(request.name, resp.content.decode('utf-8')) def test_user_tree_home(self): self.client.login(username='Test', password='password') resp = self.client.get(reverse('geneseekr:tree_home')) self.assertEqual(resp.status_code, 200) tree_requests = Tree.objects.filter() for request in tree_requests: self.assertNotIn(request.name, resp.content.decode('utf-8')) def test_tree_result_login_required(self): resp = self.client.get(reverse('geneseekr:tree_result', kwargs={'tree_request_pk': 1})) self.assertEqual(resp.status_code, 302) # Should get 302 redirected if user is not logged in. def test_tree_result_404_no_run(self): self.client.login(username='TestUser', password='password') resp = self.client.get(reverse('geneseekr:tree_result', kwargs={'tree_request_pk': 123})) self.assertEqual(resp.status_code, 404) # AMR View Tests -----------------------------------------------------------------------------------------------------------------> def test_amr_home_login_required(self): resp = self.client.get(reverse('geneseekr:amr_home')) self.assertEqual(resp.status_code, 302) # Should get 302 redirected if user is not logged in. 
def test_amr_home(self): self.client.login(username='TestUser', password='password') resp = self.client.get(reverse('geneseekr:amr_home')) self.assertEqual(resp.status_code, 200) amr_requests = AMRSummary.objects.filter() for request in amr_requests: self.assertIn(request.name, resp.content.decode('utf-8')) def test_user_amr_home(self): self.client.login(username='Test', password='password') resp = self.client.get(reverse('geneseekr:amr_home')) self.assertEqual(resp.status_code, 200) amr_requests = AMRSummary.objects.filter() for request in amr_requests: self.assertNotIn(request.name, resp.content.decode('utf-8')) def test_amr_result_login_required(self): resp = self.client.get(reverse('geneseekr:amr_result', kwargs={'amr_request_pk': 1})) self.assertEqual(resp.status_code, 302) # Should get 302 redirected if user is not logged in. def test_amr_result_404_no_run(self): self.client.login(username='TestUser', password='password') resp = self.client.get(reverse('geneseekr:amr_result', kwargs={'amr_request_pk': 123})) self.assertEqual(resp.status_code, 404) # Prokka View Tests -----------------------------------------------------------------------------------------------------------------> def test_prokka_home_login_required(self): resp = self.client.get(reverse('geneseekr:prokka_home')) self.assertEqual(resp.status_code, 302) # Should get 302 redirected if user is not logged in. 
def test_prokka_home(self): self.client.login(username='TestUser', password='password') resp = self.client.get(reverse('geneseekr:prokka_home')) self.assertEqual(resp.status_code, 200) prokka_requests = ProkkaRequest.objects.filter() for request in prokka_requests: self.assertIn(request.name, resp.content.decode('utf-8')) def test_user_prokka_home(self): self.client.login(username='Test', password='password') resp = self.client.get(reverse('geneseekr:prokka_home')) self.assertEqual(resp.status_code, 200) prokka_requests = ProkkaRequest.objects.filter() for request in prokka_requests: self.assertNotIn(request.name, resp.content.decode('utf-8')) def test_prokka_result_login_required(self): resp = self.client.get(reverse('geneseekr:prokka_result', kwargs={'prokka_request_pk': 1})) self.assertEqual(resp.status_code, 302) # Should get 302 redirected if user is not logged in. def test_prokka_result_404_no_run(self): self.client.login(username='TestUser', password='password') resp = self.client.get(reverse('geneseekr:prokka_result', kwargs={'prokka_request_pk': 123})) self.assertEqual(resp.status_code, 404)
52.047619
137
0.657953
917
7,651
5.317339
0.088332
0.065628
0.057424
0.069729
0.840443
0.840443
0.805989
0.801477
0.801477
0.772765
0
0.023498
0.171219
7,651
146
138
52.40411
0.745466
0.123513
0
0.581197
0
0
0.128194
0.042283
0
0
0
0
0.239316
1
0.179487
false
0.119658
0.034188
0
0.222222
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
7
2d6ad08342201fcf5805dcd46c000a9261b8e36d
422
py
Python
dog_cat/settings.py
xiaoyu2018/pytorch_demos
f92d2e10df4a68dac299ee6029d4c38bcdb26b23
[ "Apache-2.0" ]
2
2022-02-15T02:32:15.000Z
2022-03-01T09:36:08.000Z
dog_cat/settings.py
xiaoyu2018/pytorch_demos
f92d2e10df4a68dac299ee6029d4c38bcdb26b23
[ "Apache-2.0" ]
null
null
null
dog_cat/settings.py
xiaoyu2018/pytorch_demos
f92d2e10df4a68dac299ee6029d4c38bcdb26b23
[ "Apache-2.0" ]
null
null
null
<<<<<<< HEAD DATA_DIR="D:/2021UCAS/机器学习/大作业/pytorch_learning/dog_cat/DOGvsCAT" SAVE_DIR="D:/2021UCAS/机器学习/大作业/pytorch_learning/dog_cat" EPOCH=50 BATCH_SIZE=256 IMG_SIZE=32 DEVICE="cuda:0" ======= DATA_DIR="D:/2021UCAS/机器学习/大作业/pytorch_learning/dog_cat/DOGvsCAT" SAVE_DIR="D:/2021UCAS/机器学习/大作业/pytorch_learning/dog_cat" EPOCH=50 BATCH_SIZE=256 IMG_SIZE=32 DEVICE="cuda:0" >>>>>>> 26700f85104969189a826e0cb75e794a9b87d2d2
23.444444
65
0.781991
66
422
4.757576
0.363636
0.050955
0.152866
0.203822
0.859873
0.859873
0.859873
0.859873
0.859873
0.859873
0
0.153846
0.045024
422
17
66
24.823529
0.62531
0
0
0.8
0
0
0.49763
0.469194
0
0
0
0
0
0
null
null
0
0
null
null
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
null
0
0
0
0
1
0
0
0
0
0
0
0
0
10
9340d828b660693775c48c80319b51393c472cf7
34
py
Python
external/models/basic_BERT/__init__.py
swapUniba/Elliot_refactor-tesi-Ventrella
3ddffc041696c90a6f6d3e8906c212fc4f55f842
[ "Apache-2.0" ]
null
null
null
external/models/basic_BERT/__init__.py
swapUniba/Elliot_refactor-tesi-Ventrella
3ddffc041696c90a6f6d3e8906c212fc4f55f842
[ "Apache-2.0" ]
null
null
null
external/models/basic_BERT/__init__.py
swapUniba/Elliot_refactor-tesi-Ventrella
3ddffc041696c90a6f6d3e8906c212fc4f55f842
[ "Apache-2.0" ]
null
null
null
from .basic_BERT import basic_BERT
34
34
0.882353
6
34
4.666667
0.666667
0.642857
0
0
0
0
0
0
0
0
0
0
0.088235
34
1
34
34
0.903226
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
93847827b921250ad6ded6e72261c6eb98bbdb10
1,748
py
Python
tests/surrortg/inputs/test_joystick.py
cedricsellin/surrogate
03f13d297fdd74d7e5d164821039acc38d0b2103
[ "MIT" ]
2
2020-11-25T00:29:07.000Z
2020-12-01T20:24:47.000Z
tests/surrortg/inputs/test_joystick.py
cedricsellin/surrogate
03f13d297fdd74d7e5d164821039acc38d0b2103
[ "MIT" ]
1
2021-04-10T11:59:23.000Z
2021-04-10T11:59:23.000Z
tests/surrortg/inputs/test_joystick.py
cedricsellin/surrogate
03f13d297fdd74d7e5d164821039acc38d0b2103
[ "MIT" ]
2
2021-01-28T17:43:13.000Z
2021-03-29T13:19:38.000Z
import unittest from surrortg.inputs import Joystick, Directions class TestJoystick(Joystick): async def reset(self, seat): pass class JoystickTest(unittest.TestCase): def test_get_direction_4(self): """Test that the directions are correct in 4 case """ joystick = TestJoystick() self.assertEqual(joystick.get_direction_4(0, 1), Directions.TOP) self.assertEqual(joystick.get_direction_4(0, -1), Directions.BOTTOM) self.assertEqual(joystick.get_direction_4(1, 0), Directions.RIGHT) self.assertEqual(joystick.get_direction_4(-1, 0), Directions.LEFT) self.assertEqual(joystick.get_direction_4(0, 0), Directions.MIDDLE) self.assertEqual(joystick.get_direction_4(0, 0.09), Directions.MIDDLE) def test_get_direction_8(self): """Test that the directions are correct in 8 case """ joystick = TestJoystick() self.assertEqual(joystick.get_direction_8(0, 1), Directions.TOP) self.assertEqual(joystick.get_direction_8(0, -1), Directions.BOTTOM) self.assertEqual(joystick.get_direction_8(1, 0), Directions.RIGHT) self.assertEqual(joystick.get_direction_8(-1, 0), Directions.LEFT) self.assertEqual(joystick.get_direction_8(0, 0), Directions.MIDDLE) self.assertEqual(joystick.get_direction_8(0, 0.09), Directions.MIDDLE) self.assertEqual(joystick.get_direction_8(-1, 1), Directions.TOP_LEFT) self.assertEqual(joystick.get_direction_8(1, 1), Directions.TOP_RIGHT) self.assertEqual( joystick.get_direction_8(-1, -1), Directions.BOTTOM_LEFT ) self.assertEqual( joystick.get_direction_8(1, -1), Directions.BOTTOM_RIGHT )
38.844444
78
0.693936
221
1,748
5.298643
0.167421
0.184458
0.314261
0.355252
0.841161
0.824082
0.824082
0.824082
0.695132
0.186166
0
0.039886
0.196796
1,748
44
79
39.727273
0.79416
0.058352
0
0.133333
0
0
0
0
0
0
0
0
0.533333
1
0.066667
false
0.033333
0.066667
0
0.2
0
0
0
0
null
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
7
8786b8a72ba6a32b59ec7b85b1823a87f67f1886
25,653
py
Python
metadatautilspkg/premis.py
CLACI/DarkArchiveWorkflow
385bb3fb58dd177573bd9a02412070dae5cc1b91
[ "BSD-3-Clause" ]
2
2017-10-22T20:56:15.000Z
2017-11-16T03:13:55.000Z
metadatautilspkg/premis.py
CLACI/DarkArchiveWorkflow
385bb3fb58dd177573bd9a02412070dae5cc1b91
[ "BSD-3-Clause" ]
6
2017-05-19T15:47:26.000Z
2018-02-15T20:34:10.000Z
metadatautilspkg/premis.py
CLACI/DarkArchiveWorkflow
385bb3fb58dd177573bd9a02412070dae5cc1b91
[ "BSD-3-Clause" ]
1
2017-10-05T20:49:34.000Z
2017-10-05T20:49:34.000Z
# -*- coding: utf-8 -*- # BSD 3-Clause License # # Copyright (c) 2017, ColoredInsaneAsylums # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # CREDITS # Creator: Nitin Verma (nitin dot verma at utexas dot edu) # import metadatautilspkg.globalvars as globalvars from metadatautilspkg.metadatautils import * from metadatautilspkg.adminmetadatautils import * # FUNCTION DEFINITIONS def getEventDetails(): return "" # TODO: temporary, needs more work!! def getEventOutcome(): return "" # TODO: temporary, needs more work!! 
def getEventOutcomeDetail(): return "" # TODO: temporary, needs more work!! def getLinkingAgentId(): return "" # TODO: temporary, needs more work!! def getLinkingAgentRole(): return "" # TODO: temporary, needs more work!! def initMetadataRecord(initParams): """initMetadataRecord Arguments: initParams: initial parameters needed to populate the various fields of the new metadata record. Passed as a dictionary object. This function creates a 'skeletal' metadata record, with as many fields populated as possible at this initialization stage. """ metadataRecord = {} uniqueId = getUniqueID() metadataRecord["_id"] = uniqueId # Create the ADMIN entity here: arrangementInfo = initParams[globalvars.ARRANGEMENT_INFO_LABEL] adminEntity = initAdminMetadataEntity(arrangementInfo) metadataRecord[globalvars.labels.admn_entity.name] = adminEntity # Create the PREMIS (or preservation) entity here: metadataRecord[globalvars.labels.pres_entity.name] = {} metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name] = {} metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_id.name] = {} metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_id.name][globalvars.labels.obj_id_typ.name] = globalvars.OBJ_ID_TYPE metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_id.name][globalvars.labels.obj_id_val.name] = uniqueId metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_cat.name] = globalvars.vocab.objCat metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_chars.name] = {} metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_chars.name][globalvars.labels.obj_fixity.name] = {} 
metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_chars.name][globalvars.labels.obj_fixity.name][globalvars.labels.obj_msgdgst_algo.name] = globalvars.MD_INIT_STRING metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_chars.name][globalvars.labels.obj_fixity.name][globalvars.labels.obj_msgdgst.name] = globalvars.MD_INIT_STRING metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_chars.name][globalvars.labels.obj_size.name] = initParams["fileSize"] metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_chars.name][globalvars.labels.obj_fmt.name] = {} metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_chars.name][globalvars.labels.obj_fmt.name][globalvars.labels.obj_fmt_dsgn.name] = {} metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_chars.name][globalvars.labels.obj_fmt.name][globalvars.labels.obj_fmt_dsgn.name][globalvars.labels.obj_fmt_name.name] = initParams["fmtName"] #metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_chars.name][globalvars.labels.obj_fmt.name][globalvars.labels.obj_fmt_dsgn.name][globalvars.labels.obj_fmt_ver.name] = initParams["fmtVer"] metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.obj_entity.name][globalvars.labels.obj_orig_name.name] = initParams["fileName"] # Create a parent entity (list) of all PREMIS 'event' entities. 
metadataRecord[globalvars.labels.pres_entity.name][globalvars.labels.evt_parent_entity.name] = [] print_info("The following record has been initialized: {}".format(metadataRecord)) return metadataRecord def createIDAssignmentEvent(uniqueId): eventRecord = {} eventRecord[globalvars.labels.evt_entity.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_typ.name] = globalvars.EVT_ID_TYP eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_val.name] = getUniqueID() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_typ.name] = globalvars.vocab.evtTyp.idAssgn eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_dttime.name] = getCurrentEDTFTimestamp() # Create a parent entity (list) for all PREMIS 'eventDetailInformation' entities eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name] = [] eventDetailRecord = {} # Create a single record for event detail information eventDetailRecord[globalvars.labels.evt_detail_info.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_algo.name] = globalvars.UNIQUE_ID_ALGO eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_proglang.name] = globalvars.PYTHON_VER_STR eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_mthd.name] = globalvars.UNIQUE_ID_METHOD eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_idAssgn.name] = uniqueId 
eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name].append(eventDetailRecord) eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name][globalvars.labels.evt_outcm.name] = globalvars.vocab.evtOutcm.success eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_typ.name] = globalvars.LNK_AGNT_ID_TYPE eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_val.name] = globalvars.LNK_AGNT_ID_VAL return eventRecord def createMsgDigestCalcEvent(chksm, chksmAlgo): eventRecord = {} eventRecord[globalvars.labels.evt_entity.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_typ.name] = globalvars.EVT_ID_TYP eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_val.name] = getUniqueID() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_typ.name] = globalvars.vocab.evtTyp.msgDgstCalc eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_dttime.name] = getCurrentEDTFTimestamp() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name] = [] eventDetailRecord = {} # Create a single record for event detail information eventDetailRecord[globalvars.labels.evt_detail_info.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_algo.name] = globalvars.CHECKSUM_ALGO 
eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_proglang.name] = globalvars.PYTHON_VER_STR eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_mthd.name] = globalvars.CHECKSUM_METHOD eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_msgDgst.name] = chksm eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name].append(eventDetailRecord) eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name][globalvars.labels.evt_outcm.name] = globalvars.vocab.evtOutcm.success eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_typ.name] = globalvars.LNK_AGNT_ID_TYPE eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_val.name] = globalvars.LNK_AGNT_ID_VAL return eventRecord def createFileCopyEvent(evtTyp, srcFilePath, dstFilePath): eventRecord = {} eventRecord[globalvars.labels.evt_entity.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_typ.name] = globalvars.EVT_ID_TYP eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_val.name] = getUniqueID() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_typ.name] = globalvars.vocab.evtTyp.replication eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_dttime.name] = getCurrentEDTFTimestamp() 
eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name] = [] eventDetailRecord = {} # Create a single record for event detail information eventDetailRecord[globalvars.labels.evt_detail_info.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_src.name] = srcFilePath eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_dst.name] = dstFilePath eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name].append(eventDetailRecord) eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name][globalvars.labels.evt_outcm.name] = globalvars.vocab.evtOutcm.success eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_typ.name] = globalvars.LNK_AGNT_ID_TYPE eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_val.name] = globalvars.LNK_AGNT_ID_VAL return eventRecord def createFilenameChangeEvent(dstFilePrelimPath, dstFileUniquePath): eventRecord = {} eventRecord[globalvars.labels.evt_entity.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_typ.name] = globalvars.EVT_ID_TYP eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_val.name] = getUniqueID() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_typ.name] = 
globalvars.vocab.evtTyp.filenameChg eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_dttime.name] = getCurrentEDTFTimestamp() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name] = [] eventDetailRecord = {} # Create a single record for event detail information eventDetailRecord[globalvars.labels.evt_detail_info.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_src.name] = dstFilePrelimPath eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_dst.name] = dstFileUniquePath eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name].append(eventDetailRecord) eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name][globalvars.labels.evt_outcm.name] = globalvars.vocab.evtOutcm.success eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_typ.name] = globalvars.LNK_AGNT_ID_TYPE eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_val.name] = globalvars.LNK_AGNT_ID_VAL return eventRecord def createFixityCheckEvent(status, calcChecksum): eventRecord = {} eventRecord[globalvars.labels.evt_entity.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_typ.name] = globalvars.EVT_ID_TYP 
eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_val.name] = getUniqueID() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_typ.name] = globalvars.vocab.evtTyp.fixityChk eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_dttime.name] = getCurrentEDTFTimestamp() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name][globalvars.labels.evt_outcm.name] = globalvars.vocab.evtOutcm.success eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_typ.name] = globalvars.LNK_AGNT_ID_TYPE eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_val.name] = globalvars.LNK_AGNT_ID_VAL return eventRecord def createAccessionEvent(): eventRecord = {} eventRecord[globalvars.labels.evt_entity.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_typ.name] = globalvars.EVT_ID_TYP eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_val.name] = getUniqueID() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_typ.name] = globalvars.vocab.evtTyp.accession eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_dttime.name] = getCurrentEDTFTimestamp() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name][globalvars.labels.evt_outcm.name] = globalvars.vocab.evtOutcm.success 
eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_typ.name] = globalvars.LNK_AGNT_ID_TYPE eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_val.name] = globalvars.LNK_AGNT_ID_VAL return eventRecord def createMetadataExtractionEvent(extdMethd, extdmeta): eventRecord = {} eventRecord[globalvars.labels.evt_entity.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_typ.name] = globalvars.EVT_ID_TYP eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_val.name] = getUniqueID() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_typ.name] = globalvars.vocab.evtTyp.metadataExt eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_dttime.name] = getCurrentEDTFTimestamp() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name] = [] eventDetailRecord = {} # Create a single record for event detail information eventDetailRecord[globalvars.labels.evt_detail_info.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_mthd.name] = extdMethd eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_metadataExtraction.name] = extdmeta eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name].append(eventDetailRecord) eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name] = {} 
eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name][globalvars.labels.evt_outcm.name] = globalvars.vocab.evtOutcm.success eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_typ.name] = globalvars.LNK_AGNT_ID_TYPE eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_val.name] = globalvars.LNK_AGNT_ID_VAL return eventRecord def createMetadataModificationEvent(before, after): eventRecord = {} eventRecord[globalvars.labels.evt_entity.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_typ.name] = globalvars.EVT_ID_TYP eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_val.name] = getUniqueID() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_typ.name] = globalvars.vocab.evtTyp.metadataMod eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_dttime.name] = getCurrentEDTFTimestamp() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name] = [] eventDetailRecord = {} # Create a single record for event detail information eventDetailRecord[globalvars.labels.evt_detail_info.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_before.name] = before eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_after.name] = after 
eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name].append(eventDetailRecord) eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name][globalvars.labels.evt_outcm.name] = globalvars.vocab.evtOutcm.success eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_typ.name] = globalvars.LNK_AGNT_ID_TYPE eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_val.name] = globalvars.LNK_AGNT_ID_VAL return eventRecord def createMigrationEvent(fileType, fileSize, imgWidth, imgHeight, fileName): eventRecord = {} eventRecord[globalvars.labels.evt_entity.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_typ.name] = globalvars.EVT_ID_TYP eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_id.name][globalvars.labels.evt_id_val.name] = getUniqueID() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_typ.name] = globalvars.vocab.evtTyp.migration eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_dttime.name] = getCurrentEDTFTimestamp() eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name] = [] eventDetailRecord = {} # Create a single record for event detail information eventDetailRecord[globalvars.labels.evt_detail_info.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name] = {} eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_fileType.name] = 
fileType eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_fileSize.name] = fileSize eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_imgWidth.name] = imgWidth eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_imgHeight.name] = imgHeight eventDetailRecord[globalvars.labels.evt_detail_info.name][globalvars.labels.evt_detail_ext.name][globalvars.labels.evt_detail_fileName.name] = fileName eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_detail_parent.name].append(eventDetailRecord) eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_outcm_info.name][globalvars.labels.evt_outcm.name] = globalvars.vocab.evtOutcm.success eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name] = {} eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_typ.name] = globalvars.LNK_AGNT_ID_TYPE eventRecord[globalvars.labels.evt_entity.name][globalvars.labels.evt_lnk_agnt_id.name][globalvars.labels.evt_lnk_agnt_id_val.name] = globalvars.LNK_AGNT_ID_VAL return eventRecord
75.228739
253
0.810432
3,287
25,653
6.095224
0.087314
0.328226
0.329074
0.22845
0.850262
0.845421
0.843524
0.834789
0.831794
0.831794
0
0.000254
0.080108
25,653
340
254
75.45
0.848758
0.114295
0
0.681818
0
0
0.003136
0
0
0
0
0.002941
0
1
0.068182
false
0
0.013636
0.022727
0.15
0.004545
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
87d3a2c5afdbcac9eefbb452cf51217b3b73eecb
188
py
Python
wynncraft/utils/__init__.py
martinkovacs/wynncraft-python
0c35e3eb4d080aa32b997543d21998933ffa7819
[ "0BSD" ]
null
null
null
wynncraft/utils/__init__.py
martinkovacs/wynncraft-python
0c35e3eb4d080aa32b997543d21998933ffa7819
[ "0BSD" ]
null
null
null
wynncraft/utils/__init__.py
martinkovacs/wynncraft-python
0c35e3eb4d080aa32b997543d21998933ffa7819
[ "0BSD" ]
null
null
null
from wynncraft.utils.constants import API_KEY from wynncraft.utils.constants import CACHE_TIME from wynncraft.utils.constants import TIMEOUT from wynncraft.utils.constants import RL_ENABLE
47
48
0.87766
27
188
6
0.444444
0.320988
0.444444
0.666667
0.814815
0
0
0
0
0
0
0
0.079787
188
4
49
47
0.936416
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
1
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
9
87dcb1d5748c549a84f7fa561107ed18c6cfa436
2,252
py
Python
tests/test_behavior.py
jamoralp/pyecore
df1f230e6e99dfe0a6ccfa34776b626e2bb13d63
[ "BSD-3-Clause" ]
99
2017-06-02T14:03:51.000Z
2022-03-11T06:34:11.000Z
tests/test_behavior.py
jamoralp/pyecore
df1f230e6e99dfe0a6ccfa34776b626e2bb13d63
[ "BSD-3-Clause" ]
108
2017-05-19T05:53:45.000Z
2022-03-30T04:49:47.000Z
tests/test_behavior.py
jamoralp/pyecore
df1f230e6e99dfe0a6ccfa34776b626e2bb13d63
[ "BSD-3-Clause" ]
41
2017-06-02T14:07:35.000Z
2021-12-02T06:21:01.000Z
import pytest from pyecore.ecore import * from pyecore.behavior import behavior, main, run def test_static_metamodel_behavior_injection(): @EMetaclass class A(object): pass @A.behavior def my_fun(self): return True @A.behavior def __init__(self, i=15): self.i = i a = A() assert a.my_fun() is True assert a.i == 15 a = A(i=0) assert a.i == 0 def test_static_metamodel_entry_point(): @EMetaclass class A(object): pass @A.behavior def my_fun(self, i): return i + 1 a = A() x = 1 with pytest.raises(NotImplementedError): run(a) # We add the behavior *after* the instance creation @main @A.behavior def entry_point(self, i=0): return self.my_fun(i) y = run(a, x) assert y == x + 1 def test_dynamic_metamodel_behavior_injection(): A = EClass('A') @A.behavior def my_fun(self): return True @A.behavior def __init__(self, i=15): self.i = i a = A() assert a.my_fun() is True assert a.i == 15 a = A(i=0) assert a.i == 0 def test_dynamic_metamodel_entry_point(): A = EClass('A') @A.behavior def my_fun(self, i): return i + 1 a = A() x = 1 with pytest.raises(NotImplementedError): run(a) # We add the behavior *after* the instance creation @main @A.behavior def entry_point(self, i=0): return self.my_fun(i) y = run(a, x) assert y == x + 1 def test_dynamic_metamodel_behavior_injection_newdecorator(): A = EClass('A') @behavior(A) def my_fun(self): return True @behavior(A) def __init__(self, i=15): self.i = i a = A() assert a.my_fun() is True assert a.i == 15 a = A(i=0) assert a.i == 0 def test_static_metamodel_behavior_injection_newdecorator(): @EMetaclass class A(object): ... @behavior(A) def my_fun(self, stuff): """mydoc""" return stuff + 5 @behavior(A) def __init__(self, i=15): self.i = i a = A() assert a.my_fun(0) == 5 assert a.i == 15 a = A(i=0) assert a.i == 0 assert A.my_fun.__doc__ == "mydoc"
17.060606
61
0.564831
335
2,252
3.614925
0.149254
0.053675
0.079273
0.059455
0.834021
0.828241
0.739059
0.739059
0.739059
0.725846
0
0.022713
0.315719
2,252
131
62
17.19084
0.763141
0.047069
0
0.847826
0
0
0.00374
0
0
0
0
0
0.163043
1
0.195652
false
0.021739
0.032609
0.076087
0.347826
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
3568c9af4e11e5ca5da46f8f3f5757f7d8ab0c9c
1,740
py
Python
osf/migrations/0192_add_registation_responses_fields.py
gaybro8777/osf.io
30408511510a40bc393565817b343ef5fd76ab14
[ "Apache-2.0" ]
628
2015-01-15T04:33:22.000Z
2022-03-30T06:40:10.000Z
osf/migrations/0192_add_registation_responses_fields.py
gaybro8777/osf.io
30408511510a40bc393565817b343ef5fd76ab14
[ "Apache-2.0" ]
4,712
2015-01-02T01:41:53.000Z
2022-03-30T14:18:40.000Z
osf/migrations/0192_add_registation_responses_fields.py
Johnetordoff/osf.io
de10bf249c46cede04c78f7e6f7e352c69e6e6b5
[ "Apache-2.0" ]
371
2015-01-12T16:14:08.000Z
2022-03-31T18:58:29.000Z
# -*- coding: utf-8 -*- # Generated by Django 1.11.15 on 2019-09-01 23:15 from __future__ import unicode_literals from django.db import migrations, models import osf.utils.datetime_aware_jsonfield class Migration(migrations.Migration): dependencies = [ ('osf', '0191_migrate_schemas_to_schemablocks'), ] operations = [ migrations.AddField( model_name='abstractnode', name='registration_responses', field=osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONField(blank=True, default=dict, encoder=osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONEncoder) ), migrations.AddField( model_name='abstractnode', name='registration_responses_migrated', field=models.NullBooleanField(db_index=True), ), migrations.AddField( model_name='draftregistration', name='registration_responses', field=osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONField(blank=True, default=dict, encoder=osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONEncoder), ), migrations.AddField( model_name='draftregistration', name='registration_responses_migrated', field=models.NullBooleanField(db_index=True), ), migrations.AlterField( model_name='abstractnode', name='registration_responses_migrated', field=models.NullBooleanField(db_index=True, default=True), ), migrations.AlterField( model_name='draftregistration', name='registration_responses_migrated', field=models.NullBooleanField(db_index=True, default=True), ), ]
37.021277
171
0.667241
160
1,740
7.0125
0.34375
0.048128
0.13369
0.093583
0.815508
0.762032
0.762032
0.762032
0.69697
0.69697
0
0.016579
0.237356
1,740
46
172
37.826087
0.828937
0.039655
0
0.717949
1
0
0.176259
0.122302
0
0
0
0
0
1
0
false
0
0.076923
0
0.153846
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
357cb879d8a7229d36e789aa6fa85bd7ee5d5ace
9,041
py
Python
tests/integration/test_lock.py
akudelka/sherlock
9e85f36c01e0cb1d495283f024423bc60c3f7a4e
[ "MIT" ]
165
2015-01-12T09:09:19.000Z
2022-03-14T11:26:23.000Z
tests/integration/test_lock.py
akudelka/sherlock
9e85f36c01e0cb1d495283f024423bc60c3f7a4e
[ "MIT" ]
35
2015-01-07T14:57:24.000Z
2022-03-24T17:43:28.000Z
tests/integration/test_lock.py
akudelka/sherlock
9e85f36c01e0cb1d495283f024423bc60c3f7a4e
[ "MIT" ]
38
2015-03-11T09:10:05.000Z
2022-01-17T11:29:38.000Z
''' Integration tests for backend locks. '''

import etcd
import pylibmc
import redis
import sherlock
import time
import unittest


class TestRedisLock(unittest.TestCase):
    """Integration tests for ``sherlock.RedisLock`` against a live Redis."""

    def setUp(self):
        try:
            self.client = redis.StrictRedis(host='redis')
            # StrictRedis() connects lazily, so force a round-trip here;
            # without it the except-branch below could never fire and a
            # missing server would surface as confusing per-test errors.
            self.client.ping()
        except Exception as err:
            print(str(err))
            raise Exception("You must have Redis server running on host "
                            "'redis' to be able to run integration tests.")
        self.lock_name = 'test_lock'

    def test_acquire(self):
        lock = sherlock.RedisLock(self.lock_name, client=self.client)
        self.assertTrue(lock._acquire())
        # The lock key stores the owner id of the acquiring instance.
        self.assertEqual(self.client.get(self.lock_name).decode('UTF-8'),
                         str(lock._owner))

    def test_acquire_with_namespace(self):
        lock = sherlock.RedisLock(self.lock_name, client=self.client,
                                  namespace='ns')
        self.assertTrue(lock._acquire())
        # Namespaced keys are stored as '<namespace>_<name>'.
        self.assertEqual(
            self.client.get('ns_%s' % self.lock_name).decode('UTF-8'),
            str(lock._owner))

    def test_acquire_once_only(self):
        lock1 = sherlock.RedisLock(self.lock_name, client=self.client)
        lock2 = sherlock.RedisLock(self.lock_name, client=self.client)
        self.assertTrue(lock1._acquire())
        self.assertFalse(lock2._acquire())

    def test_acquire_check_expiry(self):
        lock = sherlock.RedisLock(self.lock_name, client=self.client,
                                  expire=1)
        lock.acquire()
        time.sleep(2)
        self.assertFalse(lock.locked())

    def test_acquire_check_expire_is_not_set(self):
        lock = sherlock.RedisLock(self.lock_name, client=self.client,
                                  expire=None)
        lock.acquire()
        time.sleep(2)
        # A negative TTL means the key exists but has no expiry set.
        self.assertTrue(self.client.ttl(self.lock_name) < 0)

    def test_release(self):
        lock = sherlock.RedisLock(self.lock_name, client=self.client)
        lock._acquire()
        lock._release()
        self.assertEqual(self.client.get(self.lock_name), None)

    def test_release_with_namespace(self):
        lock = sherlock.RedisLock(self.lock_name, client=self.client,
                                  namespace='ns')
        lock._acquire()
        lock._release()
        self.assertEqual(self.client.get('ns_%s' % self.lock_name), None)

    def test_release_own_only(self):
        lock1 = sherlock.RedisLock(self.lock_name, client=self.client)
        lock2 = sherlock.RedisLock(self.lock_name, client=self.client)
        lock1._acquire()
        # A lock may only be released by the instance that acquired it.
        self.assertRaises(sherlock.LockException, lock2._release)
        lock1._release()

    def test_locked(self):
        lock = sherlock.RedisLock(self.lock_name, client=self.client)
        lock._acquire()
        self.assertTrue(lock._locked)
        lock._release()
        self.assertFalse(lock._locked)

    def test_deleting_lock_object_releases_the_lock(self):
        lock = sherlock.lock.RedisLock(self.lock_name, client=self.client)
        lock.acquire()
        self.assertEqual(self.client.get(self.lock_name).decode('UTF-8'),
                         str(lock._owner))
        # Dropping the last reference must release the lock via __del__.
        del lock
        self.assertEqual(self.client.get(self.lock_name), None)

    def tearDown(self):
        self.client.delete(self.lock_name)
        self.client.delete('ns_%s' % self.lock_name)


class TestEtcdLock(unittest.TestCase):
    """Integration tests for ``sherlock.EtcdLock`` against a live etcd."""

    def setUp(self):
        self.client = etcd.Client(host='etcd')
        self.lock_name = 'test_lock'

    def test_acquire(self):
        lock = sherlock.EtcdLock(self.lock_name, client=self.client)
        self.assertTrue(lock._acquire())
        self.assertEqual(self.client.get(self.lock_name).value,
                         str(lock._owner))

    def test_acquire_with_namespace(self):
        lock = sherlock.EtcdLock(self.lock_name, client=self.client,
                                 namespace='ns')
        self.assertTrue(lock._acquire())
        # Namespaced keys are stored under '/<namespace>/<name>'.
        self.assertEqual(self.client.get('/ns/%s' % self.lock_name).value,
                         str(lock._owner))

    def test_acquire_once_only(self):
        lock1 = sherlock.EtcdLock(self.lock_name, client=self.client)
        lock2 = sherlock.EtcdLock(self.lock_name, client=self.client)
        self.assertTrue(lock1._acquire())
        self.assertFalse(lock2._acquire())

    def test_acquire_check_expiry(self):
        lock = sherlock.EtcdLock(self.lock_name, client=self.client, expire=1)
        lock.acquire()
        time.sleep(2)
        self.assertFalse(lock.locked())

    def test_acquire_check_expire_is_not_set(self):
        lock = sherlock.EtcdLock(self.lock_name, client=self.client,
                                 expire=None)
        lock.acquire()
        time.sleep(2)
        # Fixed: ``assertEquals`` is a deprecated alias removed in
        # Python 3.12; use ``assertEqual``.
        self.assertEqual(self.client.get(self.lock_name).ttl, None)

    def test_release(self):
        lock = sherlock.EtcdLock(self.lock_name, client=self.client)
        lock._acquire()
        lock._release()
        self.assertRaises(etcd.EtcdKeyNotFound,
                          self.client.get, self.lock_name)

    def test_release_with_namespace(self):
        lock = sherlock.EtcdLock(self.lock_name, client=self.client,
                                 namespace='ns')
        lock._acquire()
        lock._release()
        self.assertRaises(etcd.EtcdKeyNotFound,
                          self.client.get, '/ns/%s' % self.lock_name)

    def test_release_own_only(self):
        lock1 = sherlock.EtcdLock(self.lock_name, client=self.client)
        lock2 = sherlock.EtcdLock(self.lock_name, client=self.client)
        lock1._acquire()
        self.assertRaises(sherlock.LockException, lock2._release)
        lock1._release()

    def test_locked(self):
        lock = sherlock.EtcdLock(self.lock_name, client=self.client)
        lock._acquire()
        self.assertTrue(lock._locked)
        lock._release()
        self.assertFalse(lock._locked)

    def test_deleting_lock_object_releases_the_lock(self):
        lock = sherlock.lock.EtcdLock(self.lock_name, client=self.client)
        lock.acquire()
        self.assertEqual(self.client.get(self.lock_name).value,
                         str(lock._owner))
        del lock
        self.assertRaises(etcd.EtcdKeyNotFound,
                          self.client.get, self.lock_name)

    def tearDown(self):
        # Best-effort cleanup: the key may already have been released.
        try:
            self.client.delete(self.lock_name)
        except etcd.EtcdKeyNotFound:
            pass
        try:
            self.client.delete('/ns/%s' % self.lock_name)
        except etcd.EtcdKeyNotFound:
            pass


class TestMCLock(unittest.TestCase):
    """Integration tests for ``sherlock.MCLock`` against a live Memcached."""

    def setUp(self):
        self.client = pylibmc.Client(['memcached'], binary=True)
        self.lock_name = 'test_lock'

    def test_acquire(self):
        lock = sherlock.MCLock(self.lock_name, client=self.client)
        self.assertTrue(lock._acquire())
        self.assertEqual(self.client.get(self.lock_name), str(lock._owner))

    def test_acquire_with_namespace(self):
        lock = sherlock.MCLock(self.lock_name, client=self.client,
                               namespace='ns')
        self.assertTrue(lock._acquire())
        self.assertEqual(self.client.get('ns_%s' % self.lock_name),
                         str(lock._owner))

    def test_acquire_once_only(self):
        lock1 = sherlock.MCLock(self.lock_name, client=self.client)
        lock2 = sherlock.MCLock(self.lock_name, client=self.client)
        self.assertTrue(lock1._acquire())
        self.assertFalse(lock2._acquire())

    def test_acquire_check_expiry(self):
        lock = sherlock.MCLock(self.lock_name, client=self.client, expire=1)
        lock.acquire()
        time.sleep(2)
        self.assertFalse(lock.locked())

    def test_release(self):
        lock = sherlock.MCLock(self.lock_name, client=self.client)
        lock._acquire()
        lock._release()
        self.assertEqual(self.client.get(self.lock_name), None)

    def test_release_with_namespace(self):
        lock = sherlock.MCLock(self.lock_name, client=self.client,
                               namespace='ns')
        lock._acquire()
        lock._release()
        self.assertEqual(self.client.get('ns_%s' % self.lock_name), None)

    def test_release_own_only(self):
        lock1 = sherlock.MCLock(self.lock_name, client=self.client)
        lock2 = sherlock.MCLock(self.lock_name, client=self.client)
        lock1._acquire()
        self.assertRaises(sherlock.LockException, lock2._release)
        lock1._release()

    def test_locked(self):
        lock = sherlock.MCLock(self.lock_name, client=self.client)
        lock._acquire()
        self.assertTrue(lock._locked)
        lock._release()
        self.assertFalse(lock._locked)

    def test_deleting_lock_object_releases_the_lock(self):
        lock = sherlock.lock.MCLock(self.lock_name, client=self.client)
        lock.acquire()
        self.assertEqual(self.client.get(self.lock_name), str(lock._owner))
        del lock
        self.assertEqual(self.client.get(self.lock_name), None)

    def tearDown(self):
        self.client.delete(self.lock_name)
        self.client.delete('ns_%s' % self.lock_name)
36.309237
91
0.639089
1,082
9,041
5.145102
0.088725
0.125022
0.137956
0.113167
0.923118
0.918089
0.903179
0.876953
0.863661
0.863661
0
0.005718
0.245548
9,041
248
92
36.455645
0.810438
0.003982
0
0.840796
0
0
0.022684
0
0
0
0
0
0.218905
1
0.174129
false
0.00995
0.029851
0
0.218905
0.004975
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
35be819222bbfcfab0e03a321f77a0e23fb57e40
21,125
py
Python
Binary_Search_Tree_Unbalanced/BST_Test.py
rockgard3n/Data-Structures
93b6b5aaf74cb0f39be233d0f80fd881e27969af
[ "MIT" ]
null
null
null
Binary_Search_Tree_Unbalanced/BST_Test.py
rockgard3n/Data-Structures
93b6b5aaf74cb0f39be233d0f80fd881e27969af
[ "MIT" ]
null
null
null
Binary_Search_Tree_Unbalanced/BST_Test.py
rockgard3n/Data-Structures
93b6b5aaf74cb0f39be233d0f80fd881e27969af
[ "MIT" ]
null
null
null
import unittest

from Binary_Search_Tree import Binary_Search_Tree


class BinarySearchTree_Tester(unittest.TestCase):
    """Traversal-string and height tests for the unbalanced BST.

    Fixes over the previous revision:

    * Three test methods were silently shadowed because their names
      duplicated earlier ones (a class body keeps only the last
      definition of a name), so they never ran.  They are renamed:
        - second ``test_insert_on_h1_lessthan_IO``  -> ``..._PSTO``
        - second ``test_insert_on_h2_lessthan_IO``  -> ``..._PSTO``
        - second ``test_remove_joint_elbowtree_IO`` -> ``..._height``
    * ``test_insert_on_h2_gthan_height`` inserted 15 where its sibling
      tests insert 35, so it did not build the right-leaning chain its
      name promises; it now inserts 35.
    """

    def setUp(self):
        self.__tree = Binary_Search_Tree()

    # ------------------------------------------------------------------
    # Helpers
    # ------------------------------------------------------------------

    def _build(self, *values):
        """Insert ``values`` into the tree in the given order; return it."""
        for value in values:
            self.__tree.insert_element(value)
        return self.__tree

    def _remove(self, *values):
        """Remove ``values`` from the tree in the given order; return it."""
        for value in values:
            self.__tree.remove_element(value)
        return self.__tree

    # ------------------------------------------------------------------
    # Empty tree
    # ------------------------------------------------------------------

    def test_empty_tree_string_IO(self):
        self.assertEqual("[ ]", self.__tree.in_order())

    def test_empty_tree_string_PRO(self):
        self.assertEqual("[ ]", self.__tree.pre_order())

    def test_empty_tree_string_PSTO(self):
        self.assertEqual("[ ]", self.__tree.post_order())

    def test_empty_tree(self):
        self.assertEqual(0, self.__tree.get_height())

    # ------------------------------------------------------------------
    # Insertion
    # ------------------------------------------------------------------

    # Height 1

    def test_insert_on_empty_IO(self):
        self.assertEqual("[ 25 ]", self._build(25).in_order())

    def test_insert_on_empty_PRO(self):
        self.assertEqual("[ 25 ]", self._build(25).pre_order())

    def test_insert_on_empty_POST(self):
        self.assertEqual("[ 25 ]", self._build(25).post_order())

    def test_insert_on_empty_height(self):
        self.assertEqual(1, self._build(25).get_height())

    # Height 2

    def test_insert_on_h1_lessthan_IO(self):
        self.assertEqual("[ 15, 25 ]", self._build(25, 15).in_order())

    def test_insert_on_h1_lessthan_PRO(self):
        self.assertEqual("[ 25, 15 ]", self._build(25, 15).pre_order())

    def test_insert_on_h1_lessthan_PSTO(self):
        # Renamed: was a duplicate of ..._IO and therefore never ran.
        self.assertEqual("[ 15, 25 ]", self._build(25, 15).post_order())

    def test_insert_on_h1_lessthan_height(self):
        self.assertEqual(2, self._build(25, 15).get_height())

    def test_insert_on_h1_gthan_IO(self):
        self.assertEqual("[ 25, 35 ]", self._build(25, 35).in_order())

    def test_insert_on_h1_gthan_PRO(self):
        self.assertEqual("[ 25, 35 ]", self._build(25, 35).pre_order())

    def test_insert_on_h1_gthan_PSTO(self):
        self.assertEqual("[ 35, 25 ]", self._build(25, 35).post_order())

    def test_insert_on_h1_gthan_height(self):
        self.assertEqual(2, self._build(25, 35).get_height())

    # Perfect tree of height 2

    def test_insert_h2_perfect_tree_IO(self):
        self.assertEqual("[ 15, 20, 25 ]", self._build(20, 15, 25).in_order())

    def test_insert_h2_perfect_tree_PRO(self):
        self.assertEqual("[ 20, 15, 25 ]", self._build(20, 15, 25).pre_order())

    def test_insert_h2_perfect_tree_PSTO(self):
        self.assertEqual("[ 15, 25, 20 ]", self._build(20, 15, 25).post_order())

    # Height 3, left-leaning chain

    def test_insert_on_h2_lessthan_IO(self):
        self.assertEqual("[ 10, 15, 25 ]", self._build(25, 15, 10).in_order())

    def test_insert_on_h2_lessthan_PRO(self):
        self.assertEqual("[ 25, 15, 10 ]", self._build(25, 15, 10).pre_order())

    def test_insert_on_h2_lessthan_PSTO(self):
        # Renamed: was a duplicate of ..._IO and therefore never ran.
        self.assertEqual("[ 10, 15, 25 ]", self._build(25, 15, 10).post_order())

    def test_insert_on_h2_lessthan_height(self):
        self.assertEqual(3, self._build(25, 15, 10).get_height())

    # Height 3, right-leaning chain

    def test_insert_on_h2_gthan_IO(self):
        self.assertEqual("[ 10, 25, 35 ]", self._build(10, 25, 35).in_order())

    def test_insert_on_h2_gthan_PRO(self):
        self.assertEqual("[ 10, 25, 35 ]", self._build(10, 25, 35).pre_order())

    def test_insert_on_h2_gthan_PSTO(self):
        self.assertEqual("[ 35, 25, 10 ]", self._build(10, 25, 35).post_order())

    def test_insert_on_h2_gthan_height(self):
        # Fixed: previously inserted 15, building an elbow rather than the
        # right-leaning chain described by the test name (height 3 either way).
        self.assertEqual(3, self._build(10, 25, 35).get_height())

    # Height 3 with an elbow

    def test_insert_h3_elbow_IO(self):
        self.assertEqual("[ 15, 20, 25 ]", self._build(15, 25, 20).in_order())

    def test_insert_h3_elbow_PRO(self):
        self.assertEqual("[ 15, 25, 20 ]", self._build(15, 25, 20).pre_order())

    def test_insert_h3_elbow_PSTO(self):
        self.assertEqual("[ 20, 25, 15 ]", self._build(15, 25, 20).post_order())

    def test_insert_h3_elbow_height(self):
        self.assertEqual(3, self._build(15, 25, 20).get_height())

    # Perfect tree of height 3

    def test_insert_h3_perfect_IO(self):
        tree = self._build(30, 20, 40, 15, 25, 35, 45)
        self.assertEqual("[ 15, 20, 25, 30, 35, 40, 45 ]", tree.in_order())

    def test_insert_h3_perfect_PRO(self):
        tree = self._build(30, 20, 40, 15, 25, 35, 45)
        self.assertEqual("[ 30, 20, 15, 25, 40, 35, 45 ]", tree.pre_order())

    def test_insert_h3_perfect_POST(self):
        tree = self._build(30, 20, 40, 15, 25, 35, 45)
        self.assertEqual("[ 15, 25, 20, 35, 45, 40, 30 ]", tree.post_order())

    # ------------------------------------------------------------------
    # Removal
    # ------------------------------------------------------------------

    # Empty tree (removal of a missing element must leave it empty)

    def test_remove_empty_IO(self):
        self.assertEqual("[ ]", self._remove(25).in_order())

    def test_remove_empty_PRO(self):
        self.assertEqual("[ ]", self._remove(25).pre_order())

    def test_remove_empty_PSTO(self):
        self.assertEqual("[ ]", self._remove(25).post_order())

    # Height 1

    def test_remove_h1_IO(self):
        self._build(25)
        self.assertEqual("[ ]", self._remove(25).in_order())

    def test_remove_h1_PRO(self):
        self._build(25)
        self.assertEqual("[ ]", self._remove(25).pre_order())

    def test_remove_h1_POST(self):
        self._build(25)
        self.assertEqual("[ ]", self._remove(25).post_order())

    def test_remove_h1_height(self):
        self._build(25)
        self.assertEqual(0, self._remove(25).get_height())

    # Height 2: removal of leaf and of root

    def test_remove_h2_leaf_IO(self):
        self._build(15, 20)
        self.assertEqual("[ 15 ]", self._remove(20).in_order())

    def test_remove_h2_leaf_PRO(self):
        self._build(15, 20)
        self.assertEqual("[ 15 ]", self._remove(20).pre_order())

    def test_remove_h2_leaf_POST(self):
        self._build(15, 20)
        self.assertEqual("[ 15 ]", self._remove(20).post_order())

    def test_remove_h2_leaf_height(self):
        self._build(15, 20)
        self.assertEqual(1, self._remove(20).get_height())

    def test_remove_h2_root_IO(self):
        self._build(15, 20)
        self.assertEqual("[ 20 ]", self._remove(15).in_order())

    def test_remove_h2_root_PRO(self):
        self._build(15, 20)
        self.assertEqual("[ 20 ]", self._remove(15).pre_order())

    def test_remove_h2_root_POST(self):
        self._build(15, 20)
        self.assertEqual("[ 20 ]", self._remove(15).post_order())

    def test_remove_h2_root_height(self):
        self._build(15, 20)
        self.assertEqual(1, self._remove(15).get_height())

    # Three nodes: remove both leaves, in both orders

    def test_remove_both_leaves_IO(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 20 ]", self._remove(15, 25).in_order())

    def test_remove_both_leaves_PRO(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 20 ]", self._remove(15, 25).pre_order())

    def test_remove_both_leaves_POST(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 20 ]", self._remove(15, 25).post_order())

    def test_remove_both_leaves_height(self):
        self._build(20, 15, 25)
        self.assertEqual(1, self._remove(15, 25).get_height())

    def test_remove_both_leaves_IO_reverse(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 20 ]", self._remove(25, 15).in_order())

    def test_remove_both_leaves_PRO_reverse(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 20 ]", self._remove(25, 15).pre_order())

    def test_remove_both_leaves_POST_reverse(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 20 ]", self._remove(25, 15).post_order())

    def test_remove_both_leaves_height_reverse(self):
        self._build(20, 15, 25)
        self.assertEqual(1, self._remove(25, 15).get_height())

    # Three nodes: remove only the right leaf

    def test_remove_right_leaf_IO(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 15, 20 ]", self._remove(25).in_order())

    def test_remove_right_leaf_PRO(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 20, 15 ]", self._remove(25).pre_order())

    def test_remove_right_leaf_POST(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 15, 20 ]", self._remove(25).post_order())

    # Three nodes: remove only the left leaf

    def test_remove_left_leaf_IO(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 20, 25 ]", self._remove(15).in_order())

    def test_remove_left_leaf_PRO(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 20, 25 ]", self._remove(15).pre_order())

    def test_remove_left_leaf_POST(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 25, 20 ]", self._remove(15).post_order())

    def test_remove_left_leaf_height(self):
        self._build(20, 15, 25)
        self.assertEqual(2, self._remove(15).get_height())

    # Perfect tree of height 2: remove the root

    def test_remove_root_perfect_IO(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 15, 25 ]", self._remove(20).in_order())

    def test_remove_root_perfect_PRO(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 25, 15 ]", self._remove(20).pre_order())

    def test_remove_root_perfect_POST(self):
        self._build(20, 15, 25)
        self.assertEqual("[ 15, 25 ]", self._remove(20).post_order())

    def test_remove_root_perfect_height(self):
        self._build(20, 15, 25)
        self.assertEqual(2, self._remove(20).get_height())

    # Elbow tree: remove the joint

    def test_remove_joint_elbowtree_IO(self):
        self._build(15, 25, 20)
        self.assertEqual("[ 15, 20 ]", self._remove(25).in_order())

    def test_remove_joint_elbowtree_PRO(self):
        self._build(15, 25, 20)
        self.assertEqual("[ 15, 20 ]", self._remove(25).pre_order())

    def test_remove_joint_elbowtree_POST(self):
        self._build(15, 25, 20)
        self.assertEqual("[ 20, 15 ]", self._remove(25).post_order())

    def test_remove_joint_elbowtree_height(self):
        # Renamed: was a duplicate of ..._IO and therefore never ran.
        self._build(15, 25, 20)
        self.assertEqual(2, self._remove(25).get_height())

    # Perfect tree of height 3: remove an inner node with two children

    def test_remove_left_child_h2_IO(self):
        self._build(30, 20, 40, 15, 25, 35, 45)
        self.assertEqual("[ 15, 25, 30, 35, 40, 45 ]",
                         self._remove(20).in_order())

    def test_remove_left_child_h2_PRO(self):
        self._build(30, 20, 40, 15, 25, 35, 45)
        self.assertEqual("[ 30, 25, 15, 40, 35, 45 ]",
                         self._remove(20).pre_order())

    def test_remove_left_child_h2_POST(self):
        self._build(30, 20, 40, 15, 25, 35, 45)
        self.assertEqual("[ 15, 25, 35, 45, 40, 30 ]",
                         self._remove(20).post_order())

    def test_remove_left_child_h2_height(self):
        self._build(30, 20, 40, 15, 25, 35, 45)
        self.assertEqual(3, self._remove(20).get_height())

    # Perfect tree of height 3: remove the root

    def test_remove_root_from_perfecth3_IO(self):
        self._build(30, 20, 40, 15, 25, 35, 45)
        self.assertEqual("[ 15, 20, 25, 35, 40, 45 ]",
                         self._remove(30).in_order())

    def test_remove_root_from_perfecth3_PRO(self):
        self._build(30, 20, 40, 15, 25, 35, 45)
        self.assertEqual("[ 35, 20, 15, 25, 40, 45 ]",
                         self._remove(30).pre_order())

    def test_remove_root_from_perfecth3_POST(self):
        self._build(30, 20, 40, 15, 25, 35, 45)
        self.assertEqual("[ 15, 25, 20, 45, 40, 35 ]",
                         self._remove(30).post_order())

    def test_remove_root_from_perfecth3_height(self):
        self._build(30, 20, 40, 15, 25, 35, 45)
        self.assertEqual(3, self._remove(30).get_height())


if __name__ == "__main__":
    unittest.main()
38.131769
101
0.654533
2,777
21,125
4.455528
0.029888
0.236644
0.261376
0.392063
0.955791
0.934777
0.90851
0.877475
0.822355
0.800048
0
0.057521
0.230533
21,125
554
102
38.131769
0.70366
0.025657
0
0.743363
0
0
0.034998
0
0
0
0
0
0.176991
1
0.179204
false
0
0.004425
0
0.185841
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
10
ea581eaeb48210f577dcb0981e3af66092b982a8
10,896
py
Python
tests.py
dmtlvn/dotdict
018288ecc0b1aa12b69ca1b6460133b7f54c50f0
[ "MIT" ]
2
2021-09-11T12:50:04.000Z
2021-09-13T08:20:33.000Z
tests.py
dmtlvn/dotdict
018288ecc0b1aa12b69ca1b6460133b7f54c50f0
[ "MIT" ]
null
null
null
tests.py
dmtlvn/dotdict
018288ecc0b1aa12b69ca1b6460133b7f54c50f0
[ "MIT" ]
null
null
null
import unittest
import pickle
import json

from dotdict.dotdict import dotdict


class DotDictTestCase(unittest.TestCase):
    """Unit tests for ``dotdict``: a dict exposing attribute-style access."""

    def setUp(self):
        # Nested plain-dict fixture; ``attrdict_ex`` is its dotdict view.
        self.dict_ex = dict(
            a = 1,
            b = 2,
            c = dict(
                d = 3,
                e = 4,
                f = dict(
                    g = 5,
                    h = 6
                )
            )
        )
        self.attrdict_ex = dotdict(self.dict_ex)
        # Method names of dict/dotdict that must remain callable and
        # therefore cannot be overwritten or deleted via attribute access.
        self.keywords = {
            "clear", "copy", "fromkeys", "get", "items", "keys", "pop",
            "popitem", "setdefault", "update", "values", "to_dict"
        }

    # Construction: nested dicts (also inside lists/tuples) become dotdicts,
    # but subclasses of dict/list/tuple are left untouched.

    def test_from_dict(self):
        d = dict(a = 1, b = dict(c = 2))
        a = dotdict(d)
        self.assertEqual(a.a, d['a'])
        self.assertEqual(a.b, d['b'])
        self.assertEqual(a.b.c, d['b']['c'])

    def test_nested_list(self):
        d = dict(a = 1, b = [2, dict(c = 3)])
        a = dotdict(d)
        self.assertEqual(a.b[1].c, 3)

    def test_nested_tuple(self):
        d = dict(a = 1, b = (2, dict(c = 3)))
        a = dotdict(d)
        self.assertEqual(a.b[1].c, 3)

    def test_subclasses_dict(self):
        # dict *subclasses* are not converted, so attribute access fails.
        class A(dict):
            pass
        d = dict(a = 1, b = A(c = 2))
        a = dotdict(d)
        self.assertRaises(AttributeError, lambda m: m.c, a.b)

    def test_subclasses_tuple(self):
        class A(tuple):
            pass
        d = dict(a = 1, b = A((2, {'c': 3})))
        a = dotdict(d)
        self.assertRaises(AttributeError, lambda m: m.c, a.b[1])

    def test_subclasses_list(self):
        class A(list):
            pass
        d = dict(a = 1, b = A((2, {'c': 3})))
        a = dotdict(d)
        self.assertRaises(AttributeError, lambda m: m.c, a.b[1])

    # to_dict(): converts back to plain dicts recursively.

    def test_to_dict_dict(self):
        d = dotdict(a = 1, b = dict(c = 2)).to_dict()
        self.assertRaises(AttributeError, lambda m: m.c, d['b'])

    def test_to_dict_list(self):
        d = dotdict(a = 1, b = [1, dict(c = 2)]).to_dict()
        self.assertRaises(AttributeError, lambda m: m.c, d['b'][1])

    def test_to_dict_tuple(self):
        d = dotdict(a = 1, b = (1, dict(c = 2))).to_dict()
        self.assertRaises(AttributeError, lambda m: m.c, d['b'][1])

    # Basic dict compatibility and serialization round-trips.

    def test_isinstance(self):
        self.assertTrue(isinstance(self.attrdict_ex, dict))

    def test_hasattr(self):
        self.assertTrue(hasattr(self.attrdict_ex, 'a'))

    def test_hasattr_methods(self):
        for k in self.keywords:
            self.assertTrue(hasattr(self.attrdict_ex, k))

    def test_dict_comparison(self):
        self.assertEqual(self.attrdict_ex, self.dict_ex)

    def test_str(self):
        d = dotdict(a = 42, b = dict(c = 69))
        self.assertEqual(str(d), "attrdict({'a': 42, 'b': attrdict({'c': 69})})")

    def test_pickle(self):
        ref = dotdict(a = 42, b = dict(c = 69))
        pkl = pickle.dumps(ref)
        rec = pickle.loads(pkl)
        self.assertEqual(rec, ref)

    def test_json(self):
        ref = dotdict(a = 42, b = dict(c = 69))
        jsn = json.dumps(ref)
        rec = json.loads(jsn)
        self.assertEqual(rec, ref)

    def test_pyyaml(self):
        # PyYAML is optional; the test is a no-op when it is not installed.
        d = dotdict(a = 42, b = dict(c = 69))
        try:
            import yaml
            s = yaml.dump(d)
            self.assertEqual(s, 'a: 42\nb:\n c: 69\n')
        except ImportError:
            pass

    # get-tests

    def test_getattr(self):
        self.assertEqual(self.attrdict_ex.a, 1)

    def test_getitem(self):
        self.assertEqual(self.attrdict_ex['a'], 1)

    def test_getattr_recursive(self):
        self.assertEqual(self.attrdict_ex.c.d, 3)

    def test_getitem_recursive(self):
        self.assertEqual(self.attrdict_ex['c']['d'], 3)

    def test_getattr_mixed(self):
        self.assertEqual(self.attrdict_ex.c['d'], 3)

    def test_getitem_mixed(self):
        self.assertEqual(self.attrdict_ex['c'].d, 3)

    def test_getattr_error(self):
        # Missing attributes surface as KeyError (dict semantics), not
        # AttributeError.
        self.assertRaises(KeyError, lambda d: d.non_existent, self.attrdict_ex)

    def test_getitem_error(self):
        self.assertRaises(KeyError, lambda d: d['non_existent'], self.attrdict_ex)

    def test_getitem_protected(self):
        # A key that shadows a method name is reachable via item access...
        d = dotdict(a = 42, keys = 69)
        self.assertEqual(d['keys'], 69)

    def test_getattr_protected(self):
        # ...while attribute access still resolves to the bound method.
        d = dotdict(a = 42, keys = 69)
        self.assertTrue(callable(d.keys))

    # set-tests

    def test_setattr_existing(self):
        self.attrdict_ex.a = 42
        self.assertEqual(self.attrdict_ex.a, 42)

    def test_setattr_new(self):
        self.attrdict_ex.q = 69
        self.assertEqual(self.attrdict_ex.q, 69)

    def test_setattr_nested_dict(self):
        # Assigned plain dicts are converted recursively.
        self.attrdict_ex.q = {'a': 1, 'b': {'c': 69}}
        self.assertEqual(self.attrdict_ex.q.b.c, 69)

    def test_setattr_nested_dict_subclass(self):
        class A(dict):
            pass
        self.attrdict_ex.q = A(a = 1)
        self.assertTrue(isinstance(self.attrdict_ex.q, A))
        self.assertRaises(AttributeError, lambda m: m.a, self.attrdict_ex.q)

    def test_setattr_nested_tuple_subclass(self):
        class A(tuple):
            pass
        self.attrdict_ex.q = A(({'a': 1},))
        self.assertTrue(isinstance(self.attrdict_ex.q, A))
        self.assertTrue(type(self.attrdict_ex.q[0]) is dict)
        self.assertRaises(AttributeError, lambda m: m.a, self.attrdict_ex.q[0])

    def test_setattr_nested_list_subclass(self):
        # NOTE(review): subclasses *tuple* despite the test name — probably
        # meant ``class A(list)``; confirm against the sibling test above.
        class A(tuple):
            pass
        self.attrdict_ex.q = A([{'a': 1}])
        self.assertTrue(isinstance(self.attrdict_ex.q, A))
        self.assertTrue(type(self.attrdict_ex.q[0]) is dict)
        self.assertRaises(AttributeError, lambda m: m.a, self.attrdict_ex.q[0])

    def test_setattr_nested_tuple(self):
        self.attrdict_ex.q = {'a': 1, 'b': ({'c': 69},)}
        self.assertEqual(self.attrdict_ex.q.b[0].c, 69)

    def test_setattr_nested_list(self):
        self.attrdict_ex.q = {'a': 1, 'b': [{'c': 69}]}
        self.assertEqual(self.attrdict_ex.q.b[0].c, 69)

    def test_setitem_existing(self):
        self.attrdict_ex['a'] = 42
        self.assertEqual(self.attrdict_ex['a'], 42)

    def test_setitem_new(self):
        self.attrdict_ex['q'] = 69
        self.assertEqual(self.attrdict_ex['q'], 69)

    def test_setattr_recursive_existing(self):
        self.attrdict_ex.c.f.g = 42
        self.assertEqual(self.attrdict_ex.c.f.g, 42)

    def test_setattr_recursive_new(self):
        self.attrdict_ex.c.f.q = 69
        self.assertEqual(self.attrdict_ex.c.f.q, 69)

    def test_setitem_recursive_existing(self):
        self.attrdict_ex['c']['f']['g'] = 42
        self.assertEqual(self.attrdict_ex['c']['f']['g'], 42)

    def test_setitem_recursive_new(self):
        self.attrdict_ex['c']['f']['q'] = 69
        self.assertEqual(self.attrdict_ex['c']['f']['q'], 69)

    def test_setattr_mixed_existing(self):
        self.attrdict_ex.c['f'].g = 42
        self.assertEqual(self.attrdict_ex.c['f'].g, 42)

    def test_setattr_mixed_new(self):
        self.attrdict_ex.c['f'].q = 69
        self.assertEqual(self.attrdict_ex.c['f'].q, 69)

    def test_setattr_unresolved(self):
        # Assigning through a missing intermediate key must raise, not
        # auto-create the path.
        def test():
            self.attrdict_ex.c.non_existent.foo = 69
        self.assertRaises(KeyError, test)

    def test_setattr_protected(self):
        # Method names cannot be clobbered through attribute assignment.
        def test(key):
            self.attrdict_ex.__setattr__(key, 69)
        for k in self.keywords:
            self.assertRaises(AttributeError, test, k)

    # del-tests

    def test_delattr(self):
        del self.attrdict_ex.a
        self.assertFalse('a' in self.attrdict_ex)

    def test_delitem(self):
        del self.attrdict_ex['a']
        self.assertFalse('a' in self.attrdict_ex)

    def test_delattr_error(self):
        def test():
            del self.attrdict_ex.non_existent
        self.assertRaises(KeyError, test)

    def test_delitem_error(self):
        def test():
            del self.attrdict_ex['non_existent']
        self.assertRaises(KeyError, test)

    def test_delattr_protected(self):
        # Method names cannot be removed through attribute deletion.
        def test(d, key):
            d.__delattr__(key)
        d = dotdict(a = 42, keys = 69)
        for k in self.keywords:
            self.assertRaises(AttributeError, test, d, k)

    # update tests

    def test_update_dict(self):
        # Non-recursive update replaces nested values wholesale.
        upd = dict(a = 42, c = dict(d = 69))
        ref = dict(a = 42, b = 2, c = dict(d = 69))
        self.attrdict_ex.update(upd)
        self.assertEqual(self.attrdict_ex, ref)

    def test_update_iterable(self):
        upd = [('a', 42), ('c', dict(d = 69))]
        ref = dict(a = 42, b = 2, c = dict(d = 69))
        self.attrdict_ex.update(upd)
        self.assertEqual(self.attrdict_ex, ref)

    def test_update_kwargs(self):
        # Keyword arguments win over the positional mapping.
        upd = dict(a = 42, c = dict(d = 69))
        ref = dict(a = 42, b = 2, c = 69)
        self.attrdict_ex.update(upd, c = 69)
        self.assertEqual(self.attrdict_ex, ref)

    def test_update_error(self):
        self.assertRaises(TypeError, self.attrdict_ex.update, 42)
        self.assertRaises(TypeError, self.attrdict_ex.update, 42, 69)

    def test_update_iterable_error(self):
        self.assertRaises(TypeError, self.attrdict_ex.update, [42, 69])

    def test_update_dict_recursive(self):
        # recursive=True merges nested dicts instead of replacing them.
        # NOTE(review): ``ref`` aliases ``self.dict_ex`` and is mutated in
        # place; the comparison is only meaningful if dotdict() copied the
        # source dict in setUp — confirm.
        upd = dict(a = 42, c = dict(d = 69))
        ref = self.dict_ex
        ref['a'], ref['c']['d'] = 42, 69
        self.attrdict_ex.update(upd, recursive = True)
        self.assertEqual(self.attrdict_ex, ref)

    def test_update_iterable_recursive(self):
        upd = [('a', 42), ('c', dict(d = 69))]
        ref = self.dict_ex
        ref['a'], ref['c']['d'] = 42, 69
        self.attrdict_ex.update(upd, recursive = True)
        self.assertEqual(self.attrdict_ex, ref)

    def test_update_nested_iterable_recursive(self):
        # A nested list of pairs is NOT treated as a mapping — it replaces.
        upd = [('a', 42), ('c', [('d', 69)])]
        ref = dict(a = 42, b = 2, c = [('d', 69)])
        self.attrdict_ex.update(upd, recursive = True)
        self.assertEqual(self.attrdict_ex, ref)

    def test_update_kwargs_recursive(self):
        upd = dict(a = 42, c = dict(d = 69))
        ref = self.dict_ex
        ref['a'], ref['c'] = 42, 69
        self.attrdict_ex.update(upd, c = 69, recursive = True)
        self.assertEqual(self.attrdict_ex, ref)

    def test_update_kwargs_nested_recursive(self):
        upd = dict(a = 42, c = dict(d = 69))
        self.attrdict_ex.update(upd, c = dict(d = 420), recursive = True)
        ref = self.dict_ex
        ref['a'], ref['c']['d'] = 42, 420
        self.assertEqual(self.attrdict_ex, ref)

    def test_update_error_recursive(self):
        self.assertRaises(TypeError, self.attrdict_ex.update, 42, recursive = True)
        self.assertRaises(TypeError, self.attrdict_ex.update, 42, 69, recursive = True)

    def test_update_iterable_error_recursive(self):
        self.assertRaises(TypeError, self.attrdict_ex.update, [42, 69], recursive = True)


if __name__ == "__main__":
    unittest.main()
32.428571
89
0.58627
1,540
10,896
3.982468
0.075325
0.15653
0.182619
0.123268
0.806131
0.785912
0.722811
0.714006
0.704875
0.669493
0
0.030873
0.268722
10,896
335
90
32.525373
0.73883
0.003855
0
0.319231
0
0
0.021845
0
0
0
0
0
0.273077
1
0.261538
false
0.026923
0.023077
0
0.311538
0
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
8
5776a121cc136747bc5980a5421ad56ebd83e72c
232,192
py
Python
openeo/processes.py
jonathom/openeo-python-client
43140b8371b01f67806deff39508b02572944280
[ "Apache-2.0" ]
75
2017-11-17T11:49:15.000Z
2022-02-15T14:21:42.000Z
openeo/processes.py
jonathom/openeo-python-client
43140b8371b01f67806deff39508b02572944280
[ "Apache-2.0" ]
238
2017-12-05T14:48:30.000Z
2022-03-31T15:42:38.000Z
openeo/processes.py
jonathom/openeo-python-client
43140b8371b01f67806deff39508b02572944280
[ "Apache-2.0" ]
28
2018-01-24T08:42:54.000Z
2022-01-27T09:16:26.000Z
# Do not edit this file directly. # It is automatically generated. # Used command line arguments: # openeo/internal/processes/generator.py ../openeo-processes/ ../openeo-processes/proposals/ --output openeo/processes.py import builtins from openeo.internal.processes.builder import ProcessBuilderBase, UNSET class ProcessBuilder(ProcessBuilderBase): def __add__(self, other) -> 'ProcessBuilder': return self.add(other) def __radd__(self, other) -> 'ProcessBuilder': return add(other, self) def __sub__(self, other) -> 'ProcessBuilder': return self.subtract(other) def __rsub__(self, other) -> 'ProcessBuilder': return subtract(other, self) def __mul__(self, other) -> 'ProcessBuilder': return self.multiply(other) def __rmul__(self, other) -> 'ProcessBuilder': return multiply(other, self) def __truediv__(self, other) -> 'ProcessBuilder': return self.divide(other) def __rtruediv__(self, other) -> 'ProcessBuilder': return divide(other, self) def __neg__(self) -> 'ProcessBuilder': return self.multiply(-1) def __pow__(self, other) -> 'ProcessBuilder': return self.power(other) def __getitem__(self, key) -> 'ProcessBuilder': if isinstance(key, builtins.int): return self.array_element(index=key) else: return self.array_element(label=key) def __eq__(self, other) -> 'ProcessBuilder': return eq(self, other) def __ne__(self, other) -> 'ProcessBuilder': return neq(self, other) def absolute(self) -> 'ProcessBuilder': """ Absolute value :param self: A number. :return: The computed absolute value. """ return absolute(x=self) def add(self, y) -> 'ProcessBuilder': """ Addition of two numbers :param self: The first summand. :param y: The second summand. :return: The computed sum of the two numbers. """ return add(x=self, y=y) def add_dimension(self, name, label, type=UNSET) -> 'ProcessBuilder': """ Add a new dimension :param self: A data cube to add the dimension to. :param name: Name for the dimension. :param label: A dimension label. :param type: The type of dimension, defaults to `other`. 
:return: The data cube with a newly added dimension. The new dimension has exactly one dimension label. All other dimensions remain unchanged. """ return add_dimension(data=self, name=name, label=label, type=type) def aggregate_spatial(self, geometries, reducer, target_dimension=UNSET, context=UNSET) -> 'ProcessBuilder': """ Zonal statistics for geometries :param self: A raster data cube. The data cube must have been reduced to only contain two spatial dimensions and a third dimension the values are aggregated for, for example the temporal dimension to get a time series. Otherwise, this process fails with the `TooManyDimensions` exception. The data cube implicitly gets restricted to the bounds of the geometries as if ``filter_spatial()`` would have been used with the same values for the corresponding parameters immediately before this process. :param geometries: Geometries as GeoJSON on which the aggregation will be based. One value will be computed per GeoJSON `Feature`, `Geometry` or `GeometryCollection`. For a `FeatureCollection` multiple values will be computed, one value per contained `Feature`. For example, a single value will be computed for a `MultiPolygon`, but two values will be computed for a `FeatureCollection` containing two polygons. - For **polygons**, the process considers all pixels for which the point at the pixel center intersects with the corresponding polygon (as defined in the Simple Features standard by the OGC). - For **points**, the process considers the closest pixel center. - For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line. Thus, pixels may be part of multiple geometries and be part of multiple aggregations. To maximize interoperability, a nested `GeometryCollection` should be avoided. Furthermore, a `GeometryCollection` composed of a single type of geometries should be avoided in favour of the corresponding multi-part type (e.g. `MultiPolygon`). 
:param reducer: A reducer to be applied on all values of each geometry. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. :param target_dimension: The new dimension name to be used for storing the results. Defaults to `result`. :param context: Additional data to be passed to the reducer. :return: A vector data cube with the computed results and restricted to the bounds of the geometries. The computed value is used for the dimension with the name that was specified in the parameter `target_dimension`. The computation also stores information about the total count of pixels (valid + invalid pixels) and the number of valid pixels (see ``is_valid()``) for each geometry. These values are added as a new dimension with a dimension name derived from `target_dimension` by adding the suffix `_meta`. The new dimension has the dimension labels `total_count` and `valid_count`. """ return aggregate_spatial(data=self, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) def aggregate_spatial_binary(self, geometries, reducer, target_dimension=UNSET, context=UNSET) -> 'ProcessBuilder': """ Zonal statistics for geometries by binary aggregation :param self: A raster data cube. The data cube implicitly gets restricted to the bounds of the geometries as if ``filter_spatial()`` would have been used with the same values for the corresponding parameters immediately before this process. :param geometries: Geometries as GeoJSON on which the aggregation will be based. :param reducer: A reduction operator to be applied consecutively on tuples of values. It must be both associative and commutative as the execution may be executed in parallel and therefore the order of execution is arbitrary. The reduction operator may be a single process such as ``multiply()`` or consist of multiple sub-processes. 
:param target_dimension: The new dimension name to be used for storing the results. Defaults to `result`. :param context: Additional data to be passed to the reducer. :return: A vector data cube with the computed results and restricted to the bounds of the geometries. The computed value is stored in dimension with the name that was specified in the parameter `target_dimension`. The computation also stores information about the total count of pixels (valid + invalid pixels) and the number of valid pixels (see ``is_valid()``) for each geometry. These values are stored as new dimension with a dimension name derived from `target_dimension` by adding the suffix `_meta`. The new dimension has the dimension labels `total_count` and `valid_count`. """ return aggregate_spatial_binary(data=self, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) def aggregate_spatial_window(self, reducer, size, boundary=UNSET, align=UNSET, context=UNSET) -> 'ProcessBuilder': """ Zonal statistics for rectangular windows :param self: A raster data cube with exactly two horizontal spatial dimensions and an arbitrary number of additional dimensions. The process is applied to all additional dimensions individually. :param reducer: A reducer to be applied on the list of values, which contain all pixels covered by the window. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. :param size: Window size in pixels along the horizontal spatial dimensions. The first value corresponds to the `x` axis, the second value corresponds to the `y` axis. :param boundary: Behavior to apply if the number of values for the axes `x` and `y` is not a multiple of the corresponding value in the `size` parameter. Options are: - `pad` (default): pad the data cube with the no-data value `null` to fit the required window size. 
- `trim`: trim the data cube to fit the required window size. Set the parameter `align` to specifies to which corner the data is aligned to. :param align: If the data requires padding or trimming (see parameter `boundary`), specifies to which corner of the spatial extent the data is aligned to. For example, if the data is aligned to the upper left, the process pads/trims at the lower-right. :param context: Additional data to be passed to the reducer. :return: A data cube with the newly computed values and the same dimensions. The resolution will change depending on the chosen values for the `size` and `boundary` parameter. It usually decreases for the dimensions which have the corresponding parameter `size` set to values greater than 1. The dimension labels will be set to the coordinate at the center of the window. The other dimension properties (name, type and reference system) remain unchanged. """ return aggregate_spatial_window(data=self, reducer=reducer, size=size, boundary=boundary, align=align, context=context) def aggregate_temporal(self, intervals, reducer, labels=UNSET, dimension=UNSET, context=UNSET) -> 'ProcessBuilder': """ Temporal aggregations :param self: A data cube. :param intervals: Left-closed temporal intervals, which are allowed to overlap. Each temporal interval in the array has exactly two elements: 1. The first element is the start of the temporal interval. The specified instance in time is **included** in the interval. 2. The second element is the end of the temporal interval. The specified instance in time is **excluded** from the interval. The specified temporal strings follow [RFC 3339](https://www.rfc-editor.org/rfc/rfc3339.html). Although [RFC 3339 prohibits the hour to be '24'](https://www.rfc-editor.org/rfc/rfc3339.html#section-5.7), **this process allows the value '24' for the hour** of an end time in order to make it possible that left-closed time intervals can fully cover the day. 
:param reducer: A reducer to be applied for the values contained in each interval. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. Intervals may not contain any values, which for most reducers leads to no-data (`null`) values by default. :param labels: Distinct labels for the intervals, which can contain dates and/or times. Is only required to be specified if the values for the start of the temporal intervals are not distinct and thus the default labels would not be unique. The number of labels and the number of groups need to be equal. :param dimension: The name of the temporal dimension for aggregation. All data along the dimension is passed through the specified reducer. If the dimension is not set or set to `null`, the data cube is expected to only have one temporal dimension. Fails with a `TooManyDimensions` exception if it has more dimensions. Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :param context: Additional data to be passed to the reducer. :return: A new data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the given temporal dimension. """ return aggregate_temporal(data=self, intervals=intervals, reducer=reducer, labels=labels, dimension=dimension, context=context) def aggregate_temporal_period(self, period, reducer, dimension=UNSET, context=UNSET) -> 'ProcessBuilder': """ Temporal aggregations based on calendar hierarchies :param self: A data cube. :param period: The time intervals to aggregate. The following pre-defined values are available: * `hour`: Hour of the day * `day`: Day of the year * `week`: Week of the year * `dekad`: Ten day periods, counted per year with three periods per month (day 1 - 10, 11 - 20 and 21 - end of month). 
The third dekad of the month can range from 8 to 11 days. For example, the fourth dekad is Feb, 1 - Feb, 10 each year. * `month`: Month of the year * `season`: Three month periods of the calendar seasons (December - February, March - May, June - August, September - November). * `tropical-season`: Six month periods of the tropical seasons (November - April, May - October). * `year`: Proleptic years * `decade`: Ten year periods ([0-to-9 decade](https://en.wikipedia.org/wiki/Decade#0-to-9_decade)), from a year ending in a 0 to the next year ending in a 9. * `decade-ad`: Ten year periods ([1-to-0 decade](https://en.wikipedia.org/wiki/Decade#1-to-0_decade)) better aligned with the anno Domini (AD) calendar era, from a year ending in a 1 to the next year ending in a 0. :param reducer: A reducer to be applied for the values contained in each period. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. Periods may not contain any values, which for most reducers leads to no-data (`null`) values by default. :param dimension: The name of the temporal dimension for aggregation. All data along the dimension is passed through the specified reducer. If the dimension is not set or set to `null`, the data cube is expected to only have one temporal dimension. Fails with a `TooManyDimensions` exception if it has more dimensions. Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :param context: Additional data to be passed to the reducer. :return: A new data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the given temporal dimension. 
The specified temporal dimension has the following dimension labels (`YYYY` = four-digit year, `MM` = two-digit month, `DD` two-digit day of month): * `hour`: `YYYY-MM- DD-00` - `YYYY-MM-DD-23` * `day`: `YYYY-001` - `YYYY-365` * `week`: `YYYY-01` - `YYYY-52` * `dekad`: `YYYY-00` - `YYYY-36` * `month`: `YYYY-01` - `YYYY-12` * `season`: `YYYY-djf` (December - February), `YYYY-mam` (March - May), `YYYY-jja` (June - August), `YYYY-son` (September - November). * `tropical- season`: `YYYY-ndjfma` (November - April), `YYYY-mjjaso` (May - October). * `year`: `YYYY` * `decade`: `YYY0` * `decade-ad`: `YYY1` """ return aggregate_temporal_period(data=self, period=period, reducer=reducer, dimension=dimension, context=context) def all(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Are all of the values true? :param self: A set of boolean values. :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. :return: Boolean result of the logical operation. """ return all(data=self, ignore_nodata=ignore_nodata) def and_(self, y) -> 'ProcessBuilder': """ Logical AND :param self: A boolean value. :param y: A boolean value. :return: Boolean result of the logical AND. """ return and_(x=self, y=y) def anomaly(self, normals, period) -> 'ProcessBuilder': """ Compute anomalies :param self: A data cube with exactly one temporal dimension and the following dimension labels for the given period (`YYYY` = four-digit year, `MM` = two-digit month, `DD` two-digit day of month): * `hour`: `YYYY-MM-DD-00` - `YYYY-MM-DD-23` * `day`: `YYYY-001` - `YYYY-365` * `week`: `YYYY-01` - `YYYY-52` * `dekad`: `YYYY-00` - `YYYY-36` * `month`: `YYYY-01` - `YYYY-12` * `season`: `YYYY-djf` (December - February), `YYYY-mam` (March - May), `YYYY-jja` (June - August), `YYYY-son` (September - November). * `tropical-season`: `YYYY-ndjfma` (November - April), `YYYY-mjjaso` (May - October). 
* `year`: `YYYY` * `decade`: `YYY0` * `decade-ad`: `YYY1` * `single-period` / `climatology-period`: Any ``aggregate_temporal_period()`` can compute such a data cube. :param normals: A data cube with normals, e.g. daily, monthly or yearly values computed from a process such as ``climatological_normal()``. Must contain exactly one temporal dimension with the following dimension labels for the given period: * `hour`: `00` - `23` * `day`: `001` - `365` * `week`: `01` - `52` * `dekad`: `00` - `36` * `month`: `01` - `12` * `season`: `djf` (December - February), `mam` (March - May), `jja` (June - August), `son` (September - November) * `tropical-season`: `ndjfma` (November - April), `mjjaso` (May - October) * `year`: Four-digit year numbers * `decade`: Four-digit year numbers, the last digit being a `0` * `decade-ad`: Four-digit year numbers, the last digit being a `1` * `single-period` / `climatology-period`: A single dimension label with any name is expected. :param period: Specifies the time intervals available in the normals data cube. The following options are available: * `hour`: Hour of the day * `day`: Day of the year * `week`: Week of the year * `dekad`: Ten day periods, counted per year with three periods per month (day 1 - 10, 11 - 20 and 21 - end of month). The third dekad of the month can range from 8 to 11 days. For example, the fourth dekad is Feb, 1 - Feb, 10 each year. * `month`: Month of the year * `season`: Three month periods of the calendar seasons (December - February, March - May, June - August, September - November). * `tropical- season`: Six month periods of the tropical seasons (November - April, May - October). * `year`: Proleptic years * `decade`: Ten year periods ([0-to-9 decade](https://en.wikipedia.org/wiki/Decade#0-to-9_decade)), from a year ending in a 0 to the next year ending in a 9. 
* `decade-ad`: Ten year periods ([1-to-0 decade](https://en.wikipedia.org/wiki/Decade#1-to-0_decade)) better aligned with the anno Domini (AD) calendar era, from a year ending in a 1 to the next year ending in a 0. * `single-period` / `climatology-period`: A single period of arbitrary length :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged. """ return anomaly(data=self, normals=normals, period=period) def any(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Is at least one value true? :param self: A set of boolean values. :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. :return: Boolean result of the logical operation. """ return any(data=self, ignore_nodata=ignore_nodata) def apply(self, process, context=UNSET) -> 'ProcessBuilder': """ Apply a process to each pixel :param self: A data cube. :param process: A process that accepts and returns a single value and is applied on each individual value in the data cube. The process may consist of multiple sub-processes and could, for example, consist of processes such as ``abs()`` or ``linear_scale_range()``. :param context: Additional data to be passed to the process. :return: A data cube with the newly computed values and the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged. """ return apply(data=self, process=process, context=context) def apply_dimension(self, process, dimension, target_dimension=UNSET, context=UNSET) -> 'ProcessBuilder': """ Apply a process to pixels along a dimension :param self: A data cube. :param process: Process to be applied on all pixel values. The specified process needs to accept an array and must return an array with at least one element. A process may consist of multiple sub- processes. :param dimension: The name of the source dimension to apply the process on. 
Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :param target_dimension: The name of the target dimension or `null` (the default) to use the source dimension specified in the parameter `dimension`. By specifying a target dimension, the source dimension is removed. The target dimension with the specified name and the type `other` (see ``add_dimension()``) is created, if it doesn't exist yet. :param context: Additional data to be passed to the process. :return: A data cube with the newly computed values. All dimensions stay the same, except for the dimensions specified in corresponding parameters. There are three cases how the dimensions can change: 1. The source dimension is the target dimension: - The (number of) dimensions remain unchanged as the source dimension is the target dimension. - The source dimension properties name and type remain unchanged. - The dimension labels, the reference system and the resolution are preserved only if the number of pixel values in the source dimension is equal to the number of values computed by the process. Otherwise, all other dimension properties change as defined in the list below. 2. The source dimension is not the target dimension and the latter exists: - The number of dimensions decreases by one as the source dimension is dropped. - The target dimension properties name and type remain unchanged. All other dimension properties change as defined in the list below. 3. The source dimension is not the target dimension and the latter does not exist: - The number of dimensions remain unchanged, but the source dimension is replaced with the target dimension. - The target dimension has the specified name and the type other. All other dimension properties are set as defined in the list below. 
Unless otherwise stated above, for the given (target) dimension the following applies: - the number of dimension labels is equal to the number of values computed by the process, - the dimension labels are incrementing integers starting from zero, - the resolution changes, and - the reference system is undefined. """ return apply_dimension(data=self, process=process, dimension=dimension, target_dimension=target_dimension, context=context) def apply_kernel(self, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET) -> 'ProcessBuilder': """ Apply a spatial convolution with a kernel :param self: A data cube. :param kernel: Kernel as a two-dimensional array of weights. The inner level of the nested array aligns with the `x` axis and the outer level aligns with the `y` axis. Each level of the kernel must have an uneven number of elements, otherwise the process throws a `KernelDimensionsUneven` exception. :param factor: A factor that is multiplied to each value after the kernel has been applied. This is basically a shortcut for explicitly multiplying each value by a factor afterwards, which is often required for some kernel-based algorithms such as the Gaussian blur. :param border: Determines how the data is extended when the kernel overlaps with the borders. Defaults to fill the border with zeroes. The following options are available: * *numeric value* - fill with a user-defined constant number `n`: `nnnnnn|abcdefgh|nnnnnn` (default, with `n` = 0) * `replicate` - repeat the value from the pixel at the border: `aaaaaa|abcdefgh|hhhhhh` * `reflect` - mirror/reflect from the border: `fedcba|abcdefgh|hgfedc` * `reflect_pixel` - mirror/reflect from the center of the pixel at the border: `gfedcb|abcdefgh|gfedcb` * `wrap` - repeat/wrap the image: `cdefgh|abcdefgh|abcdef` :param replace_invalid: This parameter specifies the value to replace non-numerical or infinite numerical values with. By default, those values are replaced with zeroes. 
:return: A data cube with the newly computed values and the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged. """ return apply_kernel(data=self, kernel=kernel, factor=factor, border=border, replace_invalid=replace_invalid) def apply_neighborhood(self, process, size, overlap=UNSET, context=UNSET) -> 'ProcessBuilder': """ Apply a process to pixels in a n-dimensional neighborhood :param self: A data cube. :param process: Process to be applied on all neighborhoods. :param size: Neighborhood sizes along each dimension. This object maps dimension names to either a physical measure (e.g. 100 m, 10 days) or pixels (e.g. 32 pixels). For dimensions not specified, the default is to provide all values. Be aware that including all values from overly large dimensions may not be processed at once. :param overlap: Overlap of neighborhoods along each dimension to avoid border effects. For instance a temporal dimension can add 1 month before and after a neighborhood. In the spatial dimensions, this is often a number of pixels. The overlap specified is added before and after, so an overlap of 8 pixels will add 8 pixels on both sides of the window, so 16 in total. Be aware that large overlaps increase the need for computational resources and modifying overlapping data in subsequent operations have no effect. :param context: Additional data to be passed to the process. :return: A data cube with the newly computed values and the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged. """ return apply_neighborhood(data=self, process=process, size=size, overlap=overlap, context=context) def arccos(self) -> 'ProcessBuilder': """ Inverse cosine :param self: A number. :return: The computed angle in radians. """ return arccos(x=self) def arcosh(self) -> 'ProcessBuilder': """ Inverse hyperbolic cosine :param self: A number. :return: The computed angle in radians. 
""" return arcosh(x=self) def arcsin(self) -> 'ProcessBuilder': """ Inverse sine :param self: A number. :return: The computed angle in radians. """ return arcsin(x=self) def arctan(self) -> 'ProcessBuilder': """ Inverse tangent :param self: A number. :return: The computed angle in radians. """ return arctan(x=self) def arctan2(self, x) -> 'ProcessBuilder': """ Inverse tangent of two numbers :param self: A number to be used as the dividend. :param x: A number to be used as the divisor. :return: The computed angle in radians. """ return arctan2(y=self, x=x) def ard_normalized_radar_backscatter(self, elevation_model=UNSET, contributing_area=UNSET, ellipsoid_incidence_angle=UNSET, noise_removal=UNSET) -> 'ProcessBuilder': """ CARD4L compliant SAR NRB generation :param self: The source data cube containing SAR input. :param elevation_model: The digital elevation model to use. Set to `null` (the default) to allow the back-end to choose, which will improve portability, but reduce reproducibility. :param contributing_area: If set to `true`, a DEM-based local contributing area band named `contributing_area` is added. The values are given in square meters. :param ellipsoid_incidence_angle: If set to `true`, an ellipsoidal incidence angle band named `ellipsoid_incidence_angle` is added. The values are given in degrees. :param noise_removal: If set to `false`, no noise removal is applied. Defaults to `true`, which removes noise. :return: Backscatter values expressed as gamma0 in linear scale. In addition to the bands `contributing_area` and `ellipsoid_incidence_angle` that can optionally be added with corresponding parameters, the following bands are always added to the data cube: - `mask`: A data mask that indicates which values are valid (1), invalid (0) or contain no-data (null). - `local_incidence_angle`: A band with DEM-based local incidence angles in degrees. The data returned is CARD4L compliant with corresponding metadata. 
""" return ard_normalized_radar_backscatter(data=self, elevation_model=elevation_model, contributing_area=contributing_area, ellipsoid_incidence_angle=ellipsoid_incidence_angle, noise_removal=noise_removal) def ard_surface_reflectance(self, atmospheric_correction_method, cloud_detection_method, elevation_model=UNSET, atmospheric_correction_options=UNSET, cloud_detection_options=UNSET) -> 'ProcessBuilder': """ CARD4L compliant Surface Reflectance generation :param self: The source data cube containing multi-spectral optical top of the atmosphere (TOA) reflectances. There must be a single dimension of type `bands` available. :param atmospheric_correction_method: The atmospheric correction method to use. :param cloud_detection_method: The cloud detection method to use. Each method supports detecting different atmospheric disturbances such as clouds, cloud shadows, aerosols, haze, ozone and/or water vapour in optical imagery. :param elevation_model: The digital elevation model to use. Set to `null` (the default) to allow the back-end to choose, which will improve portability, but reduce reproducibility. :param atmospheric_correction_options: Proprietary options for the atmospheric correction method. Specifying proprietary options will reduce portability. :param cloud_detection_options: Proprietary options for the cloud detection method. Specifying proprietary options will reduce portability. :return: Data cube containing bottom of atmosphere reflectances for each spectral band in the source data cube, with atmospheric disturbances like clouds and cloud shadows removed. No-data values (null) are directly set in the bands. Depending on the methods used, several additional bands will be added to the data cube: Data cube containing bottom of atmosphere reflectances for each spectral band in the source data cube, with atmospheric disturbances like clouds and cloud shadows removed. 
Depending on the methods used, several additional bands will be added to the data cube: - `date` (optional): Specifies per-pixel acquisition timestamps. - `incomplete-testing` (required): Identifies pixels with a value of 1 for which the per-pixel tests (at least saturation, cloud and cloud shadows, see CARD4L specification for details) have not all been successfully completed. Otherwise, the value is 0. - `saturation` (required) / `saturation_{band}` (optional): Indicates where pixels in the input spectral bands are saturated (1) or not (0). If the saturation is given per band, the band names are `saturation_{band}` with `{band}` being the band name from the source data cube. - `cloud`, `shadow` (both required),`aerosol`, `haze`, `ozone`, `water_vapor` (all optional): Indicates the probability of pixels being an atmospheric disturbance such as clouds. All bands have values between 0 (clear) and 1, which describes the probability that it is an atmospheric disturbance. - `snow-ice` (optional): Points to a file that indicates whether a pixel is assessed as being snow/ice (1) or not (0). All values describe the probability and must be between 0 and 1. - `land-water` (optional): Indicates whether a pixel is assessed as being land (1) or water (0). All values describe the probability and must be between 0 and 1. - `incidence-angle` (optional): Specifies per-pixel incidence angles in degrees. - `azimuth` (optional): Specifies per-pixel azimuth angles in degrees. - `sun-azimuth:` (optional): Specifies per- pixel sun azimuth angles in degrees. - `sun-elevation` (optional): Specifies per-pixel sun elevation angles in degrees. - `terrain-shadow` (optional): Indicates with a value of 1 whether a pixel is not directly illuminated due to terrain shadowing. Otherwise, the value is 0. - `terrain-occlusion` (optional): Indicates with a value of 1 whether a pixel is not visible to the sensor due to terrain occlusion during off-nadir viewing. Otherwise, the value is 0. 
- `terrain-illumination` (optional): Contains coefficients used for terrain illumination correction are provided for each pixel. The data returned is CARD4L compliant with corresponding metadata. """ return ard_surface_reflectance(data=self, atmospheric_correction_method=atmospheric_correction_method, cloud_detection_method=cloud_detection_method, elevation_model=elevation_model, atmospheric_correction_options=atmospheric_correction_options, cloud_detection_options=cloud_detection_options) def array_append(self, value) -> 'ProcessBuilder': """ Append a value to an array :param self: An array. :param value: Value to append to the array. :return: The new array with the value being appended. """ return array_append(data=self, value=value) def array_apply(self, process, context=UNSET) -> 'ProcessBuilder': """ Apply a process to each array element :param self: An array. :param process: A process that accepts and returns a single value and is applied on each individual value in the array. The process may consist of multiple sub-processes and could, for example, consist of processes such as ``abs()`` or ``linear_scale_range()``. :param context: Additional data to be passed to the process. :return: An array with the newly computed values. The number of elements are the same as for the original array. """ return array_apply(data=self, process=process, context=context) def array_concat(self, array2) -> 'ProcessBuilder': """ Merge two arrays :param self: The first array. :param array2: The second array. :return: The merged array. """ return array_concat(array1=self, array2=array2) def array_contains(self, value) -> 'ProcessBuilder': """ Check whether the array contains a given value :param self: List to find the value in. :param value: Value to find in `data`. :return: `true` if the list contains the value, false` otherwise. 
""" return array_contains(data=self, value=value) def array_create(self=UNSET, repeat=UNSET) -> 'ProcessBuilder': """ Create an array :param self: A (native) array to fill the newly created array with. Defaults to an empty array. :param repeat: The number of times the (native) array specified in `data` is repeatedly added after each other to the new array being created. Defaults to `1`. :return: The newly created array. """ return array_create(data=self, repeat=repeat) def array_create_labeled(self, labels) -> 'ProcessBuilder': """ Create a labeled array :param self: An array of values to be used. :param labels: An array of labels to be used. :return: The newly created labeled array. """ return array_create_labeled(data=self, labels=labels) def array_element(self, index=UNSET, label=UNSET, return_nodata=UNSET) -> 'ProcessBuilder': """ Get an element from an array :param self: An array. :param index: The zero-based index of the element to retrieve. :param label: The label of the element to retrieve. Throws an `ArrayNotLabeled` exception, if the given array is not a labeled array and this parameter is set. :param return_nodata: By default this process throws an `ArrayElementNotAvailable` exception if the index or label is invalid. If you want to return `null` instead, set this flag to `true`. :return: The value of the requested element. """ return array_element(data=self, index=index, label=label, return_nodata=return_nodata) def array_filter(self, condition, context=UNSET) -> 'ProcessBuilder': """ Filter an array based on a condition :param self: An array. :param condition: A condition that is evaluated against each value, index and/or label in the array. Only the array elements for which the condition returns `true` are preserved. :param context: Additional data to be passed to the condition. :return: An array filtered by the specified condition. The number of elements are less than or equal compared to the original array. 
""" return array_filter(data=self, condition=condition, context=context) def array_find(self, value) -> 'ProcessBuilder': """ Get the index for a value in an array :param self: List to find the value in. :param value: Value to find in `data`. :return: The index of the first element with the specified value. If no element was found, `null` is returned. """ return array_find(data=self, value=value) def array_find_label(self, label) -> 'ProcessBuilder': """ Get the index for a label in a labeled array :param self: List to find the label in. :param label: Label to find in `data`. :return: The index of the element with the specified label assigned. If no such label was found, `null` is returned. """ return array_find_label(data=self, label=label) def array_interpolate_linear(self) -> 'ProcessBuilder': """ One-dimensional linear interpolation for arrays :param self: An array of numbers and no-data values. If the given array is a labeled array, the labels must have a natural/inherent label order and the process expects the labels to be sorted accordingly. This is the default behavior in openEO for spatial and temporal dimensions. :return: An array with no-data values being replaced with interpolated values. If not at least 2 numerical values are available in the array, the array stays the same. """ return array_interpolate_linear(data=self) def array_labels(self) -> 'ProcessBuilder': """ Get the labels for an array :param self: An array with labels. :return: The labels as an array. """ return array_labels(data=self) def array_modify(self, values, index, length=UNSET) -> 'ProcessBuilder': """ Change the content of an array (insert, remove, update) :param self: An array. :param values: The values to fill the array with. :param index: The index of the element to insert the value(s) before. If the index is greater than the number of elements, the process throws an `ArrayElementNotAvailable` exception. To insert after the last element, there are two options: 1. 
Use the simpler processes ``array_append()`` to append a single value or ``array_concat`` to append multiple values. 2. Specify the number of elements in the array. You can retrieve the number of elements with the process ``count()``, having the parameter `condition` set to `true`. :param length: The number of elements to replace. This parameter has no effect in case the given `index` does not exist in the array given. :return: An array with values added, updated or removed. """ return array_modify(data=self, values=values, index=index, length=length) def arsinh(self) -> 'ProcessBuilder': """ Inverse hyperbolic sine :param self: A number. :return: The computed angle in radians. """ return arsinh(x=self) def artanh(self) -> 'ProcessBuilder': """ Inverse hyperbolic tangent :param self: A number. :return: The computed angle in radians. """ return artanh(x=self) def atmospheric_correction(self, method, elevation_model=UNSET, options=UNSET) -> 'ProcessBuilder': """ Apply atmospheric correction :param self: Data cube containing multi-spectral optical top of atmosphere reflectances to be corrected. :param method: The atmospheric correction method to use. To get reproducible results, you have to set a specific method. Set to `null` to allow the back-end to choose, which will improve portability, but reduce reproducibility as you *may* get different results if you run the processes multiple times. :param elevation_model: The digital elevation model to use. Set to `null` (the default) to allow the back-end to choose, which will improve portability, but reduce reproducibility. :param options: Proprietary options for the atmospheric correction method. Specifying proprietary options will reduce portability. :return: Data cube containing bottom of atmosphere reflectances. 
""" return atmospheric_correction(data=self, method=method, elevation_model=elevation_model, options=options) def between(self, min, max, exclude_max=UNSET) -> 'ProcessBuilder': """ Between comparison :param self: The value to check. :param min: Lower boundary (inclusive) to check against. :param max: Upper boundary (inclusive) to check against. :param exclude_max: Exclude the upper boundary `max` if set to `true`. Defaults to `false`. :return: `true` if `x` is between the specified bounds, otherwise `false`. """ return between(x=self, min=min, max=max, exclude_max=exclude_max) def ceil(self) -> 'ProcessBuilder': """ Round fractions up :param self: A number to round up. :return: The number rounded up. """ return ceil(x=self) def climatological_normal(self, period, climatology_period=UNSET) -> 'ProcessBuilder': """ Compute climatology normals :param self: A data cube with exactly one temporal dimension. The data cube must span at least the temporal interval specified in the parameter `climatology-period`. Seasonal periods may span two consecutive years, e.g. temporal winter that includes months December, January and February. If the required months before the actual climate period are available, the season is taken into account. If not available, the first season is not taken into account and the seasonal mean is based on one year less than the other seasonal normals. The incomplete season at the end of the last year is never taken into account. :param period: The time intervals to aggregate the average value for. The following pre-defined frequencies are supported: * `day`: Day of the year * `month`: Month of the year * `climatology- period`: The period specified in the `climatology-period`. * `season`: Three month periods of the calendar seasons (December - February, March - May, June - August, September - November). * `tropical- season`: Six month periods of the tropical seasons (November - April, May - October). 
:param climatology_period: The climatology period as a closed temporal interval. The first element of the array is the first year to be fully included in the temporal interval. The second element is the last year to be fully included in the temporal interval. The default period is from 1981 until 2010 (both inclusive). :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the temporal dimension. The temporal dimension has the following dimension labels: * `day`: `001` - `365` * `month`: `01` - `12` * `climatology-period`: `climatology-period` * `season`: `djf` (December - February), `mam` (March - May), `jja` (June - August), `son` (September - November) * `tropical- season`: `ndjfma` (November - April), `mjjaso` (May - October) """ return climatological_normal(data=self, period=period, climatology_period=climatology_period) def clip(self, min, max) -> 'ProcessBuilder': """ Clip a value between a minimum and a maximum :param self: A number. :param min: Minimum value. If the value is lower than this value, the process will return the value of this parameter. :param max: Maximum value. If the value is greater than this value, the process will return the value of this parameter. :return: The value clipped to the specified range. """ return clip(x=self, min=min, max=max) def cloud_detection(self, method, options=UNSET) -> 'ProcessBuilder': """ Create cloud masks :param self: The source data cube containing multi-spectral optical top of the atmosphere (TOA) reflectances on which to perform cloud detection. :param method: The cloud detection method to use. To get reproducible results, you have to set a specific method. Set to `null` to allow the back-end to choose, which will improve portability, but reduce reproducibility as you *may* get different results if you run the processes multiple times. 
:param options: Proprietary options for the cloud detection method. Specifying proprietary options will reduce portability. :return: A data cube with bands for the atmospheric disturbances. Each of the masks contains values between 0 and 1. The data cube has the same spatial and temporal dimensions as the source data cube and a dimension that contains a dimension label for each of the supported/considered atmospheric disturbance. """ return cloud_detection(data=self, method=method, options=options) def constant(self) -> 'ProcessBuilder': """ Define a constant value :param self: The value of the constant. :return: The value of the constant. """ return constant(x=self) def cos(self) -> 'ProcessBuilder': """ Cosine :param self: An angle in radians. :return: The computed cosine of `x`. """ return cos(x=self) def cosh(self) -> 'ProcessBuilder': """ Hyperbolic cosine :param self: An angle in radians. :return: The computed hyperbolic cosine of `x`. """ return cosh(x=self) def count(self, condition=UNSET, context=UNSET) -> 'ProcessBuilder': """ Count the number of elements :param self: An array with elements of any data type. :param condition: A condition consists of one or more processes, which in the end return a boolean value. It is evaluated against each element in the array. An element is counted only if the condition returns `true`. Defaults to count valid elements in a list (see ``is_valid()``). Setting this parameter to boolean `true` counts all elements in the list. :param context: Additional data to be passed to the condition. :return: The counted number of elements. """ return count(data=self, condition=condition, context=context) def create_raster_cube(self) -> 'ProcessBuilder': """ Create an empty raster data cube :return: An empty raster data cube with zero dimensions. """ return create_raster_cube() def cummax(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Cumulative maxima :param self: An array of numbers. 
:param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative maxima. """ return cummax(data=self, ignore_nodata=ignore_nodata) def cummax(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Cumulative maxima :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative maxima. """ return cummax(data=self, ignore_nodata=ignore_nodata) def cummin(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Cumulative minima :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative minima. """ return cummin(data=self, ignore_nodata=ignore_nodata) def cummin(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Cumulative minima :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative minima. """ return cummin(data=self, ignore_nodata=ignore_nodata) def cumproduct(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Cumulative products :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. 
:return: An array with the computed cumulative products. """ return cumproduct(data=self, ignore_nodata=ignore_nodata) def cumproduct(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Cumulative products :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative products. """ return cumproduct(data=self, ignore_nodata=ignore_nodata) def cumsum(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Cumulative sums :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative sums. """ return cumsum(data=self, ignore_nodata=ignore_nodata) def cumsum(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Cumulative sums :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative sums. """ return cumsum(data=self, ignore_nodata=ignore_nodata) def date_shift(self, value, unit) -> 'ProcessBuilder': """ Manipulates dates and times by addition or subtraction :param self: The date (and optionally time) to manipulate. If the given date doesn't include the time, the process assumes that the time component is `00:00:00Z` (i.e. midnight, in UTC). The millisecond part of the time is optional and defaults to `0` if not given. :param value: The period of time in the unit given that is added (positive numbers) or subtracted (negative numbers). The value `0` doesn't have any effect. 
:param unit: The unit for the value given. The following pre-defined units are available: - millisecond: Milliseconds - second: Seconds - leap seconds are ignored in computations. - minute: Minutes - hour: Hours - day: Days - changes only the the day part of a date - week: Weeks (equivalent to 7 days) - month: Months - year: Years Manipulations with the unit `year`, `month`, `week` or `day` do never change the time. If any of the manipulations result in an invalid date or time, the corresponding part is rounded down to the next valid date or time respectively. For example, adding a month to `2020-01-31` would result in `2020-02-29`. :return: The manipulated date. If a time component was given in the parameter `date`, the time component is returned with the date. """ return date_shift(date=self, value=value, unit=unit) def debug(self, code=UNSET, level=UNSET, message=UNSET) -> 'ProcessBuilder': """ Publish debugging information :param self: Data to publish. :param code: An identifier to help identify the log entry in a bunch of other log entries. :param level: The severity level of this message, defaults to `info`. Note that the level `error` forces the computation to be stopped! :param message: A message to send in addition to the data. :return: Returns the data as passed to the `data` parameter. """ return debug(data=self, code=code, level=level, message=message) def debug(self, code=UNSET, level=UNSET, message=UNSET) -> 'ProcessBuilder': """ Publish debugging information :param self: Data to publish. :param code: An identifier to help identify the log entry in a bunch of other log entries. :param level: The severity level of this message, defaults to `info`. Note that the level `error` forces the computation to be stopped! :param message: A message to send in addition to the data. :return: The data as passed to the `data` parameter without any modification. 
""" return debug(data=self, code=code, level=level, message=message) def dimension_labels(self, dimension) -> 'ProcessBuilder': """ Get the dimension labels :param self: The data cube. :param dimension: The name of the dimension to get the labels for. :return: The labels as an array. """ return dimension_labels(data=self, dimension=dimension) def divide(self, y) -> 'ProcessBuilder': """ Division of two numbers :param self: The dividend. :param y: The divisor. :return: The computed result. """ return divide(x=self, y=y) def drop_dimension(self, name) -> 'ProcessBuilder': """ Remove a dimension :param self: The data cube to drop a dimension from. :param name: Name of the dimension to drop. :return: A data cube without the specified dimension. The number of dimensions decreases by one, but the dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged. """ return drop_dimension(data=self, name=name) def e(self) -> 'ProcessBuilder': """ Euler's number (e) :return: The numerical value of Euler's number. """ return e() def eq(self, y, delta=UNSET, case_sensitive=UNSET) -> 'ProcessBuilder': """ Equal to comparison :param self: First operand. :param y: Second operand. :param delta: Only applicable for comparing two numbers. If this optional parameter is set to a positive non-zero number the equality of two numbers is checked against a delta value. This is especially useful to circumvent problems with floating-point inaccuracy in machine-based computation. This option is basically an alias for the following computation: `lte(abs(minus([x, y]), delta)` :param case_sensitive: Only applicable for comparing two strings. Case sensitive comparison can be disabled by setting this parameter to `false`. :return: `true` if `x` is equal to `y`, `null` if any operand is `null`, otherwise `false`. 
""" return eq(x=self, y=y, delta=delta, case_sensitive=case_sensitive) def exp(self) -> 'ProcessBuilder': """ Exponentiation to the base e :param self: The numerical exponent. :return: The computed value for *e* raised to the power of `p`. """ return exp(p=self) def extrema(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Minimum and maximum values :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that an array with two `null` values is returned if any value is such a value. :return: An array containing the minimum and maximum values for the specified numbers. The first element is the minimum, the second element is the maximum. If the input array is empty both elements are set to `null`. """ return extrema(data=self, ignore_nodata=ignore_nodata) def filter_bands(self, bands=UNSET, wavelengths=UNSET) -> 'ProcessBuilder': """ Filter the bands by names :param self: A data cube with bands. :param bands: A list of band names. Either the unique band name (metadata field `name` in bands) or one of the common band names (metadata field `common_name` in bands). If the unique band name and the common name conflict, the unique band name has a higher priority. The order of the specified array defines the order of the bands in the data cube. If multiple bands match a common name, all matched bands are included in the original order. :param wavelengths: A list of sub-lists with each sub-list consisting of two elements. The first element is the minimum wavelength and the second element is the maximum wavelength. Wavelengths are specified in micrometers (μm). The order of the specified array defines the order of the bands in the data cube. If multiple bands match the wavelengths, all matched bands are included in the original order. :return: A data cube limited to a subset of its original bands. 
The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the dimension of type `bands` has less (or the same) dimension labels. """ return filter_bands(data=self, bands=bands, wavelengths=wavelengths) def filter_bbox(self, extent) -> 'ProcessBuilder': """ Spatial filter using a bounding box :param self: A data cube. :param extent: A bounding box, which may include a vertical axis (see `base` and `height`). :return: A data cube restricted to the bounding box. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the spatial dimensions have less (or the same) dimension labels. """ return filter_bbox(data=self, extent=extent) def filter_labels(self, condition, dimension, context=UNSET) -> 'ProcessBuilder': """ Filter dimension labels based on a condition :param self: A data cube. :param condition: A condition that is evaluated against each dimension label in the specified dimension. A dimension label and the corresponding data is preserved for the given dimension, if the condition returns `true`. :param dimension: The name of the dimension to filter on. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist. :param context: Additional data to be passed to the condition. :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the given dimension has less (or the same) dimension labels. """ return filter_labels(data=self, condition=condition, dimension=dimension, context=context) def filter_labels(self, condition, dimension, context=UNSET) -> 'ProcessBuilder': """ Filter dimension labels based on a condition :param self: A data cube. :param condition: A condition that is evaluated against each dimension label in the specified dimension. 
A dimension label and the corresponding data is preserved for the given dimension, if the condition returns `true`. :param dimension: The name of the dimension to filter on. Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :param context: Additional data to be passed to the condition. :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the given dimension has less (or the same) dimension labels. """ return filter_labels(data=self, condition=condition, dimension=dimension, context=context) def filter_spatial(self, geometries) -> 'ProcessBuilder': """ Spatial filter using geometries :param self: A data cube. :param geometries: One or more geometries used for filtering, specified as GeoJSON. :return: A data cube restricted to the specified geometries. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the spatial dimensions have less (or the same) dimension labels. """ return filter_spatial(data=self, geometries=geometries) def filter_temporal(self, extent, dimension=UNSET) -> 'ProcessBuilder': """ Temporal filter for a temporal intervals :param self: A data cube. :param extent: Left-closed temporal interval, i.e. an array with exactly two elements: 1. The first element is the start of the temporal interval. The specified instance in time is **included** in the interval. 2. The second element is the end of the temporal interval. The specified instance in time is **excluded** from the interval. The specified temporal strings follow [RFC 3339](https://www.rfc- editor.org/rfc/rfc3339.html). Also supports open intervals by setting one of the boundaries to `null`, but never both. :param dimension: The name of the temporal dimension to filter on. If no specific dimension is specified or it is set to `null`, the filter applies to all temporal dimensions. 
Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :return: A data cube restricted to the specified temporal extent. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the temporal dimensions (determined by `dimensions` parameter) may have less dimension labels. """ return filter_temporal(data=self, extent=extent, dimension=dimension) def first(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ First element :param self: An array with elements of any data type. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if the first value is such a value. :return: The first element of the input array. """ return first(data=self, ignore_nodata=ignore_nodata) def floor(self) -> 'ProcessBuilder': """ Round fractions down :param self: A number to round down. :return: The number rounded down. """ return floor(x=self) def gt(self, y) -> 'ProcessBuilder': """ Greater than comparison :param self: First operand. :param y: Second operand. :return: `true` if `x` is strictly greater than `y` or `null` if any operand is `null`, otherwise `false`. """ return gt(x=self, y=y) def gte(self, y) -> 'ProcessBuilder': """ Greater than or equal to comparison :param self: First operand. :param y: Second operand. :return: `true` if `x` is greater than or equal to `y`, `null` if any operand is `null`, otherwise `false`. """ return gte(x=self, y=y) def if_(self, accept, reject=UNSET) -> 'ProcessBuilder': """ If-Then-Else conditional :param self: A boolean value. :param accept: A value that is returned if the boolean value is `true`. :param reject: A value that is returned if the boolean value is **not** `true`. Defaults to `null`. :return: Either the `accept` or `reject` argument depending on the given boolean value. 
""" return if_(value=self, accept=accept, reject=reject) def int(self) -> 'ProcessBuilder': """ Integer part of a number :param self: A number. :return: Integer part of the number. """ return int(x=self) def is_infinite(self) -> 'ProcessBuilder': """ Value is an infinite number :param self: The data to check. :return: `true` if the data is an infinite number, otherwise `false`. """ return is_infinite(x=self) def is_nan(self) -> 'ProcessBuilder': """ Value is not a number :param self: The data to check. :return: `true` if the data is not a number, otherwise `false`. """ return is_nan(x=self) def is_nodata(self) -> 'ProcessBuilder': """ Value is not a no-data value :param self: The data to check. :return: `true` if the data is a no-data value, otherwise `false`. """ return is_nodata(x=self) def is_valid(self) -> 'ProcessBuilder': """ Value is valid data :param self: The data to check. :return: `true` if the data is valid, otherwise `false`. """ return is_valid(x=self) def last(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Last element :param self: An array with elements of any data type. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if the last value is such a value. :return: The last element of the input array. """ return last(data=self, ignore_nodata=ignore_nodata) def linear_scale_range(self, inputMin, inputMax, outputMin=UNSET, outputMax=UNSET) -> 'ProcessBuilder': """ Linear transformation between two ranges :param self: A number to transform. The number gets clipped to the bounds specified in `inputMin` and `inputMax`. :param inputMin: Minimum value the input can obtain. :param inputMax: Maximum value the input can obtain. :param outputMin: Minimum value of the desired output range. :param outputMax: Maximum value of the desired output range. :return: The transformed number. 
""" return linear_scale_range(x=self, inputMin=inputMin, inputMax=inputMax, outputMin=outputMin, outputMax=outputMax) def ln(self) -> 'ProcessBuilder': """ Natural logarithm :param self: A number to compute the natural logarithm for. :return: The computed natural logarithm. """ return ln(x=self) def load_collection(self, spatial_extent, temporal_extent, bands=UNSET, properties=UNSET) -> 'ProcessBuilder': """ Load a collection :param self: The collection id. :param spatial_extent: Limits the data to load from the collection to the specified bounding box or polygons. The process puts a pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC). The GeoJSON can be one of the following feature types: * A `Polygon` or `MultiPolygon` geometry, * a `Feature` with a `Polygon` or `MultiPolygon` geometry, * a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or * a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above. Set this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data. :param temporal_extent: Limits the data to load from the collection to the specified left-closed temporal interval. Applies to all temporal dimensions. The interval has to be specified as an array with exactly two elements: 1. The first element is the start of the temporal interval. The specified instance in time is **included** in the interval. 2. The second element is the end of the temporal interval. The specified instance in time is **excluded** from the interval. 
The specified temporal strings follow [RFC 3339](https://www.rfc-editor.org/rfc/rfc3339.html). Also supports open intervals by setting one of the boundaries to `null`, but never both. Set this parameter to `null` to set no limit for the temporal extent. Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_temporal()`` directly after loading unbounded data. :param bands: Only adds the specified bands into the data cube so that bands that don't match the list of band names are not available. Applies to all dimensions of type `bands`. Either the unique band name (metadata field `name` in bands) or one of the common band names (metadata field `common_name` in bands) can be specified. If the unique band name and the common name conflict, the unique band name has a higher priority. The order of the specified array defines the order of the bands in the data cube. If multiple bands match a common name, all matched bands are included in the original order. It is recommended to use this parameter instead of using ``filter_bands()`` directly after loading unbounded data. :param properties: Limits the data by metadata properties to include only data in the data cube which all given conditions return `true` for (AND operation). Specify key-value-pairs with the key being the name of the metadata property, which can be retrieved with the openEO Data Discovery for Collections. The value must a condition (user-defined process) to be evaluated against the collection metadata, see the example. :return: A data cube for further processing. The dimensions and dimension properties (name, type, labels, reference system and resolution) correspond to the collection's metadata, but the dimension labels are restricted as specified in the parameters. 
""" return load_collection(id=self, spatial_extent=spatial_extent, temporal_extent=temporal_extent, bands=bands, properties=properties) def load_result(self) -> 'ProcessBuilder': """ Load batch job results :param self: The id of a batch job with results. :return: A data cube for further processing. """ return load_result(id=self) def load_result(self) -> 'ProcessBuilder': """ Load batch job results :param self: The id of a batch job with results. :return: A data cube for further processing. """ return load_result(id=self) def load_uploaded_files(self, format, options=UNSET) -> 'ProcessBuilder': """ Load files from the user workspace :param self: The files to read. Folders can't be specified, instead specify all files. An error is thrown if a file can't be read. :param format: The file format to read from. It must be one of the values that the server reports as supported input file formats, which usually correspond to the short GDAL/OGR codes. If the format is not suitable for loading the data, a `FormatUnsuitable` exception will be thrown. This parameter is *case insensitive*. :param options: The file format parameters to be used to read the files. Must correspond to the parameters that the server reports as supported parameters for the chosen `format`. The parameter names and valid values usually correspond to the GDAL/OGR format options. :return: A data cube for further processing. """ return load_uploaded_files(paths=self, format=format, options=options) def load_uploaded_files(self, format, options=UNSET) -> 'ProcessBuilder': """ Load files from the user workspace :param self: The files to read. Folders can't be specified, specify all files instead. An exception is thrown if a file can't be read. :param format: The file format to read from. It must be one of the values that the server reports as supported input file formats, which usually correspond to the short GDAL/OGR codes. 
If the format is not suitable for loading the data, a `FormatUnsuitable` exception will be thrown. This parameter is *case insensitive*. :param options: The file format parameters to be used to read the files. Must correspond to the parameters that the server reports as supported parameters for the chosen `format`. The parameter names and valid values usually correspond to the GDAL/OGR format options. :return: A data cube for further processing. """ return load_uploaded_files(paths=self, format=format, options=options) def log(self, base) -> 'ProcessBuilder': """ Logarithm to a base :param self: A number to compute the logarithm for. :param base: The numerical base. :return: The computed logarithm. """ return log(x=self, base=base) def lt(self, y) -> 'ProcessBuilder': """ Less than comparison :param self: First operand. :param y: Second operand. :return: `true` if `x` is strictly less than `y`, `null` if any operand is `null`, otherwise `false`. """ return lt(x=self, y=y) def lte(self, y) -> 'ProcessBuilder': """ Less than or equal to comparison :param self: First operand. :param y: Second operand. :return: `true` if `x` is less than or equal to `y`, `null` if any operand is `null`, otherwise `false`. """ return lte(x=self, y=y) def mask(self, mask, replacement=UNSET) -> 'ProcessBuilder': """ Apply a raster mask :param self: A raster data cube. :param mask: A mask as a raster data cube. Every pixel in `data` must have a corresponding element in `mask`. :param replacement: The value used to replace masked values with. :return: A masked raster data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged. """ return mask(data=self, mask=mask, replacement=replacement) def mask_polygon(self, mask, replacement=UNSET, inside=UNSET) -> 'ProcessBuilder': """ Apply a polygon mask :param self: A raster data cube. :param mask: A GeoJSON object containing at least one polygon. 
The provided feature types can be one of the following: * A `Polygon` or `MultiPolygon` geometry, * a `Feature` with a `Polygon` or `MultiPolygon` geometry, * a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or * a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above. :param replacement: The value used to replace masked values with. :param inside: If set to `true` all pixels for which the point at the pixel center **does** intersect with any polygon are replaced. :return: A masked raster data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged. """ return mask_polygon(data=self, mask=mask, replacement=replacement, inside=inside) def max(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Maximum value :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The maximum value. """ return max(data=self, ignore_nodata=ignore_nodata) def mean(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Arithmetic mean (average) :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The computed arithmetic mean. """ return mean(data=self, ignore_nodata=ignore_nodata) def median(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Statistical median :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. 
Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The computed statistical median. """ return median(data=self, ignore_nodata=ignore_nodata) def merge_cubes(self, cube2, overlap_resolver=UNSET, context=UNSET) -> 'ProcessBuilder': """ Merge two data cubes :param self: The first data cube. :param cube2: The second data cube. :param overlap_resolver: A reduction operator that resolves the conflict if the data overlaps. The reducer must return a value of the same data type as the input values are. The reduction operator may be a single process such as ``multiply()`` or consist of multiple sub-processes. `null` (the default) can be specified if no overlap resolver is required. :param context: Additional data to be passed to the overlap resolver. :return: The merged data cube. See the process description for details regarding the dimensions and dimension properties (name, type, labels, reference system and resolution). """ return merge_cubes(cube1=self, cube2=cube2, overlap_resolver=overlap_resolver, context=context) def min(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Minimum value :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The minimum value. """ return min(data=self, ignore_nodata=ignore_nodata) def mod(self, y) -> 'ProcessBuilder': """ Modulo :param self: A number to be used as the dividend. :param y: A number to be used as the divisor. :return: The remainder after division. """ return mod(x=self, y=y) def multiply(self, y) -> 'ProcessBuilder': """ Multiplication of two numbers :param self: The multiplier. :param y: The multiplicand. :return: The computed product of the two numbers. 
""" return multiply(x=self, y=y) def nan(self) -> 'ProcessBuilder': """ Not a Number (NaN) :return: Returns `NaN`. """ return nan() def ndvi(self, nir=UNSET, red=UNSET, target_band=UNSET) -> 'ProcessBuilder': """ Normalized Difference Vegetation Index :param self: A raster data cube with two bands that have the common names `red` and `nir` assigned. :param nir: The name of the NIR band. Defaults to the band that has the common name `nir` assigned. Either the unique band name (metadata field `name` in bands) or one of the common band names (metadata field `common_name` in bands) can be specified. If the unique band name and the common name conflict, the unique band name has a higher priority. :param red: The name of the red band. Defaults to the band that has the common name `red` assigned. Either the unique band name (metadata field `name` in bands) or one of the common band names (metadata field `common_name` in bands) can be specified. If the unique band name and the common name conflict, the unique band name has a higher priority. :param target_band: By default, the dimension of type `bands` is dropped. To keep the dimension specify a new band name in this parameter so that a new dimension label with the specified name will be added for the computed values. :return: A raster data cube containing the computed NDVI values. The structure of the data cube differs depending on the value passed to `target_band`: * `target_band` is `null`: The data cube does not contain the dimension of type `bands`, the number of dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged. * `target_band` is a string: The data cube keeps the same dimensions. The dimension properties remain unchanged, but the number of dimension labels for the dimension of type `bands` increases by one. The additional label is named as specified in `target_band`. 
""" return ndvi(data=self, nir=nir, red=red, target_band=target_band) def neq(self, y, delta=UNSET, case_sensitive=UNSET) -> 'ProcessBuilder': """ Not equal to comparison :param self: First operand. :param y: Second operand. :param delta: Only applicable for comparing two numbers. If this optional parameter is set to a positive non-zero number the non-equality of two numbers is checked against a delta value. This is especially useful to circumvent problems with floating-point inaccuracy in machine-based computation. This option is basically an alias for the following computation: `gt(abs(minus([x, y]), delta)` :param case_sensitive: Only applicable for comparing two strings. Case sensitive comparison can be disabled by setting this parameter to `false`. :return: `true` if `x` is *not* equal to `y`, `null` if any operand is `null`, otherwise `false`. """ return neq(x=self, y=y, delta=delta, case_sensitive=case_sensitive) def normalized_difference(self, y) -> 'ProcessBuilder': """ Normalized difference :param self: The value for the first band. :param y: The value for the second band. :return: The computed normalized difference. """ return normalized_difference(x=self, y=y) def not_(self) -> 'ProcessBuilder': """ Inverting a boolean :param self: Boolean value to invert. :return: Inverted boolean value. """ return not_(x=self) def or_(self, y) -> 'ProcessBuilder': """ Logical OR :param self: A boolean value. :param y: A boolean value. :return: Boolean result of the logical OR. """ return or_(x=self, y=y) def order(self, asc=UNSET, nodata=UNSET) -> 'ProcessBuilder': """ Create a permutation :param self: An array to compute the order for. :param asc: The default sort order is ascending, with smallest values first. To sort in reverse (descending) order, set this parameter to `false`. :param nodata: Controls the handling of no-data values (`null`). By default, they are removed. 
If set to `true`, missing values in the data are put last; if set to `false`, they are put first. :return: The computed permutation. """ return order(data=self, asc=asc, nodata=nodata) def pi(self) -> 'ProcessBuilder': """ Pi (π) :return: The numerical value of Pi. """ return pi() def power(self, p) -> 'ProcessBuilder': """ Exponentiation :param self: The numerical base. :param p: The numerical exponent. :return: The computed value for `base` raised to the power of `p`. """ return power(base=self, p=p) def product(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Compute the product by multiplying numbers :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The computed product of the sequence of numbers. """ return product(data=self, ignore_nodata=ignore_nodata) def quantiles(self, probabilities=UNSET, q=UNSET, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Quantiles :param self: An array of numbers. :param probabilities: A list of probabilities to calculate quantiles for. The probabilities must be between 0 and 1. :param q: Intervals to calculate quantiles for. Calculates q-quantiles with (nearly) equal-sized intervals. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that an array with `null` values is returned if any element is such a value. :return: An array with the computed quantiles. The list has either * as many elements as the given list of `probabilities` had or * *`q`-1* elements. If the input array is empty the resulting array is filled with as many `null` values as required according to the list above. See the 'Empty array' example for an example. 
""" return quantiles(data=self, probabilities=probabilities, q=q, ignore_nodata=ignore_nodata) def rearrange(self, order) -> 'ProcessBuilder': """ Rearrange an array based on a permutation :param self: The array to rearrange. :param order: The permutation used for rearranging. :return: The rearranged array. """ return rearrange(data=self, order=order) def reduce_dimension(self, reducer, dimension, context=UNSET) -> 'ProcessBuilder': """ Reduce dimensions :param self: A data cube. :param reducer: A reducer to apply on the specified dimension. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. :param dimension: The name of the dimension over which to reduce. Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :param context: Additional data to be passed to the reducer. :return: A data cube with the newly computed values. It is missing the given dimension, the number of dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged. """ return reduce_dimension(data=self, reducer=reducer, dimension=dimension, context=context) def reduce_dimension_binary(self, reducer, dimension, context=UNSET) -> 'ProcessBuilder': """ Reduce dimensions using binary reduction :param self: A data cube. :param reducer: A reduction operator to be applied consecutively on pairs of values. It must be both associative and commutative as the execution may be executed in parallel and therefore the order of execution is arbitrary. The reduction operator may be a single process such as ``multiply()`` or consist of multiple sub-processes. :param dimension: The name of the dimension over which to reduce. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist. :param context: Additional data to be passed to the reducer. 
:return: A data cube with the newly computed values. It is missing the given dimension, the number of dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged. """ return reduce_dimension_binary(data=self, reducer=reducer, dimension=dimension, context=context) def reduce_spatial(self, reducer, context=UNSET) -> 'ProcessBuilder': """ Reduce spatial dimensions 'x' and 'y' :param self: A data cube. :param reducer: A reducer to apply on the horizontal spatial dimensions. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. :param context: Additional data to be passed to the reducer. :return: A data cube with the newly computed values. It is missing the horizontal spatial dimensions, the number of dimensions decreases by two. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged. """ return reduce_spatial(data=self, reducer=reducer, context=context) def rename_dimension(self, source, target) -> 'ProcessBuilder': """ Rename a dimension :param self: The data cube. :param source: The current name of the dimension. Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :param target: A new Name for the dimension. Fails with a `DimensionExists` exception if a dimension with the specified name exists. :return: A data cube with the same dimensions, but the name of one of the dimensions changes. The old name can not be referred to any longer. The dimension properties (name, type, labels, reference system and resolution) remain unchanged. """ return rename_dimension(data=self, source=source, target=target) def rename_labels(self, dimension, target, source=UNSET) -> 'ProcessBuilder': """ Rename dimension labels :param self: The data cube. 
:param dimension: The name of the dimension to rename the labels for. :param target: The new names for the labels. The dimension labels in the data cube are expected to be enumerated if the parameter `target` is not specified. If a target dimension label already exists in the data cube, a `LabelExists` exception is thrown. :param source: The names of the labels as they are currently in the data cube. The array defines an unsorted and potentially incomplete list of labels that should be renamed to the names available in the corresponding array elements in the parameter `target`. If one of the source dimension labels doesn't exist, the `LabelNotAvailable` exception is thrown. By default, the array is empty so that the dimension labels in the data cube are expected to be enumerated. :return: The data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that for the given dimension the labels change. The old labels can not be referred to any longer. The number of labels remains the same. """ return rename_labels(data=self, dimension=dimension, target=target, source=source) def resample_cube_spatial(self, target, method=UNSET) -> 'ProcessBuilder': """ Resample the spatial dimensions to match a target data cube :param self: A data cube. :param target: A data cube that describes the spatial target resolution. :param method: Resampling method to use. 
The following options are available and are meant to align with [`gdalwarp`](https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r): * `average`: average (mean) resampling, computes the weighted average of all valid pixels * `bilinear`: bilinear resampling * `cubic`: cubic resampling * `cubicspline`: cubic spline resampling * `lanczos`: Lanczos windowed sinc resampling * `max`: maximum resampling, selects the maximum value from all valid pixels * `med`: median resampling, selects the median value of all valid pixels * `min`: minimum resampling, selects the minimum value from all valid pixels * `mode`: mode resampling, selects the value which appears most often of all the sampled points * `near`: nearest neighbour resampling (default) * `q1`: first quartile resampling, selects the first quartile value of all valid pixels * `q3`: third quartile resampling, selects the third quartile value of all valid pixels * `rms` root mean square (quadratic mean) of all valid pixels * `sum`: compute the weighted sum of all valid pixels Valid pixels are determined based on the function ``is_valid()``. :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the spatial dimensions. """ return resample_cube_spatial(data=self, target=target, method=method) def resample_cube_temporal(self, target, method, dimension=UNSET, context=UNSET) -> 'ProcessBuilder': """ Resample a temporal dimension to match a target data cube :param self: A data cube. :param target: A data cube that describes the temporal target resolution. :param method: A resampling method to be applied, could be a reducer for downsampling or other methods for upsampling. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. 
:param dimension: The name of the temporal dimension to resample, which must exist with this name in both data cubes. If the dimension is not set or is set to `null`, the data cube is expected to only have one temporal dimension. Fails with a `TooManyDimensions` error if it has more dimensions. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist. :param context: Additional data to be passed to the process specified for the parameter `method`. :return: A raster data cube with the same dimensions and the same dimension properties (name, type, labels, reference system and resolution) for all non-temporal dimensions. For the temporal dimension the name and type remain unchanged, but the reference system changes and the labels and resolution may change. """ return resample_cube_temporal(data=self, target=target, method=method, dimension=dimension, context=context) def resample_cube_temporal(self, target, dimension=UNSET, valid_within=UNSET) -> 'ProcessBuilder': """ Resample temporal dimensions to match a target data cube :param self: A data cube with one or more temporal dimensions. :param target: A data cube that describes the temporal target resolution. :param dimension: The name of the temporal dimension to resample, which must exist with this name in both data cubes. If the dimension is not set or is set to `null`, the process resamples all temporal dimensions that exist with the same names in both data cubes. The following exceptions may occur: * A dimension is given, but it does not exist in any of the data cubes: `DimensionNotAvailable` * A dimension is given, but one of them is not temporal: `DimensionMismatch` * No specific dimension name is given and there are no temporal dimensions with the same name in the data: `DimensionMismatch` :param valid_within: Setting this parameter to a numerical value enables that the process searches for valid values within the given period of days before and after the target timestamps. 
Valid values are determined based on the function ``is_valid()``. For example, the limit of `7` for the target timestamps `2020-01-15 12:00:00` looks for a nearest neighbor after `2020-01-08 12:00:00` and before `2020-01-22 12:00:00`. If no valid value is found within the given period, the value will be set to no- data (`null`). :return: A raster data cube with the same dimensions and the same dimension properties (name, type, labels, reference system and resolution) for all non-temporal dimensions. For the temporal dimension, the name and type remain unchanged, but the dimension labels, resolution and reference system may change. """ return resample_cube_temporal(data=self, target=target, dimension=dimension, valid_within=valid_within) def resample_spatial(self, resolution=UNSET, projection=UNSET, method=UNSET, align=UNSET) -> 'ProcessBuilder': """ Resample and warp the spatial dimensions :param self: A raster data cube. :param resolution: Resamples the data cube to the target resolution, which can be specified either as separate values for x and y or as a single value for both axes. Specified in the units of the target projection. Doesn't change the resolution by default (`0`). :param projection: Warps the data cube to the target projection, specified as as [EPSG code](http://www.epsg-registry.org/), [WKT2 (ISO 19162) string](http://docs.opengeospatial.org/is/18-010r7/18-010r7.html), [PROJ definition (deprecated)](https://proj.org/usage/quickstart.html). By default (`null`), the projection is not changed. :param method: Resampling method to use. 
The following options are available and are meant to align with [`gdalwarp`](https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r): * `average`: average (mean) resampling, computes the weighted average of all valid pixels * `bilinear`: bilinear resampling * `cubic`: cubic resampling * `cubicspline`: cubic spline resampling * `lanczos`: Lanczos windowed sinc resampling * `max`: maximum resampling, selects the maximum value from all valid pixels * `med`: median resampling, selects the median value of all valid pixels * `min`: minimum resampling, selects the minimum value from all valid pixels * `mode`: mode resampling, selects the value which appears most often of all the sampled points * `near`: nearest neighbour resampling (default) * `q1`: first quartile resampling, selects the first quartile value of all valid pixels * `q3`: third quartile resampling, selects the third quartile value of all valid pixels * `rms` root mean square (quadratic mean) of all valid pixels * `sum`: compute the weighted sum of all valid pixels Valid pixels are determined based on the function ``is_valid()``. :param align: Specifies to which corner of the spatial extent the new resampled data is aligned to. :return: A raster data cube with values warped onto the new projection. It has the same dimensions and the same dimension properties (name, type, labels, reference system and resolution) for all non-spatial or vertical spatial dimensions. For the horizontal spatial dimensions the name and type remain unchanged, but reference system, labels and resolution may change depending on the given parameters. """ return resample_spatial(data=self, resolution=resolution, projection=projection, method=method, align=align) def round(self, p=UNSET) -> 'ProcessBuilder': """ Round to a specified precision :param self: A number to round. :param p: A positive number specifies the number of digits after the decimal point to round to. 
A negative number means rounding to a power of ten, so for example *-2* rounds to the nearest hundred. Defaults to *0*. :return: The rounded number. """ return round(x=self, p=p) def run_udf(self, udf, runtime, version=UNSET, context=UNSET) -> 'ProcessBuilder': """ Run a UDF :param self: The data to be passed to the UDF as an array or raster data cube. :param udf: Either source code, an absolute URL or a path to a UDF script. :param runtime: A UDF runtime identifier available at the back-end. :param version: An UDF runtime version. If set to `null`, the default runtime version specified for each runtime is used. :param context: Additional data such as configuration options to be passed to the UDF. :return: The data processed by the UDF. * Returns a raster data cube, if a raster data cube is passed for `data`. Details on the dimensions and dimension properties (name, type, labels, reference system and resolution) depend on the UDF. * If an array is passed for `data`, the returned value can be of any data type, but is exactly what the UDF returns. """ return run_udf(data=self, udf=udf, runtime=runtime, version=version, context=context) def run_udf_externally(self, url, context=UNSET) -> 'ProcessBuilder': """ Run an externally hosted UDF container :param self: The data to be passed to the UDF as array or raster data cube. :param url: URL to a remote UDF service. :param context: Additional data such as configuration options that should be passed to the UDF. :return: The data processed by the UDF service. * Returns a raster data cube, if a raster data cube is passed for `data`. Details on the dimensions and dimension properties (name, type, labels, reference system and resolution) depend on the UDF. * If an array is passed for `data`, the returned value can be of any data type, but is exactly what the UDF returns. 
""" return run_udf_externally(data=self, url=url, context=context) def run_udf_externally(self, url, context=UNSET) -> 'ProcessBuilder': """ Run an externally hosted UDF container :param self: The data to be passed to the UDF as an array or raster data cube. :param url: Absolute URL to a remote UDF service. :param context: Additional data such as configuration options to be passed to the UDF. :return: The data processed by the UDF service. * Returns a raster data cube if a raster data cube is passed for `data`. Details on the dimensions and dimension properties (name, type, labels, reference system and resolution) depend on the UDF. * If an array is passed for `data`, the returned value can be of any data type, but is exactly what the UDF returns. """ return run_udf_externally(data=self, url=url, context=context) def sar_backscatter(self, coefficient=UNSET, elevation_model=UNSET, mask=UNSET, contributing_area=UNSET, local_incidence_angle=UNSET, ellipsoid_incidence_angle=UNSET, noise_removal=UNSET) -> 'ProcessBuilder': """ Computes backscatter from SAR input :param self: The source data cube containing SAR input. :param coefficient: Select the radiometric correction coefficient. The following options are available: * `beta0`: radar brightness * `sigma0-ellipsoid`: ground area computed with ellipsoid earth model * `sigma0-terrain`: ground area computed with terrain earth model * `gamma0-ellipsoid`: ground area computed with ellipsoid earth model in sensor line of sight * `gamma0-terrain`: ground area computed with terrain earth model in sensor line of sight (default) * `null`: non-normalized backscatter :param elevation_model: The digital elevation model to use. Set to `null` (the default) to allow the back-end to choose, which will improve portability, but reduce reproducibility. :param mask: If set to `true`, a data mask is added to the bands with the name `mask`. It indicates which values are valid (1), invalid (0) or contain no-data (null). 
:param contributing_area: If set to `true`, a DEM-based local contributing area band named `contributing_area` is added. The values are given in square meters. :param local_incidence_angle: If set to `true`, a DEM-based local incidence angle band named `local_incidence_angle` is added. The values are given in degrees. :param ellipsoid_incidence_angle: If set to `true`, an ellipsoidal incidence angle band named `ellipsoid_incidence_angle` is added. The values are given in degrees. :param noise_removal: If set to `false`, no noise removal is applied. Defaults to `true`, which removes noise. :return: Backscatter values corresponding to the chosen parametrization. The values are given in linear scale. """ return sar_backscatter(data=self, coefficient=coefficient, elevation_model=elevation_model, mask=mask, contributing_area=contributing_area, local_incidence_angle=local_incidence_angle, ellipsoid_incidence_angle=ellipsoid_incidence_angle, noise_removal=noise_removal) def save_result(self, format, options=UNSET) -> 'ProcessBuilder': """ Save processed data to storage :param self: The data to save. :param format: The file format to save to. It must be one of the values that the server reports as supported output file formats, which usually correspond to the short GDAL/OGR codes. If the format is not suitable for storing the underlying data structure, a `FormatUnsuitable` exception will be thrown. This parameter is *case insensitive*. :param options: The file format parameters to be used to create the file(s). Must correspond to the parameters that the server reports as supported parameters for the chosen `format`. The parameter names and valid values usually correspond to the GDAL/OGR format options. :return: `false` if saving failed, `true` otherwise. """ return save_result(data=self, format=format, options=options) def sd(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Standard deviation :param self: An array of numbers. 
:param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The computed sample standard deviation. """ return sd(data=self, ignore_nodata=ignore_nodata) def sgn(self) -> 'ProcessBuilder': """ Signum :param self: A number. :return: The computed signum value of `x`. """ return sgn(x=self) def sin(self) -> 'ProcessBuilder': """ Sine :param self: An angle in radians. :return: The computed sine of `x`. """ return sin(x=self) def sinh(self) -> 'ProcessBuilder': """ Hyperbolic sine :param self: An angle in radians. :return: The computed hyperbolic sine of `x`. """ return sinh(x=self) def sort(self, asc=UNSET, nodata=UNSET) -> 'ProcessBuilder': """ Sort data :param self: An array with data to sort. :param asc: The default sort order is ascending, with smallest values first. To sort in reverse (descending) order, set this parameter to `false`. :param nodata: Controls the handling of no-data values (`null`). By default, they are removed. If set to `true`, missing values in the data are put last; if set to `false`, they are put first. :return: The sorted array. """ return sort(data=self, asc=asc, nodata=nodata) def sqrt(self) -> 'ProcessBuilder': """ Square root :param self: A number. :return: The computed square root. """ return sqrt(x=self) def subtract(self, y) -> 'ProcessBuilder': """ Subtraction of two numbers :param self: The minuend. :param y: The subtrahend. :return: The computed result. """ return subtract(x=self, y=y) def sum(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Compute the sum by adding up numbers :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The computed sum of the sequence of numbers. 
""" return sum(data=self, ignore_nodata=ignore_nodata) def tan(self) -> 'ProcessBuilder': """ Tangent :param self: An angle in radians. :return: The computed tangent of `x`. """ return tan(x=self) def tanh(self) -> 'ProcessBuilder': """ Hyperbolic tangent :param self: An angle in radians. :return: The computed hyperbolic tangent of `x`. """ return tanh(x=self) def text_begins(self, pattern, case_sensitive=UNSET) -> 'ProcessBuilder': """ Text begins with another text :param self: Text in which to find something at the beginning. :param pattern: Text to find at the beginning of `data`. Regular expressions are not supported. :param case_sensitive: Case sensitive comparison can be disabled by setting this parameter to `false`. :return: `true` if `data` begins with `pattern`, false` otherwise. """ return text_begins(data=self, pattern=pattern, case_sensitive=case_sensitive) def text_contains(self, pattern, case_sensitive=UNSET) -> 'ProcessBuilder': """ Text contains another text :param self: Text in which to find something in. :param pattern: Text to find in `data`. Regular expressions are not supported. :param case_sensitive: Case sensitive comparison can be disabled by setting this parameter to `false`. :return: `true` if `data` contains the `pattern`, false` otherwise. """ return text_contains(data=self, pattern=pattern, case_sensitive=case_sensitive) def text_ends(self, pattern, case_sensitive=UNSET) -> 'ProcessBuilder': """ Text ends with another text :param self: Text in which to find something at the end. :param pattern: Text to find at the end of `data`. Regular expressions are not supported. :param case_sensitive: Case sensitive comparison can be disabled by setting this parameter to `false`. :return: `true` if `data` ends with `pattern`, false` otherwise. 
""" return text_ends(data=self, pattern=pattern, case_sensitive=case_sensitive) def text_merge(self, separator=UNSET) -> 'ProcessBuilder': """ Concatenate elements to a single text :param self: A set of elements. Numbers, boolean values and null values get converted to their (lower case) string representation. For example: `1` (integer), `-1.5` (number), `true` / `false` (boolean values) :param separator: A separator to put between each of the individual texts. Defaults to an empty string. :return: A string containing a string representation of all the array elements in the same order, with the separator between each element. """ return text_merge(data=self, separator=separator) def trim_cube(self) -> 'ProcessBuilder': """ Remove dimension labels with no-data values :param self: A raster data cube to trim. :return: A trimmed raster data cube with the same dimensions. The dimension properties name, type, reference system and resolution remain unchanged. The number of dimension labels may decrease. """ return trim_cube(data=self) def variance(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Variance :param self: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The computed sample variance. """ return variance(data=self, ignore_nodata=ignore_nodata) def xor(self, y) -> 'ProcessBuilder': """ Logical XOR (exclusive or) :param self: A boolean value. :param y: A boolean value. :return: Boolean result of the logical XOR. """ return xor(x=self, y=y) # Public shortcut process = ProcessBuilder.process # Private shortcut that has lower chance to collide with a process argument named `process` _process = ProcessBuilder.process def absolute(x) -> ProcessBuilder: """ Absolute value :param x: A number. :return: The computed absolute value. 
""" return _process('absolute', x=x) def add(x, y) -> ProcessBuilder: """ Addition of two numbers :param x: The first summand. :param y: The second summand. :return: The computed sum of the two numbers. """ return _process('add', x=x, y=y) def add_dimension(data, name, label, type=UNSET) -> ProcessBuilder: """ Add a new dimension :param data: A data cube to add the dimension to. :param name: Name for the dimension. :param label: A dimension label. :param type: The type of dimension, defaults to `other`. :return: The data cube with a newly added dimension. The new dimension has exactly one dimension label. All other dimensions remain unchanged. """ return _process('add_dimension', data=data, name=name, label=label, type=type) def aggregate_spatial(data, geometries, reducer, target_dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Zonal statistics for geometries :param data: A raster data cube. The data cube must have been reduced to only contain two spatial dimensions and a third dimension the values are aggregated for, for example the temporal dimension to get a time series. Otherwise, this process fails with the `TooManyDimensions` exception. The data cube implicitly gets restricted to the bounds of the geometries as if ``filter_spatial()`` would have been used with the same values for the corresponding parameters immediately before this process. :param geometries: Geometries as GeoJSON on which the aggregation will be based. One value will be computed per GeoJSON `Feature`, `Geometry` or `GeometryCollection`. For a `FeatureCollection` multiple values will be computed, one value per contained `Feature`. For example, a single value will be computed for a `MultiPolygon`, but two values will be computed for a `FeatureCollection` containing two polygons. - For **polygons**, the process considers all pixels for which the point at the pixel center intersects with the corresponding polygon (as defined in the Simple Features standard by the OGC). 
- For **points**, the process considers the closest pixel center. - For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line. Thus, pixels may be part of multiple geometries and be part of multiple aggregations. To maximize interoperability, a nested `GeometryCollection` should be avoided. Furthermore, a `GeometryCollection` composed of a single type of geometries should be avoided in favour of the corresponding multi-part type (e.g. `MultiPolygon`). :param reducer: A reducer to be applied on all values of each geometry. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. :param target_dimension: The new dimension name to be used for storing the results. Defaults to `result`. :param context: Additional data to be passed to the reducer. :return: A vector data cube with the computed results and restricted to the bounds of the geometries. The computed value is used for the dimension with the name that was specified in the parameter `target_dimension`. The computation also stores information about the total count of pixels (valid + invalid pixels) and the number of valid pixels (see ``is_valid()``) for each geometry. These values are added as a new dimension with a dimension name derived from `target_dimension` by adding the suffix `_meta`. The new dimension has the dimension labels `total_count` and `valid_count`. """ return _process('aggregate_spatial', data=data, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) def aggregate_spatial_binary(data, geometries, reducer, target_dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Zonal statistics for geometries by binary aggregation :param data: A raster data cube. 
The data cube implicitly gets restricted to the bounds of the geometries as if ``filter_spatial()`` would have been used with the same values for the corresponding parameters immediately before this process. :param geometries: Geometries as GeoJSON on which the aggregation will be based. :param reducer: A reduction operator to be applied consecutively on tuples of values. It must be both associative and commutative as the execution may be executed in parallel and therefore the order of execution is arbitrary. The reduction operator may be a single process such as ``multiply()`` or consist of multiple sub-processes. :param target_dimension: The new dimension name to be used for storing the results. Defaults to `result`. :param context: Additional data to be passed to the reducer. :return: A vector data cube with the computed results and restricted to the bounds of the geometries. The computed value is stored in dimension with the name that was specified in the parameter `target_dimension`. The computation also stores information about the total count of pixels (valid + invalid pixels) and the number of valid pixels (see ``is_valid()``) for each geometry. These values are stored as new dimension with a dimension name derived from `target_dimension` by adding the suffix `_meta`. The new dimension has the dimension labels `total_count` and `valid_count`. """ return _process('aggregate_spatial_binary', data=data, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) def aggregate_spatial_window(data, reducer, size, boundary=UNSET, align=UNSET, context=UNSET) -> ProcessBuilder: """ Zonal statistics for rectangular windows :param data: A raster data cube with exactly two horizontal spatial dimensions and an arbitrary number of additional dimensions. The process is applied to all additional dimensions individually. :param reducer: A reducer to be applied on the list of values, which contain all pixels covered by the window. 
A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. :param size: Window size in pixels along the horizontal spatial dimensions. The first value corresponds to the `x` axis, the second value corresponds to the `y` axis. :param boundary: Behavior to apply if the number of values for the axes `x` and `y` is not a multiple of the corresponding value in the `size` parameter. Options are: - `pad` (default): pad the data cube with the no-data value `null` to fit the required window size. - `trim`: trim the data cube to fit the required window size. Set the parameter `align` to specifies to which corner the data is aligned to. :param align: If the data requires padding or trimming (see parameter `boundary`), specifies to which corner of the spatial extent the data is aligned to. For example, if the data is aligned to the upper left, the process pads/trims at the lower-right. :param context: Additional data to be passed to the reducer. :return: A data cube with the newly computed values and the same dimensions. The resolution will change depending on the chosen values for the `size` and `boundary` parameter. It usually decreases for the dimensions which have the corresponding parameter `size` set to values greater than 1. The dimension labels will be set to the coordinate at the center of the window. The other dimension properties (name, type and reference system) remain unchanged. """ return _process('aggregate_spatial_window', data=data, reducer=reducer, size=size, boundary=boundary, align=align, context=context) def aggregate_temporal(data, intervals, reducer, labels=UNSET, dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Temporal aggregations :param data: A data cube. :param intervals: Left-closed temporal intervals, which are allowed to overlap. Each temporal interval in the array has exactly two elements: 1. 
The first element is the start of the temporal interval. The specified instance in time is **included** in the interval. 2. The second element is the end of the temporal interval. The specified instance in time is **excluded** from the interval. The specified temporal strings follow [RFC 3339](https://www.rfc-editor.org/rfc/rfc3339.html). Although [RFC 3339 prohibits the hour to be '24'](https://www.rfc-editor.org/rfc/rfc3339.html#section-5.7), **this process allows the value '24' for the hour** of an end time in order to make it possible that left-closed time intervals can fully cover the day. :param reducer: A reducer to be applied for the values contained in each interval. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. Intervals may not contain any values, which for most reducers leads to no-data (`null`) values by default. :param labels: Distinct labels for the intervals, which can contain dates and/or times. Is only required to be specified if the values for the start of the temporal intervals are not distinct and thus the default labels would not be unique. The number of labels and the number of groups need to be equal. :param dimension: The name of the temporal dimension for aggregation. All data along the dimension is passed through the specified reducer. If the dimension is not set or set to `null`, the data cube is expected to only have one temporal dimension. Fails with a `TooManyDimensions` exception if it has more dimensions. Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :param context: Additional data to be passed to the reducer. :return: A new data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the given temporal dimension. 
""" return _process('aggregate_temporal', data=data, intervals=intervals, reducer=reducer, labels=labels, dimension=dimension, context=context) def aggregate_temporal_period(data, period, reducer, dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Temporal aggregations based on calendar hierarchies :param data: A data cube. :param period: The time intervals to aggregate. The following pre-defined values are available: * `hour`: Hour of the day * `day`: Day of the year * `week`: Week of the year * `dekad`: Ten day periods, counted per year with three periods per month (day 1 - 10, 11 - 20 and 21 - end of month). The third dekad of the month can range from 8 to 11 days. For example, the fourth dekad is Feb, 1 - Feb, 10 each year. * `month`: Month of the year * `season`: Three month periods of the calendar seasons (December - February, March - May, June - August, September - November). * `tropical-season`: Six month periods of the tropical seasons (November - April, May - October). * `year`: Proleptic years * `decade`: Ten year periods ([0-to-9 decade](https://en.wikipedia.org/wiki/Decade#0-to-9_decade)), from a year ending in a 0 to the next year ending in a 9. * `decade-ad`: Ten year periods ([1-to-0 decade](https://en.wikipedia.org/wiki/Decade#1-to-0_decade)) better aligned with the anno Domini (AD) calendar era, from a year ending in a 1 to the next year ending in a 0. :param reducer: A reducer to be applied for the values contained in each period. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. Periods may not contain any values, which for most reducers leads to no-data (`null`) values by default. :param dimension: The name of the temporal dimension for aggregation. All data along the dimension is passed through the specified reducer. 
If the dimension is not set or set to `null`, the data cube is expected to only have one temporal dimension. Fails with a `TooManyDimensions` exception if it has more dimensions. Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :param context: Additional data to be passed to the reducer. :return: A new data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the given temporal dimension. The specified temporal dimension has the following dimension labels (`YYYY` = four- digit year, `MM` = two-digit month, `DD` two-digit day of month): * `hour`: `YYYY-MM-DD-00` - `YYYY-MM- DD-23` * `day`: `YYYY-001` - `YYYY-365` * `week`: `YYYY-01` - `YYYY-52` * `dekad`: `YYYY-00` - `YYYY-36` * `month`: `YYYY-01` - `YYYY-12` * `season`: `YYYY-djf` (December - February), `YYYY-mam` (March - May), `YYYY-jja` (June - August), `YYYY-son` (September - November). * `tropical-season`: `YYYY-ndjfma` (November - April), `YYYY-mjjaso` (May - October). * `year`: `YYYY` * `decade`: `YYY0` * `decade-ad`: `YYY1` """ return _process('aggregate_temporal_period', data=data, period=period, reducer=reducer, dimension=dimension, context=context) def all(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Are all of the values true? :param data: A set of boolean values. :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. :return: Boolean result of the logical operation. """ return _process('all', data=data, ignore_nodata=ignore_nodata) def and_(x, y) -> ProcessBuilder: """ Logical AND :param x: A boolean value. :param y: A boolean value. :return: Boolean result of the logical AND. 
""" return _process('and', x=x, y=y) def anomaly(data, normals, period) -> ProcessBuilder: """ Compute anomalies :param data: A data cube with exactly one temporal dimension and the following dimension labels for the given period (`YYYY` = four-digit year, `MM` = two-digit month, `DD` two-digit day of month): * `hour`: `YYYY-MM-DD-00` - `YYYY-MM-DD-23` * `day`: `YYYY-001` - `YYYY-365` * `week`: `YYYY-01` - `YYYY-52` * `dekad`: `YYYY-00` - `YYYY-36` * `month`: `YYYY-01` - `YYYY-12` * `season`: `YYYY-djf` (December - February), `YYYY-mam` (March - May), `YYYY-jja` (June - August), `YYYY-son` (September - November). * `tropical-season`: `YYYY-ndjfma` (November - April), `YYYY-mjjaso` (May - October). * `year`: `YYYY` * `decade`: `YYY0` * `decade-ad`: `YYY1` * `single-period` / `climatology-period`: Any ``aggregate_temporal_period()`` can compute such a data cube. :param normals: A data cube with normals, e.g. daily, monthly or yearly values computed from a process such as ``climatological_normal()``. Must contain exactly one temporal dimension with the following dimension labels for the given period: * `hour`: `00` - `23` * `day`: `001` - `365` * `week`: `01` - `52` * `dekad`: `00` - `36` * `month`: `01` - `12` * `season`: `djf` (December - February), `mam` (March - May), `jja` (June - August), `son` (September - November) * `tropical-season`: `ndjfma` (November - April), `mjjaso` (May - October) * `year`: Four-digit year numbers * `decade`: Four-digit year numbers, the last digit being a `0` * `decade-ad`: Four-digit year numbers, the last digit being a `1` * `single-period` / `climatology- period`: A single dimension label with any name is expected. :param period: Specifies the time intervals available in the normals data cube. The following options are available: * `hour`: Hour of the day * `day`: Day of the year * `week`: Week of the year * `dekad`: Ten day periods, counted per year with three periods per month (day 1 - 10, 11 - 20 and 21 - end of month). 
The third dekad of the month can range from 8 to 11 days. For example, the fourth dekad is Feb, 1 - Feb, 10 each year. * `month`: Month of the year * `season`: Three month periods of the calendar seasons (December - February, March - May, June - August, September - November). * `tropical-season`: Six month periods of the tropical seasons (November - April, May - October). * `year`: Proleptic years * `decade`: Ten year periods ([0-to-9 decade](https://en.wikipedia.org/wiki/Decade#0-to-9_decade)), from a year ending in a 0 to the next year ending in a 9. * `decade-ad`: Ten year periods ([1-to-0 decade](https://en.wikipedia.org/wiki/Decade#1-to-0_decade)) better aligned with the anno Domini (AD) calendar era, from a year ending in a 1 to the next year ending in a 0. * `single-period` / `climatology- period`: A single period of arbitrary length :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged. """ return _process('anomaly', data=data, normals=normals, period=period) def any(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Is at least one value true? :param data: A set of boolean values. :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. :return: Boolean result of the logical operation. """ return _process('any', data=data, ignore_nodata=ignore_nodata) def apply(data, process, context=UNSET) -> ProcessBuilder: """ Apply a process to each pixel :param data: A data cube. :param process: A process that accepts and returns a single value and is applied on each individual value in the data cube. The process may consist of multiple sub-processes and could, for example, consist of processes such as ``abs()`` or ``linear_scale_range()``. :param context: Additional data to be passed to the process. :return: A data cube with the newly computed values and the same dimensions. 
The dimension properties (name, type, labels, reference system and resolution) remain unchanged. """ return _process('apply', data=data, process=process, context=context) def apply_dimension(data, process, dimension, target_dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Apply a process to pixels along a dimension :param data: A data cube. :param process: Process to be applied on all pixel values. The specified process needs to accept an array and must return an array with at least one element. A process may consist of multiple sub-processes. :param dimension: The name of the source dimension to apply the process on. Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :param target_dimension: The name of the target dimension or `null` (the default) to use the source dimension specified in the parameter `dimension`. By specifying a target dimension, the source dimension is removed. The target dimension with the specified name and the type `other` (see ``add_dimension()``) is created, if it doesn't exist yet. :param context: Additional data to be passed to the process. :return: A data cube with the newly computed values. All dimensions stay the same, except for the dimensions specified in corresponding parameters. There are three cases how the dimensions can change: 1. The source dimension is the target dimension: - The (number of) dimensions remain unchanged as the source dimension is the target dimension. - The source dimension properties name and type remain unchanged. - The dimension labels, the reference system and the resolution are preserved only if the number of pixel values in the source dimension is equal to the number of values computed by the process. Otherwise, all other dimension properties change as defined in the list below. 2. The source dimension is not the target dimension and the latter exists: - The number of dimensions decreases by one as the source dimension is dropped. 
- The target dimension properties name and type remain unchanged. All other dimension properties change as defined in the list below. 3. The source dimension is not the target dimension and the latter does not exist: - The number of dimensions remain unchanged, but the source dimension is replaced with the target dimension. - The target dimension has the specified name and the type other. All other dimension properties are set as defined in the list below. Unless otherwise stated above, for the given (target) dimension the following applies: - the number of dimension labels is equal to the number of values computed by the process, - the dimension labels are incrementing integers starting from zero, - the resolution changes, and - the reference system is undefined. """ return _process('apply_dimension', data=data, process=process, dimension=dimension, target_dimension=target_dimension, context=context) def apply_kernel(data, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET) -> ProcessBuilder: """ Apply a spatial convolution with a kernel :param data: A data cube. :param kernel: Kernel as a two-dimensional array of weights. The inner level of the nested array aligns with the `x` axis and the outer level aligns with the `y` axis. Each level of the kernel must have an uneven number of elements, otherwise the process throws a `KernelDimensionsUneven` exception. :param factor: A factor that is multiplied to each value after the kernel has been applied. This is basically a shortcut for explicitly multiplying each value by a factor afterwards, which is often required for some kernel-based algorithms such as the Gaussian blur. :param border: Determines how the data is extended when the kernel overlaps with the borders. Defaults to fill the border with zeroes. 
The following options are available: * *numeric value* - fill with a user- defined constant number `n`: `nnnnnn|abcdefgh|nnnnnn` (default, with `n` = 0) * `replicate` - repeat the value from the pixel at the border: `aaaaaa|abcdefgh|hhhhhh` * `reflect` - mirror/reflect from the border: `fedcba|abcdefgh|hgfedc` * `reflect_pixel` - mirror/reflect from the center of the pixel at the border: `gfedcb|abcdefgh|gfedcb` * `wrap` - repeat/wrap the image: `cdefgh|abcdefgh|abcdef` :param replace_invalid: This parameter specifies the value to replace non-numerical or infinite numerical values with. By default, those values are replaced with zeroes. :return: A data cube with the newly computed values and the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged. """ return _process('apply_kernel', data=data, kernel=kernel, factor=factor, border=border, replace_invalid=replace_invalid) def apply_neighborhood(data, process, size, overlap=UNSET, context=UNSET) -> ProcessBuilder: """ Apply a process to pixels in a n-dimensional neighborhood :param data: A data cube. :param process: Process to be applied on all neighborhoods. :param size: Neighborhood sizes along each dimension. This object maps dimension names to either a physical measure (e.g. 100 m, 10 days) or pixels (e.g. 32 pixels). For dimensions not specified, the default is to provide all values. Be aware that including all values from overly large dimensions may not be processed at once. :param overlap: Overlap of neighborhoods along each dimension to avoid border effects. For instance a temporal dimension can add 1 month before and after a neighborhood. In the spatial dimensions, this is often a number of pixels. The overlap specified is added before and after, so an overlap of 8 pixels will add 8 pixels on both sides of the window, so 16 in total. 
Be aware that large overlaps increase the need for computational resources and modifying overlapping data in subsequent operations have no effect. :param context: Additional data to be passed to the process. :return: A data cube with the newly computed values and the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged. """ return _process('apply_neighborhood', data=data, process=process, size=size, overlap=overlap, context=context) def arccos(x) -> ProcessBuilder: """ Inverse cosine :param x: A number. :return: The computed angle in radians. """ return _process('arccos', x=x) def arcosh(x) -> ProcessBuilder: """ Inverse hyperbolic cosine :param x: A number. :return: The computed angle in radians. """ return _process('arcosh', x=x) def arcsin(x) -> ProcessBuilder: """ Inverse sine :param x: A number. :return: The computed angle in radians. """ return _process('arcsin', x=x) def arctan(x) -> ProcessBuilder: """ Inverse tangent :param x: A number. :return: The computed angle in radians. """ return _process('arctan', x=x) def arctan2(y, x) -> ProcessBuilder: """ Inverse tangent of two numbers :param y: A number to be used as the dividend. :param x: A number to be used as the divisor. :return: The computed angle in radians. """ return _process('arctan2', y=y, x=x) def ard_normalized_radar_backscatter(data, elevation_model=UNSET, contributing_area=UNSET, ellipsoid_incidence_angle=UNSET, noise_removal=UNSET) -> ProcessBuilder: """ CARD4L compliant SAR NRB generation :param data: The source data cube containing SAR input. :param elevation_model: The digital elevation model to use. Set to `null` (the default) to allow the back- end to choose, which will improve portability, but reduce reproducibility. :param contributing_area: If set to `true`, a DEM-based local contributing area band named `contributing_area` is added. The values are given in square meters. 
:param ellipsoid_incidence_angle: If set to `true`, an ellipsoidal incidence angle band named `ellipsoid_incidence_angle` is added. The values are given in degrees. :param noise_removal: If set to `false`, no noise removal is applied. Defaults to `true`, which removes noise. :return: Backscatter values expressed as gamma0 in linear scale. In addition to the bands `contributing_area` and `ellipsoid_incidence_angle` that can optionally be added with corresponding parameters, the following bands are always added to the data cube: - `mask`: A data mask that indicates which values are valid (1), invalid (0) or contain no-data (null). - `local_incidence_angle`: A band with DEM-based local incidence angles in degrees. The data returned is CARD4L compliant with corresponding metadata. """ return _process('ard_normalized_radar_backscatter', data=data, elevation_model=elevation_model, contributing_area=contributing_area, ellipsoid_incidence_angle=ellipsoid_incidence_angle, noise_removal=noise_removal) def ard_surface_reflectance(data, atmospheric_correction_method, cloud_detection_method, elevation_model=UNSET, atmospheric_correction_options=UNSET, cloud_detection_options=UNSET) -> ProcessBuilder: """ CARD4L compliant Surface Reflectance generation :param data: The source data cube containing multi-spectral optical top of the atmosphere (TOA) reflectances. There must be a single dimension of type `bands` available. :param atmospheric_correction_method: The atmospheric correction method to use. :param cloud_detection_method: The cloud detection method to use. Each method supports detecting different atmospheric disturbances such as clouds, cloud shadows, aerosols, haze, ozone and/or water vapour in optical imagery. :param elevation_model: The digital elevation model to use. Set to `null` (the default) to allow the back- end to choose, which will improve portability, but reduce reproducibility. 
:param atmospheric_correction_options: Proprietary options for the atmospheric correction method. Specifying proprietary options will reduce portability. :param cloud_detection_options: Proprietary options for the cloud detection method. Specifying proprietary options will reduce portability. :return: Data cube containing bottom of atmosphere reflectances for each spectral band in the source data cube, with atmospheric disturbances like clouds and cloud shadows removed. No-data values (null) are directly set in the bands. Depending on the methods used, several additional bands will be added to the data cube: Data cube containing bottom of atmosphere reflectances for each spectral band in the source data cube, with atmospheric disturbances like clouds and cloud shadows removed. Depending on the methods used, several additional bands will be added to the data cube: - `date` (optional): Specifies per-pixel acquisition timestamps. - `incomplete-testing` (required): Identifies pixels with a value of 1 for which the per-pixel tests (at least saturation, cloud and cloud shadows, see CARD4L specification for details) have not all been successfully completed. Otherwise, the value is 0. - `saturation` (required) / `saturation_{band}` (optional): Indicates where pixels in the input spectral bands are saturated (1) or not (0). If the saturation is given per band, the band names are `saturation_{band}` with `{band}` being the band name from the source data cube. - `cloud`, `shadow` (both required),`aerosol`, `haze`, `ozone`, `water_vapor` (all optional): Indicates the probability of pixels being an atmospheric disturbance such as clouds. All bands have values between 0 (clear) and 1, which describes the probability that it is an atmospheric disturbance. - `snow-ice` (optional): Points to a file that indicates whether a pixel is assessed as being snow/ice (1) or not (0). All values describe the probability and must be between 0 and 1. 
- `land-water` (optional): Indicates whether a pixel is assessed as being land (1) or water (0). All values describe the probability and must be between 0 and 1. - `incidence-angle` (optional): Specifies per-pixel incidence angles in degrees. - `azimuth` (optional): Specifies per-pixel azimuth angles in degrees. - `sun- azimuth:` (optional): Specifies per-pixel sun azimuth angles in degrees. - `sun-elevation` (optional): Specifies per-pixel sun elevation angles in degrees. - `terrain-shadow` (optional): Indicates with a value of 1 whether a pixel is not directly illuminated due to terrain shadowing. Otherwise, the value is 0. - `terrain-occlusion` (optional): Indicates with a value of 1 whether a pixel is not visible to the sensor due to terrain occlusion during off-nadir viewing. Otherwise, the value is 0. - `terrain-illumination` (optional): Contains coefficients used for terrain illumination correction are provided for each pixel. The data returned is CARD4L compliant with corresponding metadata. """ return _process('ard_surface_reflectance', data=data, atmospheric_correction_method=atmospheric_correction_method, cloud_detection_method=cloud_detection_method, elevation_model=elevation_model, atmospheric_correction_options=atmospheric_correction_options, cloud_detection_options=cloud_detection_options) def array_append(data, value) -> ProcessBuilder: """ Append a value to an array :param data: An array. :param value: Value to append to the array. :return: The new array with the value being appended. """ return _process('array_append', data=data, value=value) def array_apply(data, process, context=UNSET) -> ProcessBuilder: """ Apply a process to each array element :param data: An array. :param process: A process that accepts and returns a single value and is applied on each individual value in the array. The process may consist of multiple sub-processes and could, for example, consist of processes such as ``abs()`` or ``linear_scale_range()``. 
:param context: Additional data to be passed to the process. :return: An array with the newly computed values. The number of elements are the same as for the original array. """ return _process('array_apply', data=data, process=process, context=context) def array_concat(array1, array2) -> ProcessBuilder: """ Merge two arrays :param array1: The first array. :param array2: The second array. :return: The merged array. """ return _process('array_concat', array1=array1, array2=array2) def array_contains(data, value) -> ProcessBuilder: """ Check whether the array contains a given value :param data: List to find the value in. :param value: Value to find in `data`. :return: `true` if the list contains the value, false` otherwise. """ return _process('array_contains', data=data, value=value) def array_create(data=UNSET, repeat=UNSET) -> ProcessBuilder: """ Create an array :param data: A (native) array to fill the newly created array with. Defaults to an empty array. :param repeat: The number of times the (native) array specified in `data` is repeatedly added after each other to the new array being created. Defaults to `1`. :return: The newly created array. """ return _process('array_create', data=data, repeat=repeat) def array_create_labeled(data, labels) -> ProcessBuilder: """ Create a labeled array :param data: An array of values to be used. :param labels: An array of labels to be used. :return: The newly created labeled array. """ return _process('array_create_labeled', data=data, labels=labels) def array_element(data, index=UNSET, label=UNSET, return_nodata=UNSET) -> ProcessBuilder: """ Get an element from an array :param data: An array. :param index: The zero-based index of the element to retrieve. :param label: The label of the element to retrieve. Throws an `ArrayNotLabeled` exception, if the given array is not a labeled array and this parameter is set. 
:param return_nodata: By default this process throws an `ArrayElementNotAvailable` exception if the index or label is invalid. If you want to return `null` instead, set this flag to `true`. :return: The value of the requested element. """ return _process('array_element', data=data, index=index, label=label, return_nodata=return_nodata) def array_filter(data, condition, context=UNSET) -> ProcessBuilder: """ Filter an array based on a condition :param data: An array. :param condition: A condition that is evaluated against each value, index and/or label in the array. Only the array elements for which the condition returns `true` are preserved. :param context: Additional data to be passed to the condition. :return: An array filtered by the specified condition. The number of elements are less than or equal compared to the original array. """ return _process('array_filter', data=data, condition=condition, context=context) def array_find(data, value) -> ProcessBuilder: """ Get the index for a value in an array :param data: List to find the value in. :param value: Value to find in `data`. :return: The index of the first element with the specified value. If no element was found, `null` is returned. """ return _process('array_find', data=data, value=value) def array_find_label(data, label) -> ProcessBuilder: """ Get the index for a label in a labeled array :param data: List to find the label in. :param label: Label to find in `data`. :return: The index of the element with the specified label assigned. If no such label was found, `null` is returned. """ return _process('array_find_label', data=data, label=label) def array_interpolate_linear(data) -> ProcessBuilder: """ One-dimensional linear interpolation for arrays :param data: An array of numbers and no-data values. If the given array is a labeled array, the labels must have a natural/inherent label order and the process expects the labels to be sorted accordingly. 
This is the default behavior in openEO for spatial and temporal dimensions. :return: An array with no-data values being replaced with interpolated values. If not at least 2 numerical values are available in the array, the array stays the same. """ return _process('array_interpolate_linear', data=data) def array_labels(data) -> ProcessBuilder: """ Get the labels for an array :param data: An array with labels. :return: The labels as an array. """ return _process('array_labels', data=data) def array_modify(data, values, index, length=UNSET) -> ProcessBuilder: """ Change the content of an array (insert, remove, update) :param data: An array. :param values: The values to fill the array with. :param index: The index of the element to insert the value(s) before. If the index is greater than the number of elements, the process throws an `ArrayElementNotAvailable` exception. To insert after the last element, there are two options: 1. Use the simpler processes ``array_append()`` to append a single value or ``array_concat`` to append multiple values. 2. Specify the number of elements in the array. You can retrieve the number of elements with the process ``count()``, having the parameter `condition` set to `true`. :param length: The number of elements to replace. This parameter has no effect in case the given `index` does not exist in the array given. :return: An array with values added, updated or removed. """ return _process('array_modify', data=data, values=values, index=index, length=length) def arsinh(x) -> ProcessBuilder: """ Inverse hyperbolic sine :param x: A number. :return: The computed angle in radians. """ return _process('arsinh', x=x) def artanh(x) -> ProcessBuilder: """ Inverse hyperbolic tangent :param x: A number. :return: The computed angle in radians. 
""" return _process('artanh', x=x) def atmospheric_correction(data, method, elevation_model=UNSET, options=UNSET) -> ProcessBuilder: """ Apply atmospheric correction :param data: Data cube containing multi-spectral optical top of atmosphere reflectances to be corrected. :param method: The atmospheric correction method to use. To get reproducible results, you have to set a specific method. Set to `null` to allow the back-end to choose, which will improve portability, but reduce reproducibility as you *may* get different results if you run the processes multiple times. :param elevation_model: The digital elevation model to use. Set to `null` (the default) to allow the back- end to choose, which will improve portability, but reduce reproducibility. :param options: Proprietary options for the atmospheric correction method. Specifying proprietary options will reduce portability. :return: Data cube containing bottom of atmosphere reflectances. """ return _process('atmospheric_correction', data=data, method=method, elevation_model=elevation_model, options=options) def between(x, min, max, exclude_max=UNSET) -> ProcessBuilder: """ Between comparison :param x: The value to check. :param min: Lower boundary (inclusive) to check against. :param max: Upper boundary (inclusive) to check against. :param exclude_max: Exclude the upper boundary `max` if set to `true`. Defaults to `false`. :return: `true` if `x` is between the specified bounds, otherwise `false`. """ return _process('between', x=x, min=min, max=max, exclude_max=exclude_max) def ceil(x) -> ProcessBuilder: """ Round fractions up :param x: A number to round up. :return: The number rounded up. """ return _process('ceil', x=x) def climatological_normal(data, period, climatology_period=UNSET) -> ProcessBuilder: """ Compute climatology normals :param data: A data cube with exactly one temporal dimension. The data cube must span at least the temporal interval specified in the parameter `climatology-period`. 
Seasonal periods may span two consecutive years, e.g. temporal winter that includes months December, January and February. If the required months before the actual climate period are available, the season is taken into account. If not available, the first season is not taken into account and the seasonal mean is based on one year less than the other seasonal normals. The incomplete season at the end of the last year is never taken into account. :param period: The time intervals to aggregate the average value for. The following pre-defined frequencies are supported: * `day`: Day of the year * `month`: Month of the year * `climatology-period`: The period specified in the `climatology-period`. * `season`: Three month periods of the calendar seasons (December - February, March - May, June - August, September - November). * `tropical-season`: Six month periods of the tropical seasons (November - April, May - October). :param climatology_period: The climatology period as a closed temporal interval. The first element of the array is the first year to be fully included in the temporal interval. The second element is the last year to be fully included in the temporal interval. The default period is from 1981 until 2010 (both inclusive). :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the temporal dimension. 
The temporal dimension has the following dimension labels: * `day`: `001` - `365` * `month`: `01` - `12` * `climatology-period`: `climatology-period` * `season`: `djf` (December - February), `mam` (March - May), `jja` (June - August), `son` (September - November) * `tropical-season`: `ndjfma` (November - April), `mjjaso` (May - October) """ return _process('climatological_normal', data=data, period=period, climatology_period=climatology_period) def clip(x, min, max) -> ProcessBuilder: """ Clip a value between a minimum and a maximum :param x: A number. :param min: Minimum value. If the value is lower than this value, the process will return the value of this parameter. :param max: Maximum value. If the value is greater than this value, the process will return the value of this parameter. :return: The value clipped to the specified range. """ return _process('clip', x=x, min=min, max=max) def cloud_detection(data, method, options=UNSET) -> ProcessBuilder: """ Create cloud masks :param data: The source data cube containing multi-spectral optical top of the atmosphere (TOA) reflectances on which to perform cloud detection. :param method: The cloud detection method to use. To get reproducible results, you have to set a specific method. Set to `null` to allow the back-end to choose, which will improve portability, but reduce reproducibility as you *may* get different results if you run the processes multiple times. :param options: Proprietary options for the cloud detection method. Specifying proprietary options will reduce portability. :return: A data cube with bands for the atmospheric disturbances. Each of the masks contains values between 0 and 1. The data cube has the same spatial and temporal dimensions as the source data cube and a dimension that contains a dimension label for each of the supported/considered atmospheric disturbance. 
""" return _process('cloud_detection', data=data, method=method, options=options) def constant(x) -> ProcessBuilder: """ Define a constant value :param x: The value of the constant. :return: The value of the constant. """ return _process('constant', x=x) def cos(x) -> ProcessBuilder: """ Cosine :param x: An angle in radians. :return: The computed cosine of `x`. """ return _process('cos', x=x) def cosh(x) -> ProcessBuilder: """ Hyperbolic cosine :param x: An angle in radians. :return: The computed hyperbolic cosine of `x`. """ return _process('cosh', x=x) def count(data, condition=UNSET, context=UNSET) -> ProcessBuilder: """ Count the number of elements :param data: An array with elements of any data type. :param condition: A condition consists of one or more processes, which in the end return a boolean value. It is evaluated against each element in the array. An element is counted only if the condition returns `true`. Defaults to count valid elements in a list (see ``is_valid()``). Setting this parameter to boolean `true` counts all elements in the list. :param context: Additional data to be passed to the condition. :return: The counted number of elements. """ return _process('count', data=data, condition=condition, context=context) def create_raster_cube() -> ProcessBuilder: """ Create an empty raster data cube :return: An empty raster data cube with zero dimensions. """ return _process('create_raster_cube', ) def cummax(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative maxima :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative maxima. """ return _process('cummax', data=data, ignore_nodata=ignore_nodata) def cummax(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative maxima :param data: An array of numbers. 
:param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative maxima. """ return _process('cummax', data=data, ignore_nodata=ignore_nodata) def cummin(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative minima :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative minima. """ return _process('cummin', data=data, ignore_nodata=ignore_nodata) def cummin(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative minima :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative minima. """ return _process('cummin', data=data, ignore_nodata=ignore_nodata) def cumproduct(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative products :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative products. """ return _process('cumproduct', data=data, ignore_nodata=ignore_nodata) def cumproduct(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative products :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. 
Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative products. """ return _process('cumproduct', data=data, ignore_nodata=ignore_nodata) def cumsum(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative sums :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative sums. """ return _process('cumsum', data=data, ignore_nodata=ignore_nodata) def cumsum(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative sums :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. Setting this flag to `false` considers no-data values so that `null` is set for all the following elements. :return: An array with the computed cumulative sums. """ return _process('cumsum', data=data, ignore_nodata=ignore_nodata) def date_shift(date, value, unit) -> ProcessBuilder: """ Manipulates dates and times by addition or subtraction :param date: The date (and optionally time) to manipulate. If the given date doesn't include the time, the process assumes that the time component is `00:00:00Z` (i.e. midnight, in UTC). The millisecond part of the time is optional and defaults to `0` if not given. :param value: The period of time in the unit given that is added (positive numbers) or subtracted (negative numbers). The value `0` doesn't have any effect. :param unit: The unit for the value given. The following pre-defined units are available: - millisecond: Milliseconds - second: Seconds - leap seconds are ignored in computations. 
- minute: Minutes - hour: Hours - day: Days - changes only the the day part of a date - week: Weeks (equivalent to 7 days) - month: Months - year: Years Manipulations with the unit `year`, `month`, `week` or `day` do never change the time. If any of the manipulations result in an invalid date or time, the corresponding part is rounded down to the next valid date or time respectively. For example, adding a month to `2020-01-31` would result in `2020-02-29`. :return: The manipulated date. If a time component was given in the parameter `date`, the time component is returned with the date. """ return _process('date_shift', date=date, value=value, unit=unit) def debug(data, code=UNSET, level=UNSET, message=UNSET) -> ProcessBuilder: """ Publish debugging information :param data: Data to publish. :param code: An identifier to help identify the log entry in a bunch of other log entries. :param level: The severity level of this message, defaults to `info`. Note that the level `error` forces the computation to be stopped! :param message: A message to send in addition to the data. :return: Returns the data as passed to the `data` parameter. """ return _process('debug', data=data, code=code, level=level, message=message) def debug(data, code=UNSET, level=UNSET, message=UNSET) -> ProcessBuilder: """ Publish debugging information :param data: Data to publish. :param code: An identifier to help identify the log entry in a bunch of other log entries. :param level: The severity level of this message, defaults to `info`. Note that the level `error` forces the computation to be stopped! :param message: A message to send in addition to the data. :return: The data as passed to the `data` parameter without any modification. """ return _process('debug', data=data, code=code, level=level, message=message) def dimension_labels(data, dimension) -> ProcessBuilder: """ Get the dimension labels :param data: The data cube. :param dimension: The name of the dimension to get the labels for. 
:return: The labels as an array. """ return _process('dimension_labels', data=data, dimension=dimension) def divide(x, y) -> ProcessBuilder: """ Division of two numbers :param x: The dividend. :param y: The divisor. :return: The computed result. """ return _process('divide', x=x, y=y) def drop_dimension(data, name) -> ProcessBuilder: """ Remove a dimension :param data: The data cube to drop a dimension from. :param name: Name of the dimension to drop. :return: A data cube without the specified dimension. The number of dimensions decreases by one, but the dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged. """ return _process('drop_dimension', data=data, name=name) def e() -> ProcessBuilder: """ Euler's number (e) :return: The numerical value of Euler's number. """ return _process('e', ) def eq(x, y, delta=UNSET, case_sensitive=UNSET) -> ProcessBuilder: """ Equal to comparison :param x: First operand. :param y: Second operand. :param delta: Only applicable for comparing two numbers. If this optional parameter is set to a positive non-zero number the equality of two numbers is checked against a delta value. This is especially useful to circumvent problems with floating-point inaccuracy in machine-based computation. This option is basically an alias for the following computation: `lte(abs(minus([x, y]), delta)` :param case_sensitive: Only applicable for comparing two strings. Case sensitive comparison can be disabled by setting this parameter to `false`. :return: `true` if `x` is equal to `y`, `null` if any operand is `null`, otherwise `false`. """ return _process('eq', x=x, y=y, delta=delta, case_sensitive=case_sensitive) def exp(p) -> ProcessBuilder: """ Exponentiation to the base e :param p: The numerical exponent. :return: The computed value for *e* raised to the power of `p`. 
""" return _process('exp', p=p) def extrema(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Minimum and maximum values :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that an array with two `null` values is returned if any value is such a value. :return: An array containing the minimum and maximum values for the specified numbers. The first element is the minimum, the second element is the maximum. If the input array is empty both elements are set to `null`. """ return _process('extrema', data=data, ignore_nodata=ignore_nodata) def filter_bands(data, bands=UNSET, wavelengths=UNSET) -> ProcessBuilder: """ Filter the bands by names :param data: A data cube with bands. :param bands: A list of band names. Either the unique band name (metadata field `name` in bands) or one of the common band names (metadata field `common_name` in bands). If the unique band name and the common name conflict, the unique band name has a higher priority. The order of the specified array defines the order of the bands in the data cube. If multiple bands match a common name, all matched bands are included in the original order. :param wavelengths: A list of sub-lists with each sub-list consisting of two elements. The first element is the minimum wavelength and the second element is the maximum wavelength. Wavelengths are specified in micrometers (μm). The order of the specified array defines the order of the bands in the data cube. If multiple bands match the wavelengths, all matched bands are included in the original order. :return: A data cube limited to a subset of its original bands. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the dimension of type `bands` has less (or the same) dimension labels. 
""" return _process('filter_bands', data=data, bands=bands, wavelengths=wavelengths) def filter_bbox(data, extent) -> ProcessBuilder: """ Spatial filter using a bounding box :param data: A data cube. :param extent: A bounding box, which may include a vertical axis (see `base` and `height`). :return: A data cube restricted to the bounding box. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the spatial dimensions have less (or the same) dimension labels. """ return _process('filter_bbox', data=data, extent=extent) def filter_labels(data, condition, dimension, context=UNSET) -> ProcessBuilder: """ Filter dimension labels based on a condition :param data: A data cube. :param condition: A condition that is evaluated against each dimension label in the specified dimension. A dimension label and the corresponding data is preserved for the given dimension, if the condition returns `true`. :param dimension: The name of the dimension to filter on. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist. :param context: Additional data to be passed to the condition. :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the given dimension has less (or the same) dimension labels. """ return _process('filter_labels', data=data, condition=condition, dimension=dimension, context=context) def filter_labels(data, condition, dimension, context=UNSET) -> ProcessBuilder: """ Filter dimension labels based on a condition :param data: A data cube. :param condition: A condition that is evaluated against each dimension label in the specified dimension. A dimension label and the corresponding data is preserved for the given dimension, if the condition returns `true`. :param dimension: The name of the dimension to filter on. 
Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :param context: Additional data to be passed to the condition. :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the given dimension has less (or the same) dimension labels. """ return _process('filter_labels', data=data, condition=condition, dimension=dimension, context=context) def filter_spatial(data, geometries) -> ProcessBuilder: """ Spatial filter using geometries :param data: A data cube. :param geometries: One or more geometries used for filtering, specified as GeoJSON. :return: A data cube restricted to the specified geometries. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the spatial dimensions have less (or the same) dimension labels. """ return _process('filter_spatial', data=data, geometries=geometries) def filter_temporal(data, extent, dimension=UNSET) -> ProcessBuilder: """ Temporal filter for a temporal intervals :param data: A data cube. :param extent: Left-closed temporal interval, i.e. an array with exactly two elements: 1. The first element is the start of the temporal interval. The specified instance in time is **included** in the interval. 2. The second element is the end of the temporal interval. The specified instance in time is **excluded** from the interval. The specified temporal strings follow [RFC 3339](https://www.rfc- editor.org/rfc/rfc3339.html). Also supports open intervals by setting one of the boundaries to `null`, but never both. :param dimension: The name of the temporal dimension to filter on. If no specific dimension is specified or it is set to `null`, the filter applies to all temporal dimensions. Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :return: A data cube restricted to the specified temporal extent. 
The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the temporal dimensions (determined by `dimensions` parameter) may have less dimension labels. """ return _process('filter_temporal', data=data, extent=extent, dimension=dimension) def first(data, ignore_nodata=UNSET) -> ProcessBuilder: """ First element :param data: An array with elements of any data type. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if the first value is such a value. :return: The first element of the input array. """ return _process('first', data=data, ignore_nodata=ignore_nodata) def floor(x) -> ProcessBuilder: """ Round fractions down :param x: A number to round down. :return: The number rounded down. """ return _process('floor', x=x) def gt(x, y) -> ProcessBuilder: """ Greater than comparison :param x: First operand. :param y: Second operand. :return: `true` if `x` is strictly greater than `y` or `null` if any operand is `null`, otherwise `false`. """ return _process('gt', x=x, y=y) def gte(x, y) -> ProcessBuilder: """ Greater than or equal to comparison :param x: First operand. :param y: Second operand. :return: `true` if `x` is greater than or equal to `y`, `null` if any operand is `null`, otherwise `false`. """ return _process('gte', x=x, y=y) def if_(value, accept, reject=UNSET) -> ProcessBuilder: """ If-Then-Else conditional :param value: A boolean value. :param accept: A value that is returned if the boolean value is `true`. :param reject: A value that is returned if the boolean value is **not** `true`. Defaults to `null`. :return: Either the `accept` or `reject` argument depending on the given boolean value. """ return _process('if', value=value, accept=accept, reject=reject) def int(x) -> ProcessBuilder: """ Integer part of a number :param x: A number. 
:return: Integer part of the number. """ return _process('int', x=x) def is_infinite(x) -> ProcessBuilder: """ Value is an infinite number :param x: The data to check. :return: `true` if the data is an infinite number, otherwise `false`. """ return _process('is_infinite', x=x) def is_nan(x) -> ProcessBuilder: """ Value is not a number :param x: The data to check. :return: `true` if the data is not a number, otherwise `false`. """ return _process('is_nan', x=x) def is_nodata(x) -> ProcessBuilder: """ Value is not a no-data value :param x: The data to check. :return: `true` if the data is a no-data value, otherwise `false`. """ return _process('is_nodata', x=x) def is_valid(x) -> ProcessBuilder: """ Value is valid data :param x: The data to check. :return: `true` if the data is valid, otherwise `false`. """ return _process('is_valid', x=x) def last(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Last element :param data: An array with elements of any data type. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if the last value is such a value. :return: The last element of the input array. """ return _process('last', data=data, ignore_nodata=ignore_nodata) def linear_scale_range(x, inputMin, inputMax, outputMin=UNSET, outputMax=UNSET) -> ProcessBuilder: """ Linear transformation between two ranges :param x: A number to transform. The number gets clipped to the bounds specified in `inputMin` and `inputMax`. :param inputMin: Minimum value the input can obtain. :param inputMax: Maximum value the input can obtain. :param outputMin: Minimum value of the desired output range. :param outputMax: Maximum value of the desired output range. :return: The transformed number. 
""" return _process('linear_scale_range', x=x, inputMin=inputMin, inputMax=inputMax, outputMin=outputMin, outputMax=outputMax) def ln(x) -> ProcessBuilder: """ Natural logarithm :param x: A number to compute the natural logarithm for. :return: The computed natural logarithm. """ return _process('ln', x=x) def load_collection(id, spatial_extent, temporal_extent, bands=UNSET, properties=UNSET) -> ProcessBuilder: """ Load a collection :param id: The collection id. :param spatial_extent: Limits the data to load from the collection to the specified bounding box or polygons. The process puts a pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC). The GeoJSON can be one of the following feature types: * A `Polygon` or `MultiPolygon` geometry, * a `Feature` with a `Polygon` or `MultiPolygon` geometry, * a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or * a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above. Set this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data. :param temporal_extent: Limits the data to load from the collection to the specified left-closed temporal interval. Applies to all temporal dimensions. The interval has to be specified as an array with exactly two elements: 1. The first element is the start of the temporal interval. The specified instance in time is **included** in the interval. 2. The second element is the end of the temporal interval. The specified instance in time is **excluded** from the interval. 
The specified temporal strings follow [RFC 3339](https://www.rfc-editor.org/rfc/rfc3339.html). Also supports open intervals by setting one of the boundaries to `null`, but never both. Set this parameter to `null` to set no limit for the temporal extent. Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_temporal()`` directly after loading unbounded data. :param bands: Only adds the specified bands into the data cube so that bands that don't match the list of band names are not available. Applies to all dimensions of type `bands`. Either the unique band name (metadata field `name` in bands) or one of the common band names (metadata field `common_name` in bands) can be specified. If the unique band name and the common name conflict, the unique band name has a higher priority. The order of the specified array defines the order of the bands in the data cube. If multiple bands match a common name, all matched bands are included in the original order. It is recommended to use this parameter instead of using ``filter_bands()`` directly after loading unbounded data. :param properties: Limits the data by metadata properties to include only data in the data cube which all given conditions return `true` for (AND operation). Specify key-value-pairs with the key being the name of the metadata property, which can be retrieved with the openEO Data Discovery for Collections. The value must a condition (user-defined process) to be evaluated against the collection metadata, see the example. :return: A data cube for further processing. The dimensions and dimension properties (name, type, labels, reference system and resolution) correspond to the collection's metadata, but the dimension labels are restricted as specified in the parameters. 
""" return _process('load_collection', id=id, spatial_extent=spatial_extent, temporal_extent=temporal_extent, bands=bands, properties=properties) def load_result(id) -> ProcessBuilder: """ Load batch job results :param id: The id of a batch job with results. :return: A data cube for further processing. """ return _process('load_result', id=id) def load_result(id) -> ProcessBuilder: """ Load batch job results :param id: The id of a batch job with results. :return: A data cube for further processing. """ return _process('load_result', id=id) def load_uploaded_files(paths, format, options=UNSET) -> ProcessBuilder: """ Load files from the user workspace :param paths: The files to read. Folders can't be specified, instead specify all files. An error is thrown if a file can't be read. :param format: The file format to read from. It must be one of the values that the server reports as supported input file formats, which usually correspond to the short GDAL/OGR codes. If the format is not suitable for loading the data, a `FormatUnsuitable` exception will be thrown. This parameter is *case insensitive*. :param options: The file format parameters to be used to read the files. Must correspond to the parameters that the server reports as supported parameters for the chosen `format`. The parameter names and valid values usually correspond to the GDAL/OGR format options. :return: A data cube for further processing. """ return _process('load_uploaded_files', paths=paths, format=format, options=options) def load_uploaded_files(paths, format, options=UNSET) -> ProcessBuilder: """ Load files from the user workspace :param paths: The files to read. Folders can't be specified, specify all files instead. An exception is thrown if a file can't be read. :param format: The file format to read from. It must be one of the values that the server reports as supported input file formats, which usually correspond to the short GDAL/OGR codes. 
If the format is not suitable for loading the data, a `FormatUnsuitable` exception will be thrown. This parameter is *case insensitive*. :param options: The file format parameters to be used to read the files. Must correspond to the parameters that the server reports as supported parameters for the chosen `format`. The parameter names and valid values usually correspond to the GDAL/OGR format options. :return: A data cube for further processing. """ return _process('load_uploaded_files', paths=paths, format=format, options=options) def log(x, base) -> ProcessBuilder: """ Logarithm to a base :param x: A number to compute the logarithm for. :param base: The numerical base. :return: The computed logarithm. """ return _process('log', x=x, base=base) def lt(x, y) -> ProcessBuilder: """ Less than comparison :param x: First operand. :param y: Second operand. :return: `true` if `x` is strictly less than `y`, `null` if any operand is `null`, otherwise `false`. """ return _process('lt', x=x, y=y) def lte(x, y) -> ProcessBuilder: """ Less than or equal to comparison :param x: First operand. :param y: Second operand. :return: `true` if `x` is less than or equal to `y`, `null` if any operand is `null`, otherwise `false`. """ return _process('lte', x=x, y=y) def mask(data, mask, replacement=UNSET) -> ProcessBuilder: """ Apply a raster mask :param data: A raster data cube. :param mask: A mask as a raster data cube. Every pixel in `data` must have a corresponding element in `mask`. :param replacement: The value used to replace masked values with. :return: A masked raster data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged. """ return _process('mask', data=data, mask=mask, replacement=replacement) def mask_polygon(data, mask, replacement=UNSET, inside=UNSET) -> ProcessBuilder: """ Apply a polygon mask :param data: A raster data cube. :param mask: A GeoJSON object containing at least one polygon. 
The provided feature types can be one of the following: * A `Polygon` or `MultiPolygon` geometry, * a `Feature` with a `Polygon` or `MultiPolygon` geometry, * a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or * a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above. :param replacement: The value used to replace masked values with. :param inside: If set to `true` all pixels for which the point at the pixel center **does** intersect with any polygon are replaced. :return: A masked raster data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged. """ return _process('mask_polygon', data=data, mask=mask, replacement=replacement, inside=inside) def max(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Maximum value :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The maximum value. """ return _process('max', data=data, ignore_nodata=ignore_nodata) def mean(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Arithmetic mean (average) :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The computed arithmetic mean. """ return _process('mean', data=data, ignore_nodata=ignore_nodata) def median(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Statistical median :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. 
Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The computed statistical median. """ return _process('median', data=data, ignore_nodata=ignore_nodata) def merge_cubes(cube1, cube2, overlap_resolver=UNSET, context=UNSET) -> ProcessBuilder: """ Merge two data cubes :param cube1: The first data cube. :param cube2: The second data cube. :param overlap_resolver: A reduction operator that resolves the conflict if the data overlaps. The reducer must return a value of the same data type as the input values are. The reduction operator may be a single process such as ``multiply()`` or consist of multiple sub-processes. `null` (the default) can be specified if no overlap resolver is required. :param context: Additional data to be passed to the overlap resolver. :return: The merged data cube. See the process description for details regarding the dimensions and dimension properties (name, type, labels, reference system and resolution). """ return _process('merge_cubes', cube1=cube1, cube2=cube2, overlap_resolver=overlap_resolver, context=context) def min(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Minimum value :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The minimum value. """ return _process('min', data=data, ignore_nodata=ignore_nodata) def mod(x, y) -> ProcessBuilder: """ Modulo :param x: A number to be used as the dividend. :param y: A number to be used as the divisor. :return: The remainder after division. """ return _process('mod', x=x, y=y) def multiply(x, y) -> ProcessBuilder: """ Multiplication of two numbers :param x: The multiplier. :param y: The multiplicand. :return: The computed product of the two numbers. 
""" return _process('multiply', x=x, y=y) def nan() -> ProcessBuilder: """ Not a Number (NaN) :return: Returns `NaN`. """ return _process('nan', ) def ndvi(data, nir=UNSET, red=UNSET, target_band=UNSET) -> ProcessBuilder: """ Normalized Difference Vegetation Index :param data: A raster data cube with two bands that have the common names `red` and `nir` assigned. :param nir: The name of the NIR band. Defaults to the band that has the common name `nir` assigned. Either the unique band name (metadata field `name` in bands) or one of the common band names (metadata field `common_name` in bands) can be specified. If the unique band name and the common name conflict, the unique band name has a higher priority. :param red: The name of the red band. Defaults to the band that has the common name `red` assigned. Either the unique band name (metadata field `name` in bands) or one of the common band names (metadata field `common_name` in bands) can be specified. If the unique band name and the common name conflict, the unique band name has a higher priority. :param target_band: By default, the dimension of type `bands` is dropped. To keep the dimension specify a new band name in this parameter so that a new dimension label with the specified name will be added for the computed values. :return: A raster data cube containing the computed NDVI values. The structure of the data cube differs depending on the value passed to `target_band`: * `target_band` is `null`: The data cube does not contain the dimension of type `bands`, the number of dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged. * `target_band` is a string: The data cube keeps the same dimensions. The dimension properties remain unchanged, but the number of dimension labels for the dimension of type `bands` increases by one. The additional label is named as specified in `target_band`. 
""" return _process('ndvi', data=data, nir=nir, red=red, target_band=target_band) def neq(x, y, delta=UNSET, case_sensitive=UNSET) -> ProcessBuilder: """ Not equal to comparison :param x: First operand. :param y: Second operand. :param delta: Only applicable for comparing two numbers. If this optional parameter is set to a positive non-zero number the non-equality of two numbers is checked against a delta value. This is especially useful to circumvent problems with floating-point inaccuracy in machine-based computation. This option is basically an alias for the following computation: `gt(abs(minus([x, y]), delta)` :param case_sensitive: Only applicable for comparing two strings. Case sensitive comparison can be disabled by setting this parameter to `false`. :return: `true` if `x` is *not* equal to `y`, `null` if any operand is `null`, otherwise `false`. """ return _process('neq', x=x, y=y, delta=delta, case_sensitive=case_sensitive) def normalized_difference(x, y) -> ProcessBuilder: """ Normalized difference :param x: The value for the first band. :param y: The value for the second band. :return: The computed normalized difference. """ return _process('normalized_difference', x=x, y=y) def not_(x) -> ProcessBuilder: """ Inverting a boolean :param x: Boolean value to invert. :return: Inverted boolean value. """ return _process('not', x=x) def or_(x, y) -> ProcessBuilder: """ Logical OR :param x: A boolean value. :param y: A boolean value. :return: Boolean result of the logical OR. """ return _process('or', x=x, y=y) def order(data, asc=UNSET, nodata=UNSET) -> ProcessBuilder: """ Create a permutation :param data: An array to compute the order for. :param asc: The default sort order is ascending, with smallest values first. To sort in reverse (descending) order, set this parameter to `false`. :param nodata: Controls the handling of no-data values (`null`). By default, they are removed. 
If set to `true`, missing values in the data are put last; if set to `false`, they are put first. :return: The computed permutation. """ return _process('order', data=data, asc=asc, nodata=nodata) def pi() -> ProcessBuilder: """ Pi (π) :return: The numerical value of Pi. """ return _process('pi', ) def power(base, p) -> ProcessBuilder: """ Exponentiation :param base: The numerical base. :param p: The numerical exponent. :return: The computed value for `base` raised to the power of `p`. """ return _process('power', base=base, p=p) def product(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Compute the product by multiplying numbers :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The computed product of the sequence of numbers. """ return _process('product', data=data, ignore_nodata=ignore_nodata) def quantiles(data, probabilities=UNSET, q=UNSET, ignore_nodata=UNSET) -> ProcessBuilder: """ Quantiles :param data: An array of numbers. :param probabilities: A list of probabilities to calculate quantiles for. The probabilities must be between 0 and 1. :param q: Intervals to calculate quantiles for. Calculates q-quantiles with (nearly) equal-sized intervals. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that an array with `null` values is returned if any element is such a value. :return: An array with the computed quantiles. The list has either * as many elements as the given list of `probabilities` had or * *`q`-1* elements. If the input array is empty the resulting array is filled with as many `null` values as required according to the list above. See the 'Empty array' example for an example. 
""" return _process('quantiles', data=data, probabilities=probabilities, q=q, ignore_nodata=ignore_nodata) def rearrange(data, order) -> ProcessBuilder: """ Rearrange an array based on a permutation :param data: The array to rearrange. :param order: The permutation used for rearranging. :return: The rearranged array. """ return _process('rearrange', data=data, order=order) def reduce_dimension(data, reducer, dimension, context=UNSET) -> ProcessBuilder: """ Reduce dimensions :param data: A data cube. :param reducer: A reducer to apply on the specified dimension. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. :param dimension: The name of the dimension over which to reduce. Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :param context: Additional data to be passed to the reducer. :return: A data cube with the newly computed values. It is missing the given dimension, the number of dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged. """ return _process('reduce_dimension', data=data, reducer=reducer, dimension=dimension, context=context) def reduce_dimension_binary(data, reducer, dimension, context=UNSET) -> ProcessBuilder: """ Reduce dimensions using binary reduction :param data: A data cube. :param reducer: A reduction operator to be applied consecutively on pairs of values. It must be both associative and commutative as the execution may be executed in parallel and therefore the order of execution is arbitrary. The reduction operator may be a single process such as ``multiply()`` or consist of multiple sub-processes. :param dimension: The name of the dimension over which to reduce. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist. 
:param context: Additional data to be passed to the reducer. :return: A data cube with the newly computed values. It is missing the given dimension, the number of dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged. """ return _process('reduce_dimension_binary', data=data, reducer=reducer, dimension=dimension, context=context) def reduce_spatial(data, reducer, context=UNSET) -> ProcessBuilder: """ Reduce spatial dimensions 'x' and 'y' :param data: A data cube. :param reducer: A reducer to apply on the horizontal spatial dimensions. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. :param context: Additional data to be passed to the reducer. :return: A data cube with the newly computed values. It is missing the horizontal spatial dimensions, the number of dimensions decreases by two. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged. """ return _process('reduce_spatial', data=data, reducer=reducer, context=context) def rename_dimension(data, source, target) -> ProcessBuilder: """ Rename a dimension :param data: The data cube. :param source: The current name of the dimension. Fails with a `DimensionNotAvailable` exception if the specified dimension does not exist. :param target: A new Name for the dimension. Fails with a `DimensionExists` exception if a dimension with the specified name exists. :return: A data cube with the same dimensions, but the name of one of the dimensions changes. The old name can not be referred to any longer. The dimension properties (name, type, labels, reference system and resolution) remain unchanged. 
""" return _process('rename_dimension', data=data, source=source, target=target) def rename_labels(data, dimension, target, source=UNSET) -> ProcessBuilder: """ Rename dimension labels :param data: The data cube. :param dimension: The name of the dimension to rename the labels for. :param target: The new names for the labels. The dimension labels in the data cube are expected to be enumerated if the parameter `target` is not specified. If a target dimension label already exists in the data cube, a `LabelExists` exception is thrown. :param source: The names of the labels as they are currently in the data cube. The array defines an unsorted and potentially incomplete list of labels that should be renamed to the names available in the corresponding array elements in the parameter `target`. If one of the source dimension labels doesn't exist, the `LabelNotAvailable` exception is thrown. By default, the array is empty so that the dimension labels in the data cube are expected to be enumerated. :return: The data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that for the given dimension the labels change. The old labels can not be referred to any longer. The number of labels remains the same. """ return _process('rename_labels', data=data, dimension=dimension, target=target, source=source) def resample_cube_spatial(data, target, method=UNSET) -> ProcessBuilder: """ Resample the spatial dimensions to match a target data cube :param data: A data cube. :param target: A data cube that describes the spatial target resolution. :param method: Resampling method to use. 
The following options are available and are meant to align with [`gdalwarp`](https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r): * `average`: average (mean) resampling, computes the weighted average of all valid pixels * `bilinear`: bilinear resampling * `cubic`: cubic resampling * `cubicspline`: cubic spline resampling * `lanczos`: Lanczos windowed sinc resampling * `max`: maximum resampling, selects the maximum value from all valid pixels * `med`: median resampling, selects the median value of all valid pixels * `min`: minimum resampling, selects the minimum value from all valid pixels * `mode`: mode resampling, selects the value which appears most often of all the sampled points * `near`: nearest neighbour resampling (default) * `q1`: first quartile resampling, selects the first quartile value of all valid pixels * `q3`: third quartile resampling, selects the third quartile value of all valid pixels * `rms` root mean square (quadratic mean) of all valid pixels * `sum`: compute the weighted sum of all valid pixels Valid pixels are determined based on the function ``is_valid()``. :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the spatial dimensions. """ return _process('resample_cube_spatial', data=data, target=target, method=method) def resample_cube_temporal(data, target, method, dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Resample a temporal dimension to match a target data cube :param data: A data cube. :param target: A data cube that describes the temporal target resolution. :param method: A resampling method to be applied, could be a reducer for downsampling or other methods for upsampling. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category 'reducer' for such processes. 
:param dimension: The name of the temporal dimension to resample, which must exist with this name in both data cubes. If the dimension is not set or is set to `null`, the data cube is expected to only have one temporal dimension. Fails with a `TooManyDimensions` error if it has more dimensions. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist. :param context: Additional data to be passed to the process specified for the parameter `method`. :return: A raster data cube with the same dimensions and the same dimension properties (name, type, labels, reference system and resolution) for all non-temporal dimensions. For the temporal dimension the name and type remain unchanged, but the reference system changes and the labels and resolution may change. """ return _process('resample_cube_temporal', data=data, target=target, method=method, dimension=dimension, context=context) def resample_cube_temporal(data, target, dimension=UNSET, valid_within=UNSET) -> ProcessBuilder: """ Resample temporal dimensions to match a target data cube :param data: A data cube with one or more temporal dimensions. :param target: A data cube that describes the temporal target resolution. :param dimension: The name of the temporal dimension to resample, which must exist with this name in both data cubes. If the dimension is not set or is set to `null`, the process resamples all temporal dimensions that exist with the same names in both data cubes. The following exceptions may occur: * A dimension is given, but it does not exist in any of the data cubes: `DimensionNotAvailable` * A dimension is given, but one of them is not temporal: `DimensionMismatch` * No specific dimension name is given and there are no temporal dimensions with the same name in the data: `DimensionMismatch` :param valid_within: Setting this parameter to a numerical value enables that the process searches for valid values within the given period of days before and after the target timestamps. 
Valid values are determined based on the function ``is_valid()``. For example, the limit of `7` for the target timestamps `2020-01-15 12:00:00` looks for a nearest neighbor after `2020-01-08 12:00:00` and before `2020-01-22 12:00:00`. If no valid value is found within the given period, the value will be set to no-data (`null`). :return: A raster data cube with the same dimensions and the same dimension properties (name, type, labels, reference system and resolution) for all non-temporal dimensions. For the temporal dimension, the name and type remain unchanged, but the dimension labels, resolution and reference system may change. """ return _process('resample_cube_temporal', data=data, target=target, dimension=dimension, valid_within=valid_within) def resample_spatial(data, resolution=UNSET, projection=UNSET, method=UNSET, align=UNSET) -> ProcessBuilder: """ Resample and warp the spatial dimensions :param data: A raster data cube. :param resolution: Resamples the data cube to the target resolution, which can be specified either as separate values for x and y or as a single value for both axes. Specified in the units of the target projection. Doesn't change the resolution by default (`0`). :param projection: Warps the data cube to the target projection, specified as as [EPSG code](http://www.epsg-registry.org/), [WKT2 (ISO 19162) string](http://docs.opengeospatial.org/is/18-010r7/18-010r7.html), [PROJ definition (deprecated)](https://proj.org/usage/quickstart.html). By default (`null`), the projection is not changed. :param method: Resampling method to use. 
The following options are available and are meant to align with [`gdalwarp`](https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r): * `average`: average (mean) resampling, computes the weighted average of all valid pixels * `bilinear`: bilinear resampling * `cubic`: cubic resampling * `cubicspline`: cubic spline resampling * `lanczos`: Lanczos windowed sinc resampling * `max`: maximum resampling, selects the maximum value from all valid pixels * `med`: median resampling, selects the median value of all valid pixels * `min`: minimum resampling, selects the minimum value from all valid pixels * `mode`: mode resampling, selects the value which appears most often of all the sampled points * `near`: nearest neighbour resampling (default) * `q1`: first quartile resampling, selects the first quartile value of all valid pixels * `q3`: third quartile resampling, selects the third quartile value of all valid pixels * `rms` root mean square (quadratic mean) of all valid pixels * `sum`: compute the weighted sum of all valid pixels Valid pixels are determined based on the function ``is_valid()``. :param align: Specifies to which corner of the spatial extent the new resampled data is aligned to. :return: A raster data cube with values warped onto the new projection. It has the same dimensions and the same dimension properties (name, type, labels, reference system and resolution) for all non-spatial or vertical spatial dimensions. For the horizontal spatial dimensions the name and type remain unchanged, but reference system, labels and resolution may change depending on the given parameters. """ return _process('resample_spatial', data=data, resolution=resolution, projection=projection, method=method, align=align) def round(x, p=UNSET) -> ProcessBuilder: """ Round to a specified precision :param x: A number to round. :param p: A positive number specifies the number of digits after the decimal point to round to. 
A negative number means rounding to a power of ten, so for example *-2* rounds to the nearest hundred. Defaults to *0*. :return: The rounded number. """ return _process('round', x=x, p=p) def run_udf(data, udf, runtime, version=UNSET, context=UNSET) -> ProcessBuilder: """ Run a UDF :param data: The data to be passed to the UDF as an array or raster data cube. :param udf: Either source code, an absolute URL or a path to a UDF script. :param runtime: A UDF runtime identifier available at the back-end. :param version: An UDF runtime version. If set to `null`, the default runtime version specified for each runtime is used. :param context: Additional data such as configuration options to be passed to the UDF. :return: The data processed by the UDF. * Returns a raster data cube, if a raster data cube is passed for `data`. Details on the dimensions and dimension properties (name, type, labels, reference system and resolution) depend on the UDF. * If an array is passed for `data`, the returned value can be of any data type, but is exactly what the UDF returns. """ return _process('run_udf', data=data, udf=udf, runtime=runtime, version=version, context=context) def run_udf_externally(data, url, context=UNSET) -> ProcessBuilder: """ Run an externally hosted UDF container :param data: The data to be passed to the UDF as array or raster data cube. :param url: URL to a remote UDF service. :param context: Additional data such as configuration options that should be passed to the UDF. :return: The data processed by the UDF service. * Returns a raster data cube, if a raster data cube is passed for `data`. Details on the dimensions and dimension properties (name, type, labels, reference system and resolution) depend on the UDF. * If an array is passed for `data`, the returned value can be of any data type, but is exactly what the UDF returns. 
""" return _process('run_udf_externally', data=data, url=url, context=context) def run_udf_externally(data, url, context=UNSET) -> ProcessBuilder: """ Run an externally hosted UDF container :param data: The data to be passed to the UDF as an array or raster data cube. :param url: Absolute URL to a remote UDF service. :param context: Additional data such as configuration options to be passed to the UDF. :return: The data processed by the UDF service. * Returns a raster data cube if a raster data cube is passed for `data`. Details on the dimensions and dimension properties (name, type, labels, reference system and resolution) depend on the UDF. * If an array is passed for `data`, the returned value can be of any data type, but is exactly what the UDF returns. """ return _process('run_udf_externally', data=data, url=url, context=context) def sar_backscatter(data, coefficient=UNSET, elevation_model=UNSET, mask=UNSET, contributing_area=UNSET, local_incidence_angle=UNSET, ellipsoid_incidence_angle=UNSET, noise_removal=UNSET) -> ProcessBuilder: """ Computes backscatter from SAR input :param data: The source data cube containing SAR input. :param coefficient: Select the radiometric correction coefficient. The following options are available: * `beta0`: radar brightness * `sigma0-ellipsoid`: ground area computed with ellipsoid earth model * `sigma0-terrain`: ground area computed with terrain earth model * `gamma0-ellipsoid`: ground area computed with ellipsoid earth model in sensor line of sight * `gamma0-terrain`: ground area computed with terrain earth model in sensor line of sight (default) * `null`: non-normalized backscatter :param elevation_model: The digital elevation model to use. Set to `null` (the default) to allow the back- end to choose, which will improve portability, but reduce reproducibility. :param mask: If set to `true`, a data mask is added to the bands with the name `mask`. It indicates which values are valid (1), invalid (0) or contain no-data (null). 
:param contributing_area: If set to `true`, a DEM-based local contributing area band named `contributing_area` is added. The values are given in square meters. :param local_incidence_angle: If set to `true`, a DEM-based local incidence angle band named `local_incidence_angle` is added. The values are given in degrees. :param ellipsoid_incidence_angle: If set to `true`, an ellipsoidal incidence angle band named `ellipsoid_incidence_angle` is added. The values are given in degrees. :param noise_removal: If set to `false`, no noise removal is applied. Defaults to `true`, which removes noise. :return: Backscatter values corresponding to the chosen parametrization. The values are given in linear scale. """ return _process('sar_backscatter', data=data, coefficient=coefficient, elevation_model=elevation_model, mask=mask, contributing_area=contributing_area, local_incidence_angle=local_incidence_angle, ellipsoid_incidence_angle=ellipsoid_incidence_angle, noise_removal=noise_removal) def save_result(data, format, options=UNSET) -> ProcessBuilder: """ Save processed data to storage :param data: The data to save. :param format: The file format to save to. It must be one of the values that the server reports as supported output file formats, which usually correspond to the short GDAL/OGR codes. If the format is not suitable for storing the underlying data structure, a `FormatUnsuitable` exception will be thrown. This parameter is *case insensitive*. :param options: The file format parameters to be used to create the file(s). Must correspond to the parameters that the server reports as supported parameters for the chosen `format`. The parameter names and valid values usually correspond to the GDAL/OGR format options. :return: `false` if saving failed, `true` otherwise. """ return _process('save_result', data=data, format=format, options=options) def sd(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Standard deviation :param data: An array of numbers. 
:param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The computed sample standard deviation. """ return _process('sd', data=data, ignore_nodata=ignore_nodata) def sgn(x) -> ProcessBuilder: """ Signum :param x: A number. :return: The computed signum value of `x`. """ return _process('sgn', x=x) def sin(x) -> ProcessBuilder: """ Sine :param x: An angle in radians. :return: The computed sine of `x`. """ return _process('sin', x=x) def sinh(x) -> ProcessBuilder: """ Hyperbolic sine :param x: An angle in radians. :return: The computed hyperbolic sine of `x`. """ return _process('sinh', x=x) def sort(data, asc=UNSET, nodata=UNSET) -> ProcessBuilder: """ Sort data :param data: An array with data to sort. :param asc: The default sort order is ascending, with smallest values first. To sort in reverse (descending) order, set this parameter to `false`. :param nodata: Controls the handling of no-data values (`null`). By default, they are removed. If set to `true`, missing values in the data are put last; if set to `false`, they are put first. :return: The sorted array. """ return _process('sort', data=data, asc=asc, nodata=nodata) def sqrt(x) -> ProcessBuilder: """ Square root :param x: A number. :return: The computed square root. """ return _process('sqrt', x=x) def subtract(x, y) -> ProcessBuilder: """ Subtraction of two numbers :param x: The minuend. :param y: The subtrahend. :return: The computed result. """ return _process('subtract', x=x, y=y) def sum(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Compute the sum by adding up numbers :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. 
:return: The computed sum of the sequence of numbers. """ return _process('sum', data=data, ignore_nodata=ignore_nodata) def tan(x) -> ProcessBuilder: """ Tangent :param x: An angle in radians. :return: The computed tangent of `x`. """ return _process('tan', x=x) def tanh(x) -> ProcessBuilder: """ Hyperbolic tangent :param x: An angle in radians. :return: The computed hyperbolic tangent of `x`. """ return _process('tanh', x=x) def text_begins(data, pattern, case_sensitive=UNSET) -> ProcessBuilder: """ Text begins with another text :param data: Text in which to find something at the beginning. :param pattern: Text to find at the beginning of `data`. Regular expressions are not supported. :param case_sensitive: Case sensitive comparison can be disabled by setting this parameter to `false`. :return: `true` if `data` begins with `pattern`, false` otherwise. """ return _process('text_begins', data=data, pattern=pattern, case_sensitive=case_sensitive) def text_contains(data, pattern, case_sensitive=UNSET) -> ProcessBuilder: """ Text contains another text :param data: Text in which to find something in. :param pattern: Text to find in `data`. Regular expressions are not supported. :param case_sensitive: Case sensitive comparison can be disabled by setting this parameter to `false`. :return: `true` if `data` contains the `pattern`, false` otherwise. """ return _process('text_contains', data=data, pattern=pattern, case_sensitive=case_sensitive) def text_ends(data, pattern, case_sensitive=UNSET) -> ProcessBuilder: """ Text ends with another text :param data: Text in which to find something at the end. :param pattern: Text to find at the end of `data`. Regular expressions are not supported. :param case_sensitive: Case sensitive comparison can be disabled by setting this parameter to `false`. :return: `true` if `data` ends with `pattern`, false` otherwise. 
""" return _process('text_ends', data=data, pattern=pattern, case_sensitive=case_sensitive) def text_merge(data, separator=UNSET) -> ProcessBuilder: """ Concatenate elements to a single text :param data: A set of elements. Numbers, boolean values and null values get converted to their (lower case) string representation. For example: `1` (integer), `-1.5` (number), `true` / `false` (boolean values) :param separator: A separator to put between each of the individual texts. Defaults to an empty string. :return: A string containing a string representation of all the array elements in the same order, with the separator between each element. """ return _process('text_merge', data=data, separator=separator) def trim_cube(data) -> ProcessBuilder: """ Remove dimension labels with no-data values :param data: A raster data cube to trim. :return: A trimmed raster data cube with the same dimensions. The dimension properties name, type, reference system and resolution remain unchanged. The number of dimension labels may decrease. """ return _process('trim_cube', data=data) def variance(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Variance :param data: An array of numbers. :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a value. :return: The computed sample variance. """ return _process('variance', data=data, ignore_nodata=ignore_nodata) def xor(x, y) -> ProcessBuilder: """ Logical XOR (exclusive or) :param x: A boolean value. :param y: A boolean value. :return: Boolean result of the logical XOR. """ return _process('xor', x=x, y=y)
49.976754
310
0.687616
31,650
232,192
5.007362
0.038325
0.01373
0.005792
0.011244
0.961308
0.949174
0.938826
0.916193
0.904968
0.893194
0
0.004061
0.239556
232,192
4,645
311
49.987513
0.893509
0.736714
0
0.115756
1
0
0.085249
0.007089
0
0
0
0
0
1
0.493569
false
0
0.003215
0.019293
0.993569
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
9
aa075f3e7bfad6bdeef8e2e73d607be7cf75a80a
150
py
Python
tools/test_sh_file.py
insigh/Faster-RCNN-Tensorflow
4f446a4c1ebefcf6d92b5e01d2b6396bcbbf1a8d
[ "MIT" ]
null
null
null
tools/test_sh_file.py
insigh/Faster-RCNN-Tensorflow
4f446a4c1ebefcf6d92b5e01d2b6396bcbbf1a8d
[ "MIT" ]
1
2019-04-04T14:19:40.000Z
2019-04-06T03:32:22.000Z
tools/test_sh_file.py
insigh/Faster-RCNN-Tensorflow
4f446a4c1ebefcf6d92b5e01d2b6396bcbbf1a8d
[ "MIT" ]
null
null
null
import os os.system("bash /home/chaojie/github/tf-faster-rcnn/experiments/scripts/test_faster_rcnn.sh 0 pascal_voc_0712 res101") # print("hahaha")
21.428571
118
0.786667
24
150
4.75
0.875
0.175439
0
0
0
0
0
0
0
0
0
0.057971
0.08
150
6
119
25
0.768116
0.1
0
0
0
0.5
0.789474
0.56391
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
aa23d06dce627c300497fe073f8ad758fa854eb5
70
py
Python
tests/syntax/dict_value_missing_4.py
hoefling/friendly-traceback
26acac903521c5f0e5cf837d32bda750918d4afb
[ "MIT" ]
45
2021-07-06T03:30:20.000Z
2022-03-16T17:30:58.000Z
tests/syntax/dict_value_missing_4.py
hoefling/friendly-traceback
26acac903521c5f0e5cf837d32bda750918d4afb
[ "MIT" ]
110
2021-06-28T11:48:46.000Z
2022-03-25T20:41:25.000Z
tests/syntax/dict_value_missing_4.py
hoefling/friendly-traceback
26acac903521c5f0e5cf837d32bda750918d4afb
[ "MIT" ]
4
2021-07-05T20:56:39.000Z
2021-11-11T20:24:34.000Z
# a = {1:2, 3} # a = {1:2, 3:} # a = {1:2, 3, 4:5} a = {1:2, 3:, 4:5}
14
19
0.285714
20
70
1
0.3
0.4
0.6
0.8
1
1
0.6
0.6
0
0
0
0.326531
0.3
70
4
20
17.5
0.081633
0.628571
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
0
1
null
1
1
1
1
1
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
11
aa28e03629efadc4e5cc26f252584a74b351dcd7
13,561
py
Python
Tools/pybench/Arithmetic.py
sireliah/polish-python
605df4944c2d3bc25f8bf6964b274c0a0d297cc3
[ "PSF-2.0" ]
1
2018-06-21T18:21:24.000Z
2018-06-21T18:21:24.000Z
Tools/pybench/Arithmetic.py
sireliah/polish-python
605df4944c2d3bc25f8bf6964b274c0a0d297cc3
[ "PSF-2.0" ]
null
null
null
Tools/pybench/Arithmetic.py
sireliah/polish-python
605df4944c2d3bc25f8bf6964b274c0a0d297cc3
[ "PSF-2.0" ]
null
null
null
z pybench zaimportuj Test klasa SimpleIntegerArithmetic(Test): version = 2.0 operations = 5 * (3 + 5 + 5 + 3 + 3 + 3) rounds = 120000 def test(self): dla i w range(self.rounds): a = 2 b = 3 c = 3 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2 b = 3 c = 3 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2 b = 3 c = 3 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2 b = 3 c = 3 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2 b = 3 c = 3 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b def calibrate(self): dla i w range(self.rounds): dalej klasa SimpleFloatArithmetic(Test): version = 2.0 operations = 5 * (3 + 5 + 5 + 3 + 3 + 3) rounds = 120000 def test(self): dla i w range(self.rounds): a = 2.1 b = 3.3332 c = 3.14159 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2.1 b = 3.3332 c = 3.14159 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2.1 b = 3.3332 c = 3.14159 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = 
a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2.1 b = 3.3332 c = 3.14159 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2.1 b = 3.3332 c = 3.14159 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b def calibrate(self): dla i w range(self.rounds): dalej klasa SimpleIntFloatArithmetic(Test): version = 2.0 operations = 5 * (3 + 5 + 5 + 3 + 3 + 3) rounds = 120000 def test(self): dla i w range(self.rounds): a = 2 b = 3 c = 3.14159 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2 b = 3 c = 3.14159 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2 b = 3 c = 3.14159 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2 b = 3 c = 3.14159 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2 b = 3 c = 3.14159 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b def calibrate(self): dla i w range(self.rounds): dalej klasa SimpleLongArithmetic(Test): version = 2.0 operations = 5 * (3 + 5 + 5 + 3 + 3 + 3) rounds = 60000 def test(self): dla i w 
range(self.rounds): a = 2220001 b = 100001 c = 30005 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2220001 b = 100001 c = 30005 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2220001 b = 100001 c = 30005 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2220001 b = 100001 c = 30005 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2220001 b = 100001 c = 30005 c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b def calibrate(self): dla i w range(self.rounds): dalej klasa SimpleComplexArithmetic(Test): version = 2.0 operations = 5 * (3 + 5 + 5 + 3 + 3 + 3) rounds = 80000 def test(self): dla i w range(self.rounds): a = 2 + 3j b = 2.5 + 4.5j c = 1.2 + 6.2j c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2 + 3j b = 2.5 + 4.5j c = 1.2 + 6.2j c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2 + 3j b = 2.5 + 4.5j c = 1.2 + 6.2j c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c 
* b c = a / b c = b / a c = c / b a = 2 + 3j b = 2.5 + 4.5j c = 1.2 + 6.2j c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b a = 2 + 3j b = 2.5 + 4.5j c = 1.2 + 6.2j c = a + b c = b + c c = c + a c = a + b c = b + c c = c - a c = a - b c = b - c c = c - a c = b - c c = a / b c = b / a c = c / b c = a * b c = b * a c = c * b c = a / b c = b / a c = c / b def calibrate(self): dla i w range(self.rounds): dalej
17.430591
44
0.184426
1,809
13,561
1.382532
0.023217
0.239904
0.179928
0.239904
0.940824
0.940824
0.940824
0.940824
0.940824
0.940824
0
0.089372
0.725242
13,561
777
45
17.453024
0.581857
0
0
0.986577
0
0
0
0
0
0
0
0
0
0
null
null
0
0.001678
null
null
0
0
0
1
null
1
0
1
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
12
a4b316e9ad441ba72ae0734905a35d9a33cedcab
1,220
py
Python
07_Java_Experiment/PyTest/exception/exceptions.py
Robert-Stackflow/HUST-Courses
300752552e7af035b0e5c7663953850c81871242
[ "MIT" ]
4
2021-11-01T09:27:32.000Z
2022-03-07T14:24:10.000Z
07_Java_Experiment/PyTest/exception/exceptions.py
Robert-Stackflow/HUST-Courses
300752552e7af035b0e5c7663953850c81871242
[ "MIT" ]
null
null
null
07_Java_Experiment/PyTest/exception/exceptions.py
Robert-Stackflow/HUST-Courses
300752552e7af035b0e5c7663953850c81871242
[ "MIT" ]
null
null
null
class FileNameFormatError(Exception): """ 压缩文件名格式错误异常 """ def __init__(self, message): """ 构造函数 :param message: 异常信息描述字符串 """ self.message = message # __str__相当于Java的toString函数,把对象转成字符串 def __str__(self): return repr(self.message) class CompressFormatError(Exception): """ 压缩格式错误异常 """ def __init__(self, message): """ 构造函数 :param message: 异常信息描述字符串 """ self.message = message # __str__相当于Java的toString函数,把对象转成字符串 def __str__(self): return repr(self.message) class AutoTestDirError(Exception): """ 自动测试目录异常 """ def __init__(self, message): """ 构造函数 :param message: 异常信息描述字符串 """ self.message = message # __str__相当于Java的toString函数,把对象转成字符串 def __str__(self): return repr(self.message) class UncompressError(Exception): """ 解压异常 """ def __init__(self, message): """ 构造函数 :param message: 异常信息描述字符串 """ self.message = message # __str__相当于Java的toString函数,把对象转成字符串 def __str__(self): return repr(self.message) None
18.769231
44
0.567213
101
1,220
6.376238
0.227723
0.204969
0.068323
0.111801
0.774845
0.774845
0.774845
0.774845
0.774845
0.774845
0
0
0.331148
1,220
64
45
19.0625
0.789216
0.245082
0
0.761905
0
0
0
0
0
0
0
0
0
1
0.380952
false
0
0
0.190476
0.761905
0
0
0
0
null
1
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
10
353f5d359b556af4f4fa3ec86baa016a280d3e22
37,473
py
Python
Src Code/Our Methods/hybrid_words_parser_method.py
kautsiitd/Unsupervised-Decomposition-of-a-Multi-Author-Document
33dab2dce3de2c08c7ae1a34646059653e2fcccc
[ "MIT" ]
4
2017-09-26T19:39:06.000Z
2020-05-14T14:54:55.000Z
Src Code/Our Methods/hybrid_words_parser_method.py
kautsiitd/Unsupervised-Decomposition-of-a-Multi-Author-Document
33dab2dce3de2c08c7ae1a34646059653e2fcccc
[ "MIT" ]
null
null
null
Src Code/Our Methods/hybrid_words_parser_method.py
kautsiitd/Unsupervised-Decomposition-of-a-Multi-Author-Document
33dab2dce3de2c08c7ae1a34646059653e2fcccc
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # encoding=utf8 from __future__ import unicode_literals '''#############################''' '''#####Importing Libraries#####''' ################################### import sys import os import io import re import pickle from random import randint as rnd from random import shuffle from itertools import groupby from sklearn.feature_extraction.text import CountVectorizer as CV from sklearn.mixture import GMM from pprint import pprint import operator import math import nltk from sklearn.naive_bayes import MultinomialNB as BNB import itertools import numpy as np from multiprocessing import Pool from multiprocessing import Manager import matplotlib.pyplot as plt from scipy.interpolate import spline # from pos_tag import pos_tagging # from pq_gram import find_pq_grams print "Import Done" '''#############################''' '''#####Importing Libraries#####''' ################################### '''############################''' '''#####Defining Variables#####''' ################################## processors= 4 V = 200 b_num = 0 b = ["Becker-Posner","GC-TF-PK","MD-TF-PK","MD-GC-PK","MD-GC-TF-PK"] gmm_initialisation = 5 # segment size seg_size = 30 # number of most frequent features max_features = 1500 # choosing top vital segment in a class best_per = .5 # number of sentence to test on from final model n_gram_size = 1 lowercase = True tokenizer = None token_pattern = u'(?u)\\b\\w\\w+\\b' # threshold for pq_gram similiarity index threshold = 1.0 # Whether plot graph for choosing threshold or not do_plot = 0 # Whether or not comparte with baseline results ignore_baseline = 0 # threshold for trusted sentence trus_thrs = .95 '''############################''' '''#####Defining Variables#####''' ################################## '''######### Step 1 ##########''' '''extracting and merging data''' ################################# ''' books_names = ['a',b','c',....] merged_data = ['sentence1','sentence2',.....] label_sen = [0,0,0,1,2,.....] 
label of sentence in merged_data segments_sen= [['sentence1','sentence2',... ],['sentence1','sentence2',... ],... number of segments] segments = ['segment1','segment2',.....] label_in_seg= [[0,0,0,1,2,0,0,..] [0,1,1,2,1,0,1,..] .... ] label of sentences in individual segments label_in_seg= [0,1,1,0,2,0,...] book with max count in segment is_pure_seg = [True,True,False,True,... number of segments] True if segment is pure randoms = [ [120,[2,0,1,.. number of Authors]], [180,[1,2,0,.. number of Authors]], .. ] ''' folder = "../dataset/Original/"+b[b_num] books_names = os.listdir(folder) merged_data = [] label_sen = [] segments_sen= [] segments = [] label_seg = [] label_in_seg= [] is_pure_seg = [] randoms = [] # main number_books= len(books_names) books_data = [] for book in books_names: path = os.path.join(folder,book) f = io.open(path, encoding="ISO-8859-1") books_data.append(f.readlines()) number_sen = [len(book_data) for book_data in books_data] total_sen = sum(number_sen) number_seg = int(math.ceil((total_sen/seg_size))) count_sen = [0]*number_books while(sum(count_sen) != total_sen): size = rnd(1,V) randoms.append([size,[]]) done_book = [0]*number_books for i in range(number_books): book_num = rnd(0,number_books-1) while(done_book[book_num] != 0): book_num = rnd(0,number_books-1) randoms[-1][-1].append(book_num) done_book[book_num] = 1 new_count_sen = count_sen[book_num] + min(size,number_sen[book_num]-count_sen[book_num]) for j in books_data[book_num][ count_sen[book_num]:new_count_sen ]: merged_data.append( re.sub('[\r\n]','',j) ) label_sen.extend([book_num] * (new_count_sen - count_sen[book_num]) ) count_sen[book_num] = new_count_sen for i in range(number_seg): start = seg_size*i end = min(seg_size*(i+1),total_sen) seg_data = merged_data[start:end] segments_sen.append(seg_data) segments.append(' '.join(seg_data)) labels = label_sen[start:end] label_in_seg.append(labels) for i in range(number_seg): label_seg.append(max(set(label_in_seg[i]), 
key=label_in_seg[i].count)) for i in range(number_seg): is_pure_seg.append(sum(label_in_seg[i])%len(label_in_seg[i]) == 0) '''######''' '''Step 1''' '''######''' '''###########################''' '''Printing Results of merging''' '''###########################''' ''' org_seg = [430,405,...,150] number of pure segments by author i, last one for mixed ''' # calculating segments by each author org_seg = [0 for i in range(number_books+1)] for i in range(number_seg): if( sum(label_in_seg[i])%len(label_in_seg[i]) == 0): org_seg[ sum(label_in_seg[i])/len(label_in_seg[i]) ] += 1 else: org_seg[-1] += 1 for i in range(number_books): print "Author "+str(i)+":",org_seg[i] print "Mixed :",org_seg[-1] print "STEP 1 done" '''###########################''' '''Printing Results of merging''' ################################# '''######### Step 2 #########''' '''Get pq-gram of merged data''' ################################ ''' segments_parser = [ [[**ROOTNN*,*NN**JJ, ... pq-grams of Sentence 1],[**ROOTNN*,*NN**JJ, ... pq-grams of Sentence 2],.... ... Number of Sentence in Segment 1] [[**ROOTNN*,*NN**JJ, ... pq-grams of Sentence 1],[**ROOTNN*,*NN**JJ, ... pq-grams of Sentence 2],.... ... Number of Sentence in Segment 2] .... 
Number of segments ] ''' folder = "../../dataset/Parser/"+b[b_num] books_names = os.listdir(folder) merged_parser = [] segments_parser = [] parser_data = [] count_sen = [0]*number_books for book in books_names: path = os.path.join(folder,book) f = io.open(path, encoding="ISO-8859-1") parser_data.append(f.readlines()) for random in randoms: size = random[0] for i in range(number_books): book_num = random[-1][i] new_count_sen = count_sen[book_num] + min(size,number_sen[book_num]-count_sen[book_num]) for j in parser_data[book_num][ count_sen[book_num]:new_count_sen ]: merged_parser.append( re.sub('[\r\n]','',j) ) count_sen[book_num] = new_count_sen for i in range(number_seg): start = seg_size*i end = min(seg_size*(i+1),total_sen) seg_data = merged_parser[start:end] segments_parser.append(seg_data) print "Step 2 done" '''######''' '''Step 2''' '''######''' '''####################### STEP 3 ######################''' '''Find If given Segment is Pure or Mix USING WORDS used''' ########################################################### ''' Calculate Similiarity Index of Segment: Higher Similiarity Index, Higher Pure ''' n_gram_size_2 = 2 threshold_2 = 2200 number_seg = len(segments_parser) score_true = Manager().list([]) score_false = Manager().list([]) mixed_segments = Manager().list([]) pure_segments = Manager().list([]) pure_data = Manager().list([]) mixed_data = Manager().list([]) pure = Manager().list([]) mixed = Manager().list([]) def score_word_similiarity(i): seg_model = CV(binary = True, min_df=1, ngram_range=(1,n_gram_size_2), max_features=2000, lowercase=lowercase, tokenizer=tokenizer, token_pattern=token_pattern) vec_seg = seg_model.fit_transform(segments_sen[i]).toarray() similiarity_index = 0 seg_size = len(segments_sen[i]) for j in range(seg_size): for k in range(j,seg_size): similiarity_index += sum(x[0]*x[1] for x in zip(vec_seg[j],vec_seg[k])) if similiarity_index > threshold_2: pure_segments.append(segments[i]) pure_data.extend(segments_sen[i]) else: 
mixed_segments.append(segments[i]) mixed_data.extend(segments_sen[i]) if is_pure_seg[i] == True: score_true.append(similiarity_index) if similiarity_index > threshold_2: pure.append(1) else: mixed.append(0) else: score_false.append(similiarity_index) if similiarity_index > threshold_2: pure.append(0) else: mixed.append(1) p = Pool(processors) p.map(score_word_similiarity, range(number_seg)) score_true.sort() score_false.sort() print score_true print score_false print "Accuracy Initial",float(sum(org_seg)-org_seg[-1])/sum(org_seg),sum(org_seg) print "Accuracy Final",pure.count(1),float(pure.count(1))/len(pure),len(pure) print "Step 3 done" '''######''' '''Step 3''' '''######''' '''################## STEP ** #################''' '''Plotting Graph for finding optimum threshold''' ################################################## def plot_word_graph(): global score_true,score_false accuracies = [] n_pure = [] data_size = [] fig, ax = plt.subplots() axes = [ax, ax.twinx()] for thr in range(1500,2500): print thr mixed = [] pure = [] threshold = float(thr)/1 for similiarity_index in score_true: if similiarity_index > thr: pure.append(1) else: mixed.append(0) for similiarity_index in score_false: if similiarity_index > thr: pure.append(0) else: mixed.append(1) accuracies.append(float(pure.count(1)*100)/len(pure)) n_pure.append(pure.count(1)) data_size.append(len(pure)) base = np.array([float(x)/1 for x in range(1500,2500)]) thr = np.linspace(base.min(),base.max(),2000) accuracies_smooth = spline(base,accuracies,thr) n_pure_smooth = spline(base,n_pure,thr) data_size_smooth = spline(base,data_size,thr) axes[1].plot(thr,accuracies_smooth,'r') axes[0].plot(thr,n_pure_smooth,'b') axes[0].plot(thr,data_size_smooth,'g') plt.show() if do_plot == 1: plot_word_graph() '''#######''' '''Step **''' '''#######''' # sys.exit() '''######################### STEP 4 ######################''' '''Find If given Segment is Pure or Mix USING PQ GRAM used''' 
############################################################# ''' Calculate Similiarity Index of Segment: Higher Similiarity Index, Higher Pure ''' ''' pure_segments = ['segment1','segment2',.....] pure_data = ['sentence1','sentence2',.....] mixed_data = ['sentence1','sentence2',.....] ''' # calculating sentence sizes in each segment sentence_size = [] for segment in segments_parser: sentence_size.append([]) for sentence in segment: sentence_size[-1].append(len(sentence)) # calculating similiarity index for each segment number_seg = len(segments_parser) score_true = Manager().list([]) score_false = Manager().list([]) mixed_segments = Manager().list([]) pure_segments = Manager().list([]) pure_data = Manager().list([]) mixed_data = Manager().list([]) pure = Manager().list([]) mixed = Manager().list([]) def score_similiarity(i): segment = segments_parser[i] similiarity_index = 0 seg_size = len(segment) '''iterating over sentences in a segment''' for j in range(seg_size): for pq_gram in segment[j]: '''checking current pqgram is in how many other sentences of same segment''' for k in range(j,seg_size): if pq_gram in segment[k]: similiarity_index += 1.0/(sentence_size[i][j]*sentence_size[i][k]) if similiarity_index > threshold: pure_segments.append(segments[i]) pure_data.extend(segments_sen[i]) else: mixed_segments.append(segments[i]) mixed_data.extend(segments_sen[i]) if is_pure_seg[i] == True: score_true.append(similiarity_index) if similiarity_index > threshold: pure.append(1) else: mixed.append(0) else: score_false.append(similiarity_index) if similiarity_index > threshold: pure.append(0) else: mixed.append(1) p = Pool(processors) p.map(score_similiarity, range(number_seg)) print "Accuracy Initial",float(sum(org_seg)-org_seg[-1])/sum(org_seg),sum(org_seg) print "Accuracy Final",pure.count(1),float(pure.count(1))/len(pure),len(pure) print "Step 4 done" '''######''' '''Step 4''' '''######''' '''################## STEP ** #################''' '''Plotting Graph for 
finding optimum threshold''' ################################################## def plot_graph(): global score_true,score_false accuracies = [] n_pure = [] data_size = [] fig, ax = plt.subplots() axes = [ax, ax.twinx()] for thr in range(20,210): mixed = [] pure = [] threshold = float(thr)/100 for similiarity_index in score_true: if similiarity_index > threshold: pure.append(1) else: mixed.append(0) for similiarity_index in score_false: if similiarity_index > threshold: pure.append(0) else: mixed.append(1) # print thr,"Accuracy Final",pure_segments.count(1),float(pure_segments.count(1))/len(pure_segments),len(pure_segments) accuracies.append(float(pure.count(1)*100)/len(pure)) n_pure.append(pure.count(1)) data_size.append(len(pure)) base = np.array([float(x)/100 for x in range(20,210)]) thr = np.linspace(base.min(),base.max(),500) accuracies_smooth = spline(base,accuracies,thr) n_pure_smooth = spline(base,n_pure,thr) data_size_smooth = spline(base,data_size,thr) axes[1].plot(thr,accuracies_smooth,'r') axes[0].plot(thr,n_pure_smooth,'b') axes[0].plot(thr,data_size_smooth,'g') plt.show() if do_plot == 1: plot_graph() '''#######''' '''Step **''' '''#######''' '''##############################################''' '''################## Step 5 ####################''' '''finding features and vectorising pure segments''' '''##############################################''' ''' model = model with feature words having atleast frequency = 3 = 11000 vec_seg_pure(sparse matrix) = [ [0,0,1,1,0,1,1,1,1,0,0,0,0,1,1,... number of feature words=11000] [0,0,1,0,0,1,1,0,1,0,0,1,1,0,0,... whether word present or not] .... 
number of segments ] number_f_w = number of feature words extracted from merged data ''' model = CV(binary = True, min_df = 3, ngram_range=(1,n_gram_size), max_features=20000, lowercase=lowercase, tokenizer=tokenizer, token_pattern=token_pattern) model = model.fit(pure_data) vec_seg_pure = model.transform(pure_segments) vec_seg_mixed = model.transform(mixed_segments) number_f_w = len(model.vocabulary_) vec_seg_pure = vec_seg_pure.toarray() max_features = min(max_features,number_f_w) print "number of feature words:",number_f_w print "STEP 5 done" '''######''' '''Step 5''' '''######''' '''############################################''' '''################ Step 6 ####################''' '''Unsupervised labelling of segments using GMM''' '''############################################''' ''' label_p = [0,1,0,1,2,0,1,.... number of segments] predicted label for each segment count_mapping = [[20,3,450,... number of books] how much predicted label match to original label(max count) [410,5,10,..] ... number of books ] mapping = [2,0,1,5,3,...] What predicted label match to in original label clusters = [['sentence','sentence',..... in cluster 0] ['sentence','sentence',..... in cluster 1] .... number of books ] ''' number_pure_seg = len(pure_segments) number_mixed_seg = len(mixed_segments) mapping = [0 for i in range(number_books)] while(len(set(mapping)) != number_books): model1 = GMM(n_components = number_books, n_iter = 1000, covariance_type = 'diag', n_init = gmm_initialisation, verbose = 1) model1 = model1.fit(vec_seg_pure) label_p = model1.predict_proba(vec_seg_pure) temp_label_p = [] for j in range(number_pure_seg): temp_label_p.append(map(lambda x: (x),label_p[j]) . 
index(max(label_p[j]))) label_p = temp_label_p count_mapping = [ [0 for j in range(number_books)] for i in range(number_books)] for i,j in zip(label_p,label_seg): count_mapping[i][j] += 1 for i in range(number_books): max_frq = max(count_mapping[i]) mapping[i] = count_mapping[i].index(max_frq) print "mapping:",mapping print "count_mapping:",count_mapping # updating label_p with mapping for i in range(number_pure_seg): label_p[i] = mapping[label_p[i]] # segments in each clusters as sentences clusters = [[] for i in range(number_books)] for i in range(number_pure_seg): clusters[label_p[i]].append(pure_segments[i]) '''######''' '''Step 6''' '''######''' '''################################''' '''Calculating Precision and Recall''' '''################################''' confusion_matrix = [ [0 for j in range(number_books)] for i in range(number_books)] for i in range(number_pure_seg): confusion_matrix[label_p[i]][label_seg[i]] += 1 recall = [] for i in range(number_books): recall.append(float(confusion_matrix[i][i])/sum(zip(*confusion_matrix)[i])) print "Recall:",recall print float(sum(recall))/number_books print "mapping:",mapping print "confusion_matrix:",confusion_matrix print "STEP 6 done" '''################################''' '''Calculating Precision and Recall''' '''################################''' '''############################################################''' '''##################### Step 7 ###############################''' '''Revectorising segments with max_features most frequent words''' '''############################################################''' ''' model2 = model with at most max_features=1500 feature words vec_seg_cls(sparse matrix) = [[ [0,1,1,0,1,1,1,0,..... max_features=1500],[vector of segment 2],.... cluster 0] [ [0,0,1,1,0,0,1,0,..... max_features=1500],[vector of segment 2],.... cluster 1] .... number of books ] vector representation of each segment in corresponding cluster vec_seg_new(sparse matrix) = [[0,1,1,0,1,1,1,0,..... 
max_features=1500] [0,0,1,1,0,0,1,0,..... max_features=1500] .... number of segments ] vector representation of each segment ''' model2 = CV(ngram_range=(1,n_gram_size), max_features = max_features) model2 = model2.fit(pure_data) vec_seg_cls = [model2.transform(clusters[i]) for i in range(number_books)] vec_seg_new = model2.transform(segments) print "STEP 7 done" '''######''' '''Step 7''' '''######''' '''#####################################''' '''############## Step 8 ###############''' '''Applying SegmentElicitation Procedure''' '''#####################################''' ''' vec_seg_cls(dense_array) = vector representation of each segment in corresponding cluster vec_seg_new(dense_array1) = vector representation of each segment word_cls_frq = frequency of feature words(max_features=1500) in each cluster = [[25,100,13,15,253,.... number of feature words] cluster 0 [65,200,123,10,15,.... number of feature words] cluster 1 .... number of clusters/books ] word_frq = each feature word(max_features=1500) frequency in whole document = [150,550,260,1021,.... number of feature words(max_features=1500)] post_p_w = posterior probability of each feature word in each cluster/book = [ [0.3,0.25,.... number of clusters/books] word 1 [0.1,0.15,.... number of clusters/books] word 2 .... number of feature words(max_features=1500) ] post_p_seg = posterior probability of each segment in each cluster = [ [[0.85,0.01,0.1,... number of books,0(segment number)], [segment 2],.... number of segments in this cluster] cluster 1 [[0.85,0.01,0.1,... number of books,1(segment number)], [segment 2],.... number of segments in this cluster] cluster 2 .... number of clusters/books ] best_seg = 80% of post_p_seg for each cluster in same format = [ [[0.85,0.01,0.1,... number of books,0(segment number)], [segment 2],.... number of segments in this cluster] cluster 1 [[0.85,0.01,0.1,... number of books,1(segment number)], [segment 2],.... number of segments in this cluster] cluster 2 .... 
number of clusters/books ] ''' # calculating posterior probability of words # variables post_p_w = [] dense_array = [i.toarray() for i in vec_seg_cls] dense_array1 = vec_seg_new.toarray() word_cls_frq = [[sum(word_f) for word_f in zip(*cluster)] for cluster in dense_array] word_frq = [sum(word_f) for word_f in zip(*word_cls_frq)] # main for i in range(max_features): post_p_w.append([]) for j in range(number_books): post_p_w[i].append(float(word_cls_frq[j][i])/word_frq[i]) # calculating posterior probability of segments in each cluster post_p_seg = [[] for i in range(number_books)] # jth segment ith cluster '''calculating posterior probability of pure segments''' for j in range(number_pure_seg): cls_num = label_p[j] temp = [] for i in range(number_books): summation = 0 for k in range(max_features): if (dense_array1[j][k]>0 and post_p_w[k][i]>0): summation += math.log(post_p_w[k][i]) temp.append(summation) temp.append(j) post_p_seg[cls_num].append(temp) '''calculating posterior probability of mixed segments''' for j in range(number_mixed_seg): temp = [] for i in range(number_books): summation = 0 for k in range(max_features): if (dense_array1[j][k]>0 and post_p_w[k][i]>0): summation += math.log(post_p_w[k][i]) temp.append(summation) cls_num = temp.index(max(temp)) temp.append(j) post_p_seg[cls_num].append(temp) '''################finding vital segment for each cluster####################''' '''Choosing best 80%(best_per) of segments to represent corresponding cluster''' '''##########################################################################''' best_seg = [] for i in range(number_books): end = int(best_per*len(post_p_seg[i])) sort_seg = sorted(post_p_seg[i], key=lambda x:-x[i]+max(x[:i]+x[i+1:-1])) best_seg.append(sort_seg[:end]) print "STEP 8 done" '''######''' '''Step 8''' '''######''' '''#################################################################################################''' '''######################################## Step 9 
#################################################''' '''Representing vital segments in form of minimum 3 frq feature words for each corresponding cluster''' '''#################################################################################################''' ''' vec_seg(dense) = vector representation of each segment vital_seg = [ [ [0,1,1,0,0,1,1,1,0,0,0,... ~1500 max_features(=1500)], [0,1,1,0,0,1,1,1,0,0,0,...],.... number of vital segments] cluster 0 [ [0,1,1,0,0,1,1,1,0,0,0,... ~1500 max_features(=1500)], [0,1,1,0,0,1,1,1,0,0,0,...],.... number of vital segments] cluster 1 .... number of clusters ] ''' vital_seg = [] for cluster_n in range(number_books): vital_seg.append([]) for seg in best_seg[cluster_n]: vital_seg[cluster_n].append(dense_array1[seg[-1]]) print "STEP 9 done" '''######''' '''Step 9''' '''######''' '''###############################################################################''' '''################################ Step 10 ######################################''' '''Training using Bernouli Naive-Bayesian model to learn a classifier on vital_seg''' '''###############################################################################''' ''' vital_seg = 2*number of vital_seg*1500 train = 2*number of vital_seg*1500 ''' train = [] labels= [] for cluster_n in range(number_books): for seg in vital_seg[cluster_n]: train.append(seg.tolist()) labels.append(cluster_n) model3 = BNB(fit_prior = True) model3 = model3.fit(train, labels) print "STEP 10 done" '''########''' '''Step 10''' '''#######''' '''################################################################''' '''######################### Step 11 ##############################''' '''classfying sentences on trained classifier and calculating score''' '''################################################################''' test_size = len(merged_data) vec_sen = model2.transform(merged_data[:test_size]) auth_proba = model3.predict_proba(vec_sen) predicted = [map(lambda x: 
(x),auth_proba[i]).index(max(auth_proba[i])) for i in range(test_size)] org_label = label_sen[:test_size] print model3.score(vec_sen, org_label) print "STEP 11 done" '''#######''' '''Step 11''' '''#######''' '''#########################################''' '''################Step 12##################''' '''Applying Probability Indication Procedure''' '''#########################################''' # Rule 1 is_trusted = [] for i in range(test_size): temp = sorted(auth_proba[i]) if(temp[-1]-temp[-2] > trus_thrs): is_trusted.append(predicted[i]) else: is_trusted.append(-1) # Rule 2 if(is_trusted[0] == -1): for i in range(1,test_size): if(is_trusted[i] != -1): is_trusted[0] = is_trusted[i] break if(is_trusted[0] == -1): is_trusted[0] = map(lambda x: (x),auth_proba[0]).index(max(auth_proba[0])) # Rule 3 if(is_trusted[-1] == -1): for i in range(test_size-1, -1, -1): if(is_trusted[i] != -1): is_trusted[-1] = is_trusted[i] break if(is_trusted[-1] == -1): is_trusted[-1] = map(lambda x: (x),auth_proba[-1]).index(max(auth_proba[-1])) # Rule 4 & 5 before_label = -1 for i in range(test_size): if(is_trusted[i] != -1): before_label = is_trusted[i] else: after_label = -1 start = i end = i while(i < test_size): i += 1 if(is_trusted[i] != -1): after_label = is_trusted[i] end = i break if(before_label == after_label): for j in range(start,end): is_trusted[j] = before_label else: for j in range(start,(start+end)/2): is_trusted[j] = before_label for j in range((start+end)/2,end): is_trusted[j] = after_label print "STEP 12 done" '''######''' '''Step 12''' '''######''' # Checking New Score correct = 0 for i in range(test_size): if(org_label[i] == is_trusted[i]): correct += 1 print "New Accuracy:",float(correct*100)/test_size,"%" if ignore_baseline == 0: '''#########################################''' '''################ Step 2 #################''' '''finding features and vectorising segments''' '''####### Comparing with baseline #########''' '''#########################################''' 
''' model = model with feature words having atleast frequency = 3 = 11000 vec_seg(sparse matrix) = [ [0,0,1,1,0,1,1,1,1,0,0,0,0,1,1,... number of feature words=11000] [0,0,1,0,0,1,1,0,1,0,0,1,1,0,0,... whether word present or not] .... number of segments ] number_f_w = number of feature words extracted from merged data ''' model = CV(binary = True, min_df = 3, ngram_range=(1,n_gram_size), max_features=20000, lowercase=lowercase, tokenizer=tokenizer, token_pattern=token_pattern) model = model.fit(merged_data) vec_seg = model.transform(segments) number_f_w = len(model.vocabulary_) vec_seg = vec_seg.toarray() max_features = min(max_features,number_f_w) print "number of feature words:",number_f_w print "STEP 2 done" '''#######''' '''Step 2''' '''#######''' '''############################################''' '''################ Step 3 ####################''' '''Unsupervised labelling of segments using GMM''' '''############################################''' ''' label_p = [0,1,0,1,2,0,1,.... number of segments] predicted label for each segment count_mapping = [[20,3,450,... number of books] how much predicted label match to original label(max count) [410,5,10,..] ... number of books ] mapping = [2,0,1,5,3,...] What predicted label match to in original label clusters = [['sentence','sentence',..... in cluster 0] ['sentence','sentence',..... in cluster 1] .... number of books ] ''' mapping = [0 for i in range(number_books)] while(len(set(mapping)) != number_books): model1 = GMM(n_components = number_books, n_iter = 1000, covariance_type = 'diag', n_init = gmm_initialisation, verbose = 0) model1 = model1.fit(vec_seg) label_p = model1.predict_proba(vec_seg) temp_label_p = [] for j in range(number_seg): temp_label_p.append(map(lambda x: (x),label_p[j]) . 
index(max(label_p[j]))) label_p = temp_label_p count_mapping = [ [0 for j in range(number_books)] for i in range(number_books)] for i,j in zip(label_p,label_seg): count_mapping[i][j] += 1 for i in range(number_books): max_frq = max(count_mapping[i]) mapping[i] = count_mapping[i].index(max_frq) print "baseline mapping:",mapping print "baseline count_mapping:",count_mapping # updating label_p with mapping for i in range(number_seg): label_p[i] = mapping[label_p[i]] # segments in each clusters as sentences clusters = [[] for i in range(number_books)] for i in range(number_seg): clusters[label_p[i]].append(segments[i]) '''######''' '''Step 3''' '''######''' '''#############################################''' '''Calculating Precision and Recall for baseline''' '''#############################################''' confusion_matrix = [ [0 for j in range(number_books)] for i in range(number_books)] for i in range(number_seg): confusion_matrix[label_p[i]][label_seg[i]] += 1 recall = [] for i in range(number_books): recall.append(float(confusion_matrix[i][i])/sum(zip(*confusion_matrix)[i])) print "baseline Recall:",recall print float(sum(recall))/number_books print "baseline mapping:",mapping print "baseline confusion_matrix:",confusion_matrix print "STEP 3 done" '''#############################################''' '''Calculating Precision and Recall for baseline''' '''#############################################''' '''############################################################''' '''######################Step 4################################''' '''Revectorising segments with max_features most frequent words''' '''############################################################''' ''' model2 = model with at most max_features=1500 feature words vec_seg_cls(sparse matrix) = [[ [0,1,1,0,1,1,1,0,..... max_features=1500],[vector of segment 2],.... cluster 0] [ [0,0,1,1,0,0,1,0,..... max_features=1500],[vector of segment 2],.... cluster 1] .... 
number of books ] vector representation of each segment in corresponding cluster vec_seg_new(sparse matrix) = [[0,1,1,0,1,1,1,0,..... max_features=1500] [0,0,1,1,0,0,1,0,..... max_features=1500] .... number of segments ] vector representation of each segment ''' model2 = CV(ngram_range=(1,n_gram_size), max_features = max_features) model2 = model2.fit(merged_data) vec_seg_cls = [model2.transform(clusters[i]) for i in range(number_books)] vec_seg_new = model2.transform(segments) print "STEP 4 done" '''#####################################''' '''###############Step 5################''' '''Applying SegmentElicitation Procedure''' '''#####################################''' ''' vec_seg_cls(dense) = vector representation of each segment in corresponding cluster vec_seg_new(dense) = vector representation of each segment word_cls_frq = frequency of feature words(max_features=1500) in each cluster = [[25,100,13,15,253,.... number of feature words] cluster 0 [65,200,123,10,15,.... number of feature words] cluster 1 .... number of clusters/books ] word_frq = each feature word(max_features=1500) frequency in whole document = [150,550,260,1021,.... number of feature words(max_features=1500)] post_p_w = posterior probability of each feature word in each cluster/book = [ [0.3,0.25,.... number of clusters/books] word 1 [0.1,0.15,.... number of clusters/books] word 2 .... number of feature words(max_features=1500) ] post_p_seg = posterior probability of each segment in each cluster = [ [[0.85,0.01,0.1,... number of books,0(segment number)], [segment 2],.... number of segmensin this cluster] cluster 1 [[0.85,0.01,0.1,... number of books,1(segment number)], [segment 2],.... number of segmensin this cluster] cluster 2 .... number of clusters/books ] best_seg = 80% of post_p_seg for each cluster in same format = [ [[0.85,0.01,0.1,... number of books,0(segment number)], [segment 2],.... number of segments in this cluster] cluster 1 [[0.85,0.01,0.1,... 
number of books,1(segment number)], [segment 2],.... number of segments in this cluster] cluster 2 .... number of clusters/books ] ''' # calculating posterior probability of words # variables post_p_w = [] dense_array = [i.toarray() for i in vec_seg_cls] dense_array1 = vec_seg_new.toarray() word_cls_frq = [[sum(word_f) for word_f in zip(*cluster)] for cluster in dense_array] word_frq = [sum(word_f) for word_f in zip(*word_cls_frq)] # main for i in range(max_features): post_p_w.append([]) for j in range(number_books): post_p_w[i].append(float(word_cls_frq[j][i])/word_frq[i]) # calculating posterior probability of segments in each cluster post_p_seg = [[] for i in range(number_books)] # jth segment ith cluster for j in range(number_seg): cls_num = label_p[j] temp = [] for i in range(number_books): summation = 0 for k in range(max_features): if (dense_array1[j][k]>0 and post_p_w[k][i]>0): summation += math.log(post_p_w[k][i]) temp.append(summation) temp.append(j) post_p_seg[cls_num].append(temp) # print post_p_seg[cls_num][-1] '''################finding vital segment for each cluster####################''' '''Choosing best 80%(best_per) of segments to represent corresponding cluster''' '''##########################################################################''' best_seg = [] for i in range(number_books): end = int(best_per*len(post_p_seg[i])) sort_seg = sorted(post_p_seg[i], key=lambda x:-x[i]+max(x[:i]+x[i+1:-1])) best_seg.append(sort_seg[:end]) print "STEP 5 done" '''######''' '''Step 5''' '''######''' '''#################################################################################################''' '''#########################################Step 6##################################################''' '''Representing vital segments in form of minimum 3 frq feature words for each corresponding cluster''' '''#################################################################################################''' ''' vec_seg(dense) = vector representation of each 
segment vital_seg = [ [ [0,1,1,0,0,1,1,1,0,0,0,... ~1500 max_features(=1500)], [0,1,1,0,0,1,1,1,0,0,0,...],.... number of vital segments] cluster 0 [ [0,1,1,0,0,1,1,1,0,0,0,... ~1500 max_features(=1500)], [0,1,1,0,0,1,1,1,0,0,0,...],.... number of vital segments] cluster 1 .... number of clusters ] ''' print "STEP 5 done" vital_seg = [] for cluster_n in range(number_books): vital_seg.append([]) for seg in best_seg[cluster_n]: vital_seg[cluster_n].append(dense_array1[seg[-1]]) print "STEP 6 done" '''######''' '''Step 6''' '''######''' '''###############################################################################''' '''#################################Step 7########################################''' '''Training using Bernouli Naive-Bayesian model to learn a classifier on vital_seg''' '''###############################################################################''' ''' vital_seg = 2*number of vital_seg*1500 train = 2*number of vital_seg*1500 ''' train = [] labels= [] for cluster_n in range(number_books): for seg in vital_seg[cluster_n]: train.append(seg.tolist()) labels.append(cluster_n) model3 = BNB(fit_prior = True) model3 = model3.fit(train, labels) print "STEP 7 done" '''######''' '''Step 7''' '''######''' '''################################################################''' '''##########################Step 8################################''' '''classfying sentences on trained classifier and calculating score''' '''################################################################''' test_size = len(merged_data) vec_sen = model2.transform(merged_data[:test_size]) auth_proba = model3.predict_proba(vec_sen) predicted = [map(lambda x: (x),auth_proba[i]).index(max(auth_proba[i])) for i in range(test_size)] org_label = label_sen[:test_size] print model3.score(vec_sen, org_label) print "STEP 8 done" '''######''' '''Step 8''' '''######''' '''#########################################''' '''################Step 9###################''' '''Applying Probability 
Indication Procedure''' '''#########################################''' # Rule 1 is_trusted = [] for i in range(test_size): temp = sorted(auth_proba[i]) if(temp[-1]-temp[-2] > trus_thrs): is_trusted.append(predicted[i]) else: is_trusted.append(-1) # Rule 2 if(is_trusted[0] == -1): for i in range(1,test_size): if(is_trusted[i] != -1): is_trusted[0] = is_trusted[i] break if(is_trusted[0] == -1): is_trusted[0] = map(lambda x: (x),auth_proba[0]).index(max(auth_proba[0])) # Rule 3 if(is_trusted[-1] == -1): for i in range(test_size-1, -1, -1): if(is_trusted[i] != -1): is_trusted[-1] = is_trusted[i] break if(is_trusted[-1] == -1): is_trusted[-1] = map(lambda x: (x),auth_proba[-1]).index(max(auth_proba[-1])) # Rule 4 & 5 before_label = -1 for i in range(test_size): if(is_trusted[i] != -1): before_label = is_trusted[i] else: after_label = -1 start = i end = i while(i < test_size): i += 1 if(is_trusted[i] != -1): after_label = is_trusted[i] end = i break if(before_label == after_label): for j in range(start,end): is_trusted[j] = before_label else: for j in range(start,(start+end)/2): is_trusted[j] = before_label for j in range((start+end)/2,end): is_trusted[j] = after_label print "STEP 9 done" '''######''' '''Step 9''' '''######''' # Checking New Score correct = 0 for i in range(test_size): if(org_label[i] == is_trusted[i]): correct += 1 print "New Accuracy:",float(correct*100)/test_size,"%"
35.218985
161
0.586422
5,224
37,473
4.032542
0.072741
0.027248
0.014811
0.026108
0.808791
0.792462
0.766828
0.74428
0.729422
0.705734
0
0.034363
0.14808
37,473
1,064
162
35.218985
0.625517
0.03245
0
0.720613
0
0
0.035131
0.001048
0
0
0
0
0
0
null
null
0
0.039182
null
null
0.085179
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
103ba8ba42568437de742c18c037ccdeff76f266
16,806
py
Python
glance_store/tests/unit/test_driver.py
sokovnich/glance_store
088a07e602825ae482ebf747cddc94d1996f04e6
[ "Apache-2.0" ]
49
2015-01-01T17:49:15.000Z
2021-01-12T07:08:58.000Z
glance_store/tests/unit/test_driver.py
sokovnich/glance_store
088a07e602825ae482ebf747cddc94d1996f04e6
[ "Apache-2.0" ]
4
2021-05-20T12:09:08.000Z
2021-06-07T13:25:26.000Z
glance_store/tests/unit/test_driver.py
sokovnich/glance_store
088a07e602825ae482ebf747cddc94d1996f04e6
[ "Apache-2.0" ]
50
2015-01-23T18:21:06.000Z
2021-11-23T04:42:25.000Z
# Copyright 2018 Verizon Wireless # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import hashlib from oslo_utils.secretutils import md5 from oslotest import base import glance_store.driver as driver class _FakeStore(object): @driver.back_compat_add def add(self, image_id, image_file, image_size, hashing_algo, context=None, verifier=None): """This is a 0.26.0+ add, returns a 5-tuple""" if hashing_algo == 'md5': hasher = md5(usedforsecurity=False) else: hasher = hashlib.new(str(hashing_algo)) # assume 'image_file' will be bytes for these tests hasher.update(image_file) backend_url = "backend://%s" % image_id bytes_written = len(image_file) checksum = md5(image_file, usedforsecurity=False).hexdigest() multihash = hasher.hexdigest() metadata_dict = {"verifier_obj": verifier.name if verifier else None, "context_obj": context.name if context else None} return (backend_url, bytes_written, checksum, multihash, metadata_dict) class _FakeContext(object): name = 'context' class _FakeVerifier(object): name = 'verifier' class TestBackCompatWrapper(base.BaseTestCase): def setUp(self): super(TestBackCompatWrapper, self).setUp() self.fake_store = _FakeStore() self.fake_context = _FakeContext() self.fake_verifier = _FakeVerifier() self.img_id = '1234' self.img_file = b'0123456789' self.img_size = 10 self.img_checksum = md5(self.img_file, usedforsecurity=False).hexdigest() self.hashing_algo = 'sha256' self.img_sha256 = hashlib.sha256(self.img_file).hexdigest() def 
test_old_style_3_args(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size) self.assertEqual(tuple, type(x)) self.assertEqual(4, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertTrue(dict, type(x[3])) self.assertIsNone(x[3]['context_obj']) self.assertIsNone(x[3]['verifier_obj']) def test_old_style_4_args(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, self.fake_context) self.assertEqual(tuple, type(x)) self.assertEqual(4, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertTrue(dict, type(x[3])) self.assertEqual('context', x[3]['context_obj']) self.assertIsNone(x[3]['verifier_obj']) def test_old_style_5_args(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, self.fake_context, self.fake_verifier) self.assertEqual(tuple, type(x)) self.assertEqual(4, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertTrue(dict, type(x[3])) self.assertEqual('context', x[3]['context_obj']) self.assertEqual('verifier', x[3]['verifier_obj']) def test_old_style_3_args_kw_context(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, context=self.fake_context) self.assertEqual(tuple, type(x)) self.assertEqual(4, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertTrue(dict, type(x[3])) self.assertEqual('context', x[3]['context_obj']) self.assertIsNone(x[3]['verifier_obj']) def test_old_style_3_args_kw_verifier(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, verifier=self.fake_verifier) self.assertEqual(tuple, type(x)) self.assertEqual(4, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) 
self.assertTrue(dict, type(x[3])) self.assertIsNone(x[3]['context_obj']) self.assertEqual('verifier', x[3]['verifier_obj']) def test_old_style_4_args_kw_verifier(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, self.fake_context, verifier=self.fake_verifier) self.assertEqual(tuple, type(x)) self.assertEqual(4, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertTrue(dict, type(x[3])) self.assertEqual('context', x[3]['context_obj']) self.assertEqual('verifier', x[3]['verifier_obj']) def test_old_style_3_args_kws_context_verifier(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, context=self.fake_context, verifier=self.fake_verifier) self.assertEqual(tuple, type(x)) self.assertEqual(4, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertTrue(dict, type(x[3])) self.assertEqual('context', x[3]['context_obj']) self.assertEqual('verifier', x[3]['verifier_obj']) def test_old_style_all_kw_in_order(self): x = self.fake_store.add(image_id=self.img_id, image_file=self.img_file, image_size=self.img_size, context=self.fake_context, verifier=self.fake_verifier) self.assertEqual(tuple, type(x)) self.assertEqual(4, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertTrue(dict, type(x[3])) self.assertEqual('context', x[3]['context_obj']) self.assertEqual('verifier', x[3]['verifier_obj']) def test_old_style_all_kw_random_order(self): x = self.fake_store.add(image_file=self.img_file, context=self.fake_context, image_size=self.img_size, verifier=self.fake_verifier, image_id=self.img_id) self.assertEqual(tuple, type(x)) self.assertEqual(4, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertTrue(dict, type(x[3])) 
self.assertEqual('context', x[3]['context_obj']) self.assertEqual('verifier', x[3]['verifier_obj']) def test_new_style_6_args(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, self.hashing_algo, self.fake_context, self.fake_verifier) self.assertEqual(tuple, type(x)) self.assertEqual(5, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertEqual(self.img_sha256, x[3]) self.assertTrue(dict, type(x[4])) self.assertEqual('context', x[4]['context_obj']) self.assertEqual('verifier', x[4]['verifier_obj']) def test_new_style_3_args_kw_hash(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, hashing_algo=self.hashing_algo) self.assertEqual(tuple, type(x)) self.assertEqual(5, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertEqual(self.img_sha256, x[3]) self.assertTrue(dict, type(x[4])) self.assertIsNone(x[4]['context_obj']) self.assertIsNone(x[4]['verifier_obj']) def test_new_style_3_args_kws_context_hash(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, context=self.fake_context, hashing_algo=self.hashing_algo) self.assertEqual(tuple, type(x)) self.assertEqual(5, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertEqual(self.img_sha256, x[3]) self.assertTrue(dict, type(x[4])) self.assertEqual('context', x[4]['context_obj']) self.assertIsNone(x[4]['verifier_obj']) def test_new_style_3_args_kws_verifier_hash(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, hashing_algo=self.hashing_algo, verifier=self.fake_verifier) self.assertEqual(tuple, type(x)) self.assertEqual(5, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertEqual(self.img_sha256, x[3]) 
self.assertTrue(dict, type(x[4])) self.assertIsNone(x[4]['context_obj']) self.assertEqual('verifier', x[4]['verifier_obj']) def test_new_style_3_args_kws_hash_context_verifier(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, hashing_algo=self.hashing_algo, context=self.fake_context, verifier=self.fake_verifier) self.assertEqual(tuple, type(x)) self.assertEqual(5, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertEqual(self.img_sha256, x[3]) self.assertTrue(dict, type(x[4])) self.assertEqual('context', x[4]['context_obj']) self.assertEqual('verifier', x[4]['verifier_obj']) def test_new_style_4_args(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, self.hashing_algo) self.assertEqual(tuple, type(x)) self.assertEqual(5, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertEqual(self.img_sha256, x[3]) self.assertTrue(dict, type(x[4])) self.assertIsNone(x[4]['context_obj']) self.assertIsNone(x[4]['verifier_obj']) def test_new_style_4_args_kw_context(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, self.hashing_algo, context=self.fake_context) self.assertEqual(tuple, type(x)) self.assertEqual(5, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertEqual(self.img_sha256, x[3]) self.assertTrue(dict, type(x[4])) self.assertEqual('context', x[4]['context_obj']) self.assertIsNone(x[4]['verifier_obj']) def test_new_style_4_args_kws_verifier_context(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, self.hashing_algo, context=self.fake_context, verifier=self.fake_verifier) self.assertEqual(tuple, type(x)) self.assertEqual(5, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) 
self.assertEqual(self.img_sha256, x[3]) self.assertTrue(dict, type(x[4])) self.assertEqual('context', x[4]['context_obj']) self.assertEqual('verifier', x[4]['verifier_obj']) def test_new_style_5_args_kw_verifier(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, self.hashing_algo, self.fake_context, verifier=self.fake_verifier) self.assertEqual(tuple, type(x)) self.assertEqual(5, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertEqual(self.img_sha256, x[3]) self.assertTrue(dict, type(x[4])) self.assertEqual('context', x[4]['context_obj']) self.assertEqual('verifier', x[4]['verifier_obj']) def test_new_style_6_args_no_kw(self): x = self.fake_store.add(self.img_id, self.img_file, self.img_size, self.hashing_algo, self.fake_context, self.fake_verifier) self.assertEqual(tuple, type(x)) self.assertEqual(5, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertEqual(self.img_sha256, x[3]) self.assertTrue(dict, type(x[4])) self.assertEqual('context', x[4]['context_obj']) self.assertEqual('verifier', x[4]['verifier_obj']) def test_new_style_all_kw_in_order(self): x = self.fake_store.add(image_id=self.img_id, image_file=self.img_file, image_size=self.img_size, hashing_algo=self.hashing_algo, context=self.fake_context, verifier=self.fake_verifier) self.assertEqual(tuple, type(x)) self.assertEqual(5, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertEqual(self.img_sha256, x[3]) self.assertTrue(dict, type(x[4])) self.assertEqual('context', x[4]['context_obj']) self.assertEqual('verifier', x[4]['verifier_obj']) def test_new_style_all_kw_random_order(self): x = self.fake_store.add(hashing_algo=self.hashing_algo, image_file=self.img_file, context=self.fake_context, image_size=self.img_size, verifier=self.fake_verifier, 
image_id=self.img_id) self.assertEqual(tuple, type(x)) self.assertEqual(5, len(x)) self.assertIn(self.img_id, x[0]) self.assertEqual(self.img_size, x[1]) self.assertEqual(self.img_checksum, x[2]) self.assertEqual(self.img_sha256, x[3]) self.assertTrue(dict, type(x[4])) self.assertEqual('context', x[4]['context_obj']) self.assertEqual('verifier', x[4]['verifier_obj']) def test_neg_too_few_args(self): self.assertRaises(TypeError, self.fake_store.add, self.img_id, self.img_file) def test_neg_too_few_kw_args(self): self.assertRaises(TypeError, self.fake_store.add, self.img_file, self.img_size, self.fake_context, self.fake_verifier, image_id=self.img_id) def test_neg_bogus_kw_args(self): self.assertRaises(TypeError, self.fake_store.add, thrashing_algo=self.hashing_algo, image_file=self.img_file, context=self.fake_context, image_size=self.img_size, verifier=self.fake_verifier, image_id=self.img_id)
43.994764
79
0.594371
2,188
16,806
4.35649
0.075411
0.112358
0.107637
0.124633
0.835711
0.831305
0.829627
0.829627
0.826479
0.826479
0
0.021146
0.282459
16,806
381
80
44.110236
0.769301
0.041295
0
0.755352
0
0
0.048471
0
0
0
0
0
0.559633
1
0.079511
false
0
0.012232
0
0.11315
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
8
105ed6e108255e3a17a0719d906ef6b7af77821a
5,275
py
Python
audit_history/tests/test_audit_history.py
smileback-com/django-model-audit-history
0a127303d7bf4c998772c20f54611265f03f0d71
[ "MIT" ]
1
2019-01-23T11:47:33.000Z
2019-01-23T11:47:33.000Z
audit_history/tests/test_audit_history.py
nexto/django-model-audit-history
0a127303d7bf4c998772c20f54611265f03f0d71
[ "MIT" ]
3
2019-01-25T11:11:17.000Z
2019-05-20T11:44:22.000Z
audit_history/tests/test_audit_history.py
smileback-com/django-model-audit-history
0a127303d7bf4c998772c20f54611265f03f0d71
[ "MIT" ]
1
2019-01-24T14:17:26.000Z
2019-01-24T14:17:26.000Z
"""Tests for the audit-history record helpers exposed on ``BlogPost``."""
import datetime

from .main import BaseTestSetUp
from test_app.models import BlogPost
from audit_history.settings import TIMESTAMP_FORMAT


class AuditHistoryTest(BaseTestSetUp):
    """Exercise save/append/update audit-record APIs on the fixture post."""

    # ------------------------------------------------------------------
    # helpers
    # ------------------------------------------------------------------

    def _first_record(self):
        """Return the first audit record of the fixture blog post."""
        return self.blog_post.history[0]

    def _parse_timestamp(self, fmt):
        """Parse the first record's timestamp with *fmt*; None on mismatch."""
        stamp = self._first_record().get('timestamp')
        try:
            return datetime.datetime.strptime(stamp, fmt)
        except ValueError:
            return None

    def _assert_staff_actor(self):
        """Assert the first record's actor matches the staff test user."""
        record = self._first_record()
        self.assertEqual(self.user_name, record['actor']['name'])
        self.assertEqual(self.user_email, record['actor']['email'])
        self.assertEqual(True, record['actor']['is_staff'])

    def _assert_payload_recorded(self, keys=('a', 'b')):
        """Assert the first record's payload echoes ``self.payload`` keys."""
        record = self._first_record()
        for key in keys:
            self.assertEqual(self.payload[key], record['payload'][key])

    # ------------------------------------------------------------------
    # tests
    # ------------------------------------------------------------------

    def test_create_instance(self):
        self.blog_post.save()
        self.assertEqual(1, BlogPost.objects.count())

    def test_empty_history_after_save(self):
        # A plain save() must not create any audit records.
        self.blog_post.save()
        self.assertEqual([], self.blog_post.history)

    def test_default_timestamp_in_audit_history(self):
        # The recorded timestamp parses with the configured format.
        self.blog_post.save_with_audit_record(None, self.history_event)
        self.assertTrue(self._parse_timestamp(TIMESTAMP_FORMAT))

    def test_not_used_timestamp_in_audit_history(self):
        # ...and does not parse with an unrelated format.
        self.blog_post.save_with_audit_record(None, self.history_event)
        self.assertFalse(self._parse_timestamp(self.not_used_timestamp_format))

    def test_history_after_save_with_audit(self):
        self.blog_post.save_with_audit_record(None, self.history_event,
                                              payload=self.payload)
        self._assert_payload_recorded(keys=('a',))

    def test_save_with_audit_record_with_staff_user(self):
        self.blog_post.save_with_audit_record(self.user, self.history_event,
                                              payload=self.payload)
        self._assert_staff_actor()
        self._assert_payload_recorded()

    def test_save_with_audit_record_without_user(self):
        self.blog_post.save_with_audit_record(None, self.history_event,
                                              payload=self.payload)
        self.assertEqual(None, self._first_record()['actor'])
        self._assert_payload_recorded()

    def test_n_usages_of_save_with_audit(self):
        # One record is appended per call.
        for _ in range(100):
            self.blog_post.save_with_audit_record(None, self.history_event,
                                                  payload=self.payload)
        self.assertEqual(100, len(self.blog_post.history))

    def test_append_audit_record_with_staff_user(self):
        self.blog_post.save()
        self.blog_post.append_audit_record(self.user, self.history_event,
                                           payload=self.payload)
        self._assert_staff_actor()
        self._assert_payload_recorded()

    def test_append_audit_record_without_user(self):
        self.blog_post.save()
        self.blog_post.append_audit_record(None, self.history_event,
                                           payload=self.payload)
        self.assertEqual(None, self._first_record()['actor'])
        self._assert_payload_recorded()

    def test_n_usages_of_append_audit_record(self):
        self.blog_post.save()
        for _ in range(100):
            self.blog_post.append_audit_record(None, self.history_event,
                                               payload=self.payload)
        self.assertEqual(100, len(self.blog_post.history))

    def test_update_with_audit_record_with_staff_user(self):
        self.blog_post.save()
        self.blog_post.update_with_audit_record(self.user, self.history_event,
                                                **self.payload_for_update)
        self._assert_staff_actor()
        record = self._first_record()
        self.assertEqual(self.payload_for_update['title'], record['title'])
        self.assertEqual(self.payload_for_update['position'],
                         int(record['position']))

    def test_update_with_audit_record_without_user(self):
        self.blog_post.save()
        self.blog_post.update_with_audit_record(None, self.history_event,
                                                **self.payload_for_update)
        record = self._first_record()
        self.assertEqual(None, record['actor'])
        self.assertEqual(self.payload_for_update['title'], record['title'])
        self.assertEqual(self.payload_for_update['position'],
                         int(record['position']))

    def test_n_usages_of_update_with_audit_record(self):
        self.blog_post.save()
        for _ in range(100):
            self.blog_post.update_with_audit_record(None, self.history_event,
                                                    **self.payload_for_update)
        self.assertEqual(100, len(self.blog_post.history))
51.715686
105
0.708815
730
5,275
4.838356
0.094521
0.115515
0.173273
0.166761
0.905719
0.896942
0.881937
0.835221
0.825878
0.814553
0
0.010295
0.152986
5,275
101
106
52.227723
0.780215
0
0
0.666667
0
0
0.049668
0
0
0
0
0
0.380952
1
0.166667
false
0
0.047619
0
0.22619
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
10b906d7e2f402082ce9a44dd1e77bcab1a22325
378
py
Python
terrascript/data/stackpath.py
mjuenema/python-terrascript
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
[ "BSD-2-Clause" ]
507
2017-07-26T02:58:38.000Z
2022-01-21T12:35:13.000Z
terrascript/data/stackpath.py
mjuenema/python-terrascript
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
[ "BSD-2-Clause" ]
135
2017-07-20T12:01:59.000Z
2021-10-04T22:25:40.000Z
terrascript/data/stackpath.py
mjuenema/python-terrascript
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
[ "BSD-2-Clause" ]
81
2018-02-20T17:55:28.000Z
2022-01-31T07:08:40.000Z
# terrascript/data/stackpath.py # Automatically generated by tools/makecode.py (24-Sep-2021 15:27:49 UTC) # # For imports without namespace, e.g. # # >>> import terrascript.data.stackpath # # instead of # # >>> import terrascript.data.stackpath.stackpath # # This is only available for 'official' and 'partner' providers. from terrascript.data.stackpath.stackpath import *
25.2
73
0.743386
49
378
5.734694
0.693878
0.213523
0.341637
0.213523
0
0
0
0
0
0
0
0.036697
0.134921
378
14
74
27
0.82263
0.796296
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
5e16812609f0418e9fb2003df9152566c3346074
25,687
py
Python
tests/functional-tests/remote_snapshot_test.py
leoluan1/ibm-spectrum-scale-csi
27118d22292030eb89df6850eec742b146ce928f
[ "Apache-2.0" ]
46
2020-01-17T19:08:44.000Z
2022-03-29T15:19:07.000Z
tests/functional-tests/remote_snapshot_test.py
leoluan1/ibm-spectrum-scale-csi
27118d22292030eb89df6850eec742b146ce928f
[ "Apache-2.0" ]
428
2020-01-17T19:17:00.000Z
2022-03-31T12:47:11.000Z
tests/functional-tests/remote_snapshot_test.py
leoluan1/ibm-spectrum-scale-csi
27118d22292030eb89df6850eec742b146ce928f
[ "Apache-2.0" ]
42
2020-01-17T20:39:31.000Z
2022-03-25T11:38:27.000Z
import copy import logging import pytest import utils.fileset_functions as ff import scale_operator as scaleop LOGGER = logging.getLogger() @pytest.fixture(scope='session', autouse=True) def values(request): global data, remote_data, snapshot_object, kubeconfig_value # are required in every testcase kubeconfig_value, clusterconfig_value, operator_namespace, test_namespace, runslow_val = scaleop.get_cmd_values(request) data = scaleop.read_driver_data(clusterconfig_value, test_namespace, operator_namespace, kubeconfig_value) operator_data = scaleop.read_operator_data(clusterconfig_value, operator_namespace, kubeconfig_value) keep_objects = data["keepobjects"] if not("remote" in data): LOGGER.error("remote data is not provided in cr file") assert False remote_data = get_remote_data(data) ff.cred_check(data) ff.cred_check(remote_data) ff.set_data(remote_data) operator = scaleop.Scaleoperator(kubeconfig_value, operator_namespace) operator_object = scaleop.Scaleoperatorobject(operator_data, kubeconfig_value) condition = scaleop.check_ns_exists(kubeconfig_value, operator_namespace) if condition is True: if not(operator_object.check(data["csiscaleoperator_name"])): LOGGER.error("Operator custom object is not deployed succesfully") assert False else: operator.create() operator.check() scaleop.check_nodes_available(operator_data["pluginNodeSelector"], "pluginNodeSelector") scaleop.check_nodes_available( operator_data["provisionerNodeSelector"], "provisionerNodeSelector") scaleop.check_nodes_available( operator_data["attacherNodeSelector"], "attacherNodeSelector") operator_object.create() val = operator_object.check() if val is True: LOGGER.info("Operator custom object is deployed succesfully") else: LOGGER.error("Operator custom object is not deployed succesfully") assert False if runslow_val: value_pvc = [{"access_modes": "ReadWriteMany", "storage": "1Gi"}, {"access_modes": "ReadWriteOnce", "storage": "1Gi"}] else: value_pvc = [{"access_modes": "ReadWriteMany", "storage": 
"1Gi"}] value_vs_class = {"deletionPolicy": "Delete"} number_of_snapshots = 1 snapshot_object = scaleop.Snapshot(kubeconfig_value, test_namespace, keep_objects, value_pvc, value_vs_class, number_of_snapshots, data["image_name"], remote_data["id"], data["pluginNodeSelector"]) ff.create_dir(remote_data["volDirBasePath"]) yield if condition is False and not(keep_objects): operator_object.delete() operator.delete() if(ff.fileset_exists(data)): ff.delete_fileset(data) def get_remote_data(data_passed): remote_data = copy.deepcopy(data_passed) remote_data["remoteFs_remote_name"] = ff.get_remoteFs_remotename(data) if remote_data["remoteFs_remote_name"] is None: LOGGER.error("Unable to get remoteFs , name on remote cluster") assert False remote_data["primaryFs"] = remote_data["remoteFs_remote_name"] remote_data["id"] = remote_data["remoteid"] remote_data["port"] = remote_data["remote_port"] for cluster in remote_data["clusters"]: if cluster["id"] == remote_data["remoteid"]: remote_data["guiHost"] = cluster["restApi"][0]["guiHost"] remote_sec_name = cluster["secrets"] remote_data["username"] = remote_data["remote_username"][remote_sec_name] remote_data["password"] = remote_data["remote_password"][remote_sec_name] remote_data["volDirBasePath"] = remote_data["r_volDirBasePath"] remote_data["parentFileset"] = remote_data["r_parentFileset"] remote_data["gid_name"] = remote_data["r_gid_name"] remote_data["uid_name"] = remote_data["r_uid_name"] remote_data["gid_number"] = remote_data["r_gid_number"] remote_data["uid_number"] = remote_data["r_uid_number"] remote_data["inodeLimit"] = remote_data["r_inodeLimit"] # for get_mount_point function remote_data["type_remote"] = {"username": data_passed["username"], "password": data_passed["password"], "port": data_passed["port"], "guiHost": data_passed["guiHost"]} return remote_data @pytest.mark.regression def test_get_version(): LOGGER.info("Remote Cluster Details:") LOGGER.info("-----------------------") ff.get_scale_version(remote_data) 
LOGGER.info("Local Cluster Details:") LOGGER.info("-----------------------") ff.get_scale_version(data) scaleop.get_kubernetes_version(kubeconfig_value) scaleop.scale_function.get_operator_image() scaleop.ob.get_driver_image() @pytest.mark.regression def test_snapshot_static_pass_1(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} snapshot_object.test_static(value_sc, test_restore=True) @pytest.mark.regression def test_snapshot_static_multiple_snapshots(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} snapshot_object.test_static(value_sc, test_restore=True, number_of_snapshots=3) def test_snapshot_static_pass_3(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "gid": data["r_gid_number"]} snapshot_object.test_static(value_sc, test_restore=True) def test_snapshot_static_pass_4(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "inodeLimit": data["r_inodeLimit"]} snapshot_object.test_static(value_sc, test_restore=True) def test_snapshot_static_pass_5(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "inodeLimit": data["r_inodeLimit"], "uid": data["r_uid_number"]} snapshot_object.test_static(value_sc, test_restore=True) def test_snapshot_static_pass_6(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "gid": data["r_gid_number"], "uid": data["r_uid_number"]} snapshot_object.test_static(value_sc, test_restore=True) def test_snapshot_static_pass_7(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "inodeLimit": data["r_inodeLimit"], "gid": data["r_gid_number"]} snapshot_object.test_static(value_sc, test_restore=True) def test_snapshot_static_pass_8(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "inodeLimit": data["r_inodeLimit"], "uid": data["r_uid_number"], "gid": data["r_gid_number"]} snapshot_object.test_static(value_sc, 
test_restore=True) def test_snapshot_static_pass_9(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "uid": data["r_uid_number"]} snapshot_object.test_static(value_sc, test_restore=True) def test_snapshot_static_pass_10(): value_sc = {"volBackendFs": data["remoteFs"], "inodeLimit": data["r_inodeLimit"], "clusterId": data["remoteid"], "filesetType": "independent"} snapshot_object.test_static(value_sc, test_restore=True) def test_snapshot_static_pass_11(): value_sc = {"volBackendFs": data["remoteFs"], "gid": data["r_gid_number"], "uid": data["r_uid_number"], "clusterId": data["remoteid"], "filesetType": "independent", "inodeLimit": data["r_inodeLimit"]} snapshot_object.test_static(value_sc, test_restore=True) def test_snapshot_static_pass_12(): value_sc = {"clusterId": data["remoteid"], "gid": data["r_gid_number"], "uid": data["r_uid_number"], "volBackendFs": data["remoteFs"]} snapshot_object.test_static(value_sc, test_restore=True) def test_snapshot_static_pass_13(): value_sc = {"clusterId": data["remoteid"], "uid": data["r_uid_number"], "inodeLimit": data["r_inodeLimit"], "volBackendFs": data["remoteFs"]} snapshot_object.test_static(value_sc, test_restore=True) def test_snapshot_static_pass_14(): value_sc = {"clusterId": data["remoteid"], "gid": data["r_gid_number"], "inodeLimit": data["r_inodeLimit"], "volBackendFs": data["remoteFs"]} snapshot_object.test_static(value_sc, test_restore=True) def test_snapshot_static_pass_15(): value_sc = {"clusterId": data["remoteid"], "volBackendFs": data["remoteFs"], "gid": data["r_gid_number"], "uid": data["r_uid_number"], "inodeLimit": data["r_inodeLimit"]} snapshot_object.test_static(value_sc, test_restore=True) def test_snapshot_static_pass_16(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} snapshot_object.test_static(value_sc, test_restore=False) def test_snapshot_static_pass_17(): value_sc = {"volBackendFs": data["remoteFs"], "gid": data["r_gid_number"], "uid": 
data["r_uid_number"], "clusterId": data["remoteid"], "filesetType": "independent", "inodeLimit": data["r_inodeLimit"]} snapshot_object.test_static(value_sc, test_restore=False) def test_snapshot_static_pass_18(): value_sc = {"clusterId": data["remoteid"], "gid": data["r_gid_number"], "inodeLimit": data["r_inodeLimit"], "volBackendFs": data["remoteFs"]} snapshot_object.test_static(value_sc, test_restore=False) @pytest.mark.regression def test_snapshot_dynamic_pass_1(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_2(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} snapshot_object.test_dynamic(value_sc, test_restore=True, value_vs_class={"deletionPolicy": "Retain"}) @pytest.mark.regression def test_snapshot_dynamic_expected_fail_1(): value_sc = {"volBackendFs": data["remoteFs"], "filesetType": "dependent", "clusterId": data["remoteid"]} snapshot_object.test_dynamic(value_sc, test_restore=False, reason="Volume snapshot can only be created when source volume is independent fileset") @pytest.mark.regression def test_snapshot_dynamic_expected_fail_2(): value_sc = {"volBackendFs": data["remoteFs"], "volDirBasePath": data["r_volDirBasePath"]} snapshot_object.test_dynamic(value_sc, test_restore=False, reason="Volume snapshot can only be created when source volume is independent fileset") @pytest.mark.regression def test_snapshot_dynamic_multiple_snapshots(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} snapshot_object.test_dynamic(value_sc, test_restore=True, number_of_snapshots=3) @pytest.mark.slow def test_snapshot_dynamic_multiple_snapshots_256(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} snapshot_object.test_dynamic(value_sc, test_restore=True, number_of_snapshots=256) @pytest.mark.slow def test_snapshot_dynamic_multiple_snapshots_257(): value_sc = 
{"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} snapshot_object.test_dynamic(value_sc, test_restore=True, number_of_snapshots=257) def test_snapshot_dynamic_pass_3(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "gid": data["r_gid_number"]} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_4(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "inodeLimit": data["r_inodeLimit"]} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_5(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "inodeLimit": data["r_inodeLimit"], "uid": data["r_uid_number"]} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_6(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "gid": data["r_gid_number"], "uid": data["r_uid_number"]} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_7(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "inodeLimit": data["r_inodeLimit"], "gid": data["r_gid_number"]} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_8(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "inodeLimit": data["r_inodeLimit"], "uid": data["r_uid_number"], "gid": data["r_gid_number"]} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_9(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "uid": data["r_uid_number"]} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_10(): value_sc = {"volBackendFs": data["remoteFs"], "inodeLimit": data["r_inodeLimit"], "clusterId": data["remoteid"], "filesetType": "independent"} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_11(): 
value_sc = {"volBackendFs": data["remoteFs"], "gid": data["r_gid_number"], "uid": data["r_uid_number"], "clusterId": data["remoteid"], "filesetType": "independent", "inodeLimit": data["r_inodeLimit"]} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_12(): value_sc = {"clusterId": data["remoteid"], "gid": data["r_gid_number"], "uid": data["r_uid_number"], "volBackendFs": data["remoteFs"]} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_13(): value_sc = {"clusterId": data["remoteid"], "uid": data["r_uid_number"], "inodeLimit": data["r_inodeLimit"], "volBackendFs": data["remoteFs"]} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_14(): value_sc = {"clusterId": data["remoteid"], "gid": data["r_gid_number"], "inodeLimit": data["r_inodeLimit"], "volBackendFs": data["remoteFs"]} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_15(): value_sc = {"clusterId": data["remoteid"], "volBackendFs": data["remoteFs"], "gid": data["r_gid_number"], "uid": data["r_uid_number"], "inodeLimit": data["r_inodeLimit"]} snapshot_object.test_dynamic(value_sc, test_restore=True) def test_snapshot_dynamic_pass_16(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} snapshot_object.test_dynamic(value_sc, test_restore=False) def test_snapshot_dynamic_pass_17(): value_sc = {"volBackendFs": data["remoteFs"], "gid": data["r_gid_number"], "uid": data["r_uid_number"], "clusterId": data["remoteid"], "filesetType": "independent", "inodeLimit": data["r_inodeLimit"]} snapshot_object.test_dynamic(value_sc, test_restore=False) def test_snapshot_dynamic_pass_18(): value_sc = {"clusterId": data["remoteid"], "gid": data["r_gid_number"], "inodeLimit": data["r_inodeLimit"], "volBackendFs": data["remoteFs"]} snapshot_object.test_dynamic(value_sc, test_restore=False) @pytest.mark.regression def 
test_snapshot_dynamic_different_sc_1(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} restore_sc = {"volBackendFs": data["remoteFs"], "volDirBasePath": data["r_volDirBasePath"]} snapshot_object.test_dynamic(value_sc, test_restore=True, restore_sc=restore_sc) @pytest.mark.regression def test_snapshot_dynamic_different_sc_2(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} restore_sc = {"volBackendFs": data["remoteFs"], "filesetType": "dependent", "clusterId": data["remoteid"]} snapshot_object.test_dynamic(value_sc, test_restore=True, restore_sc=restore_sc) def test_snapshot_dynamic_different_sc_3(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "inodeLimit": data["r_inodeLimit"], "uid": data["r_uid_number"], "gid": data["r_gid_number"]} restore_sc = {"volBackendFs": data["remoteFs"], "volDirBasePath": data["r_volDirBasePath"]} snapshot_object.test_dynamic(value_sc, test_restore=True, restore_sc=restore_sc) def test_snapshot_dynamic_different_sc_4(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "inodeLimit": data["r_inodeLimit"], "uid": data["r_uid_number"], "gid": data["r_gid_number"]} restore_sc = {"volBackendFs": data["remoteFs"], "filesetType": "dependent", "clusterId": data["remoteid"]} snapshot_object.test_dynamic(value_sc, test_restore=True, restore_sc=restore_sc) def test_snapshot_dynamic_different_sc_5(): value_sc = {"volBackendFs": data["remoteFs"], "inodeLimit": data["r_inodeLimit"], "clusterId": data["remoteid"], "filesetType": "independent"} restore_sc = {"volBackendFs": data["remoteFs"], "volDirBasePath": data["r_volDirBasePath"]} snapshot_object.test_dynamic(value_sc, test_restore=True, restore_sc=restore_sc) def test_snapshot_dynamic_different_sc_6(): value_sc = {"volBackendFs": data["remoteFs"], "inodeLimit": data["r_inodeLimit"], "clusterId": data["remoteid"], "filesetType": "independent"} restore_sc = {"volBackendFs": 
data["remoteFs"], "filesetType": "dependent", "clusterId": data["remoteid"]} snapshot_object.test_dynamic(value_sc, test_restore=True, restore_sc=restore_sc) def test_snapshot_static_different_sc_1(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} restore_sc = {"volBackendFs": data["remoteFs"], "volDirBasePath": data["r_volDirBasePath"]} snapshot_object.test_static(value_sc, test_restore=True, restore_sc=restore_sc) def test_snapshot_static_different_sc_2(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} restore_sc = {"volBackendFs": data["remoteFs"], "filesetType": "dependent", "clusterId": data["remoteid"]} snapshot_object.test_static(value_sc, test_restore=True, restore_sc=restore_sc) def test_snapshot_static_different_sc_3(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "inodeLimit": data["r_inodeLimit"], "uid": data["r_uid_number"], "gid": data["r_gid_number"]} restore_sc = {"volBackendFs": data["remoteFs"], "volDirBasePath": data["r_volDirBasePath"]} snapshot_object.test_static(value_sc, test_restore=True, restore_sc=restore_sc) def test_snapshot_static_different_sc_4(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "inodeLimit": data["r_inodeLimit"], "uid": data["r_uid_number"], "gid": data["r_gid_number"]} restore_sc = {"volBackendFs": data["remoteFs"], "filesetType": "dependent", "clusterId": data["remoteid"]} snapshot_object.test_static(value_sc, test_restore=True, restore_sc=restore_sc) def test_snapshot_static_different_sc_5(): value_sc = {"volBackendFs": data["remoteFs"], "inodeLimit": data["r_inodeLimit"], "clusterId": data["remoteid"], "filesetType": "independent"} restore_sc = {"volBackendFs": data["remoteFs"], "volDirBasePath": data["r_volDirBasePath"]} snapshot_object.test_static(value_sc, test_restore=True, restore_sc=restore_sc) def test_snapshot_static_different_sc_6(): value_sc = {"volBackendFs": data["remoteFs"], 
"inodeLimit": data["r_inodeLimit"], "clusterId": data["remoteid"], "filesetType": "independent"} restore_sc = {"volBackendFs": data["remoteFs"], "filesetType": "dependent", "clusterId": data["remoteid"]} snapshot_object.test_static(value_sc, test_restore=True, restore_sc=restore_sc) @pytest.mark.regression def test_snapshot_dynamic_nodeclass_1(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} restore_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "nodeClass": "GUI_MGMT_SERVERS"} snapshot_object.test_dynamic(value_sc, test_restore=True, restore_sc=restore_sc) @pytest.mark.regression def test_snapshot_dynamic_nodeclass_2(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} restore_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "nodeClass": "GUI_SERVERS"} snapshot_object.test_dynamic(value_sc, test_restore=True, restore_sc=restore_sc) def test_snapshot_dynamic_nodeclass_3(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} restore_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "nodeClass": "randomnodeclassx"} restore_pvc = {"access_modes": "ReadWriteMany", "storage": "1Gi", "reason": "NotFound desc = nodeclass"} snapshot_object.test_dynamic(value_sc, test_restore=True, restore_sc=restore_sc, restore_pvc=restore_pvc) def test_snapshot_static_nodeclass_1(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} restore_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "nodeClass": "GUI_MGMT_SERVERS"} snapshot_object.test_static(value_sc, test_restore=True, restore_sc=restore_sc) def test_snapshot_static_nodeclass_2(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} restore_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "nodeClass": "GUI_SERVERS"} snapshot_object.test_static(value_sc, test_restore=True, 
restore_sc=restore_sc) def test_snapshot_static_nodeclass_3(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} restore_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "nodeClass": "randomnodeclassx"} restore_pvc = {"access_modes": "ReadWriteMany", "storage": "1Gi", "reason": "NotFound desc = nodeclass"} snapshot_object.test_static(value_sc, test_restore=True, restore_sc=restore_sc, restore_pvc=restore_pvc) def test_snapshot_dynamic_permissions_777_independent(): LOGGER.warning("Testcase will fail if scale version < 5.1.1-4") value_pod = {"mount_path": "/usr/share/nginx/html/scale", "read_only": "False", "sub_path": ["sub_path_mnt"], "volumemount_readonly": [False]} value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "permissions": "777", "gid": data["r_gid_number"], "uid": data["r_uid_number"]} snapshot_object.test_dynamic(value_sc, test_restore=True, value_pod=value_pod) def test_snapshot_dynamic_volume_expansion_1(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "allow_volume_expansion": True} value_pvc = [{"access_modes": "ReadWriteMany", "storage": "1Gi", "presnap_volume_expansion_storage": ["2Gi"], "post_presnap_volume_expansion_storage": ["5Gi", "15Gi"], "postsnap_volume_expansion_storage": ["10Gi", "15Gi"]}] snapshot_object.test_dynamic(value_sc, test_restore=True, value_pvc=value_pvc) def test_snapshot_dynamic_volume_expansion_2(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "allow_volume_expansion": True} restore_sc = {"volBackendFs": data["remoteFs"], "filesetType": "dependent", "clusterId": data["remoteid"], "allow_volume_expansion": True} value_pvc = [{"access_modes": "ReadWriteMany", "storage": "1Gi", "presnap_volume_expansion_storage": ["3Gi"], "post_presnap_volume_expansion_storage": ["5Gi", "12Gi"], "postsnap_volume_expansion_storage": ["8Gi", "12Gi"]}] snapshot_object.test_dynamic(value_sc, test_restore=True, 
value_pvc=value_pvc, restore_sc=restore_sc) def test_snapshot_dynamic_volume_expansion_3(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"], "allow_volume_expansion": True} restore_sc = {"volBackendFs": data["remoteFs"], "volDirBasePath": data["r_volDirBasePath"], "allow_volume_expansion": True} value_pvc = [{"access_modes": "ReadWriteMany", "storage": "1Gi", "presnap_volume_expansion_storage": ["2Gi"], "post_presnap_volume_expansion_storage": ["5Gi", "15Gi"], "postsnap_volume_expansion_storage": ["10Gi", "15Gi"]}] snapshot_object.test_dynamic(value_sc, test_restore=True, value_pvc=value_pvc, restore_sc=restore_sc) def test_snapshot_dynamic_volume_cloning_1(): value_sc = {"volBackendFs": data["remoteFs"], "clusterId": data["remoteid"]} value_pvc = [{"access_modes": "ReadWriteMany", "storage": "1Gi"}] value_clone_passed = {"clone_pvc": [{"access_modes": "ReadWriteMany", "storage": "1Gi"}, {"access_modes": "ReadWriteOnce", "storage": "1Gi"}]} snapshot_object.test_dynamic(value_sc, test_restore=True, value_pvc=value_pvc, value_clone_passed=value_clone_passed)
46.874088
150
0.692685
3,003
25,687
5.571762
0.070929
0.05355
0.120488
0.114989
0.829249
0.816519
0.798291
0.793091
0.779465
0.76267
0
0.006352
0.160392
25,687
547
151
46.959781
0.769463
0.002297
0
0.591133
0
0
0.285854
0.021698
0
0
0
0
0.009852
1
0.165025
false
0.108374
0.012315
0
0.179803
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
7
eac0677cb910e01532d0d4b534681adc03eeeaaf
138
py
Python
topcoder/memory.py
leetcode-notes/daily-algorithms-practice
2a03499ed0b403d79f6c8451c9a839991b23e188
[ "Unlicense" ]
null
null
null
topcoder/memory.py
leetcode-notes/daily-algorithms-practice
2a03499ed0b403d79f6c8451c9a839991b23e188
[ "Unlicense" ]
null
null
null
topcoder/memory.py
leetcode-notes/daily-algorithms-practice
2a03499ed0b403d79f6c8451c9a839991b23e188
[ "Unlicense" ]
null
null
null
''' https://community.topcoder.com/stat?c=problem_statement&pm=16009https://community.topcoder.com/stat?c=problem_statement&pm=16009' '''
34.5
129
0.782609
19
138
5.578947
0.578947
0.320755
0.377358
0.45283
0.811321
0.811321
0.811321
0.811321
0
0
0
0.074074
0.021739
138
3
130
46
0.711111
0.934783
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
1
1
1
1
1
1
1
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
11
eadf20db5d51ec98e024641cbd9fd2865afe42de
36,763
py
Python
venv/lib/python3.6/site-packages/ansible_collections/cisco/ios/tests/unit/modules/network/ios/test_ios_bgp_address_family.py
usegalaxy-no/usegalaxy
75dad095769fe918eb39677f2c887e681a747f3a
[ "MIT" ]
1
2020-01-22T13:11:23.000Z
2020-01-22T13:11:23.000Z
venv/lib/python3.6/site-packages/ansible_collections/cisco/ios/tests/unit/modules/network/ios/test_ios_bgp_address_family.py
usegalaxy-no/usegalaxy
75dad095769fe918eb39677f2c887e681a747f3a
[ "MIT" ]
12
2020-02-21T07:24:52.000Z
2020-04-14T09:54:32.000Z
venv/lib/python3.6/site-packages/ansible_collections/cisco/ios/tests/unit/modules/network/ios/test_ios_bgp_address_family.py
usegalaxy-no/usegalaxy
75dad095769fe918eb39677f2c887e681a747f3a
[ "MIT" ]
null
null
null
# # (c) 2019, Ansible by Red Hat, inc # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # from __future__ import absolute_import, division, print_function __metaclass__ = type from ansible_collections.cisco.ios.tests.unit.compat.mock import patch from ansible_collections.cisco.ios.plugins.modules import ( ios_bgp_address_family, ) from ansible_collections.cisco.ios.tests.unit.modules.utils import ( set_module_args, ) from .ios_module import TestIosModule, load_fixture class TestIosBgpAddressFamilyModule(TestIosModule): module = ios_bgp_address_family def setUp(self): super(TestIosBgpAddressFamilyModule, self).setUp() self.mock_get_config = patch( "ansible_collections.ansible.netcommon.plugins.module_utils.network.common.network.Config.get_config" ) self.get_config = self.mock_get_config.start() self.mock_load_config = patch( "ansible_collections.ansible.netcommon.plugins.module_utils.network.common.network.Config.load_config" ) self.load_config = self.mock_load_config.start() self.mock_get_resource_connection_config = patch( "ansible_collections.ansible.netcommon.plugins.module_utils.network.common.cfg.base." "get_resource_connection" ) self.get_resource_connection_config = ( self.mock_get_resource_connection_config.start() ) self.mock_get_resource_connection_facts = patch( "ansible_collections.ansible.netcommon.plugins.module_utils.network.common.rm_base.resource_module_base." "get_resource_connection" ) self.get_resource_connection_facts = ( self.mock_get_resource_connection_facts.start() ) self.mock_edit_config = patch( "ansible_collections.cisco.ios.plugins.module_utils.network.ios.providers.providers.CliProvider.edit_config" ) self.edit_config = self.mock_edit_config.start() self.mock_execute_show_command = patch( "ansible_collections.cisco.ios.plugins.module_utils.network.ios.facts.bgp_address_family.bgp_address_family." 
"Bgp_address_familyFacts.get_bgp_address_family_data" ) self.execute_show_command = self.mock_execute_show_command.start() def tearDown(self): super(TestIosBgpAddressFamilyModule, self).tearDown() self.mock_get_resource_connection_config.stop() self.mock_get_resource_connection_facts.stop() self.mock_edit_config.stop() self.mock_get_config.stop() self.mock_load_config.stop() self.mock_execute_show_command.stop() def load_fixtures(self, commands=None): def load_from_file(*args, **kwargs): return load_fixture("ios_bgp_address_family.cfg") self.execute_show_command.side_effect = load_from_file def test_ios_bgp_address_family_merged(self): set_module_args( dict( config=dict( as_number="65000", address_family=[ dict( afi="ipv4", safi="multicast", vrf="blue", aggregate_address=[ dict( address="192.0.3.1", netmask="255.255.255.255", as_confed_set=True, ) ], bgp=dict( dampening=dict( penalty_half_time=10, reuse_route_val=10, suppress_route_val=10, max_suppress=10, ) ), neighbor=[ dict( address="198.51.100.1", remote_as=65100, route_maps=[ dict(name="test-route-out", out="true") ], prefix_lists=[ dict( name="AS65100-PREFIX-OUT", out="true", ) ], ) ], ), dict( afi="nsap", bgp=dict( aggregate_timer=20, dmzlink_bw=True, scan_time=10, ), default_metric=10, network=[ dict( address="192.0.1.1", route_map="test_route" ) ], ), ], ), state="merged", ) ) commands = [ "router bgp 65000", "address-family ipv4 multicast vrf blue", "bgp dampening 10 10 10 10", "aggregate-address 192.0.3.1 255.255.255.255 as-confed-set", "address-family nsap", "bgp aggregate-timer 20", "bgp dmzlink-bw", "bgp scan-time 10", "neighbor 198.51.100.1 remote-as 65100", "neighbor 198.51.100.1 route-map test-route-out out", "network 192.0.1.1 route-map test_route", "default-metric 10", ] result = self.execute_module(changed=True) self.assertEqual(sorted(result["commands"]), sorted(commands)) def test_ios_bgp_address_family_merged_2(self): set_module_args( dict( config=dict( as_number="65000", address_family=[ 
dict( afi="ipv4", safi="unicast", vrf="blue", neighbor=[ dict( address="192.0.3.1", remote_as=65001, soft_reconfiguration=True, prefix_list=dict( name="PREFIX-OUT", out=True ), ) ], network=[ dict(address="192.0.3.1", mask="255.255.255.0") ], ) ], ), state="merged", ) ) commands = [ "router bgp 65000", "address-family ipv4 unicast vrf blue", "neighbor 192.0.3.1 remote-as 65001", "neighbor 192.0.3.1 prefix-list PREFIX-OUT out", "neighbor 192.0.3.1 soft-reconfiguration inbound", "network 192.0.3.1 mask 255.255.255.0", ] result = self.execute_module(changed=True) self.assertEqual(sorted(result["commands"]), sorted(commands)) def test_ios_bgp_address_family_merged_idempotent(self): set_module_args( dict( config=dict( as_number="65000", address_family=[ dict( afi="ipv4", safi="multicast", vrf="blue", aggregate_address=[ dict( address="192.0.2.1", netmask="255.255.255.255", as_confed_set=True, ) ], bgp=dict( aggregate_timer=10, dampening=dict( penalty_half_time=1, reuse_route_val=1, suppress_route_val=1, max_suppress=1, ), slow_peer=[ dict(detection=dict(threshold=150)) ], ), neighbor=[ dict( activate=True, address="198.51.100.1", aigp=dict( send=dict( cost_community=dict( id=100, poi=dict( igp_cost=True, transitive=True, ), ) ) ), next_hop_self=True, nexthop_self=dict(all=True), prefix_lists=[ dict( name="AS65100-PREFIX-OUT", out="true", ) ], slow_peer=[ dict(detection=dict(threshold=150)) ], remote_as=10, route_maps=[ dict(name="test-out", out=True) ], route_server_client=True, ) ], network=[ dict( address="198.51.110.10", mask="255.255.255.255", backdoor=True, ) ], ), dict( afi="ipv4", safi="mdt", bgp=dict( dmzlink_bw=True, dampening=dict( penalty_half_time=1, reuse_route_val=10, suppress_route_val=100, max_suppress=5, ), soft_reconfig_backup=True, ), ), dict( afi="ipv4", safi="multicast", aggregate_address=[ dict( address="192.0.3.1", netmask="255.255.255.255", as_confed_set=True, ) ], default_metric=12, distance=dict(external=10, internal=10, local=100), network=[ 
dict( address="198.51.111.11", mask="255.255.255.255", route_map="test", ) ], table_map=dict(name="test_tableMap", filter=True), ), ], ), state="merged", ) ) self.execute_module(changed=False, commands=[]) def test_ios_bgp_address_family_replaced(self): set_module_args( dict( config=dict( as_number="65000", address_family=[ dict( afi="ipv4", safi="multicast", vrf="blue", aggregate_address=[ dict( address="192.0.2.1", netmask="255.255.255.255", as_confed_set=True, ) ], bgp=dict( aggregate_timer=10, slow_peer=[ dict(detection=dict(threshold=200)) ], ), redistribute=[dict(connected=dict(metric=10))], neighbor=[ dict( address="198.51.110.1", activate=True, remote_as=200, route_maps=[ dict( name="test-replaced-route", out=True, ) ], ) ], ) ], ), state="replaced", ) ) commands = [ "router bgp 65000", "address-family ipv4 multicast vrf blue", "no bgp dampening 1 1 1 1", "bgp slow-peer detection threshold 200", "no neighbor 198.51.100.1 activate", "no neighbor 198.51.100.1 next-hop-self all", "no neighbor 198.51.100.1 remote-as 10", "no neighbor 198.51.100.1 aigp send cost-community 100 poi igp-cost transitive", "no neighbor 198.51.100.1 route-server-client", "no neighbor 198.51.100.1 slow-peer detection threshold 150", "neighbor 198.51.110.1 activate", "neighbor 198.51.110.1 remote-as 200", "neighbor 198.51.110.1 route-map test-replaced-route out", "no network 198.51.110.10 mask 255.255.255.255 backdoor", ] result = self.execute_module(changed=True) self.assertEqual(sorted(result["commands"]), sorted(commands)) def test_ios_bgp_address_family_replaced_idempotent(self): set_module_args( dict( config=dict( as_number="65000", address_family=[ dict( afi="ipv4", safi="multicast", vrf="blue", aggregate_address=[ dict( address="192.0.2.1", netmask="255.255.255.255", as_confed_set=True, ) ], bgp=dict( aggregate_timer=10, dampening=dict( penalty_half_time=1, reuse_route_val=1, suppress_route_val=1, max_suppress=1, ), slow_peer=[ dict(detection=dict(threshold=150)) ], ), neighbor=[ 
dict( activate=True, address="198.51.100.1", aigp=dict( send=dict( cost_community=dict( id=100, poi=dict( igp_cost=True, transitive=True, ), ) ) ), nexthop_self=dict(all=True), prefix_lists=[ dict( name="AS65100-PREFIX-OUT", out="true", ) ], slow_peer=[ dict(detection=dict(threshold=150)) ], remote_as=10, route_maps=[ dict(name="test-out", out=True) ], route_server_client=True, ) ], network=[ dict( address="198.51.110.10", mask="255.255.255.255", backdoor=True, ) ], ), dict( afi="ipv4", safi="mdt", bgp=dict( dmzlink_bw=True, dampening=dict( penalty_half_time=1, reuse_route_val=10, suppress_route_val=100, max_suppress=5, ), soft_reconfig_backup=True, ), ), dict( afi="ipv4", safi="multicast", aggregate_address=[ dict( address="192.0.3.1", netmask="255.255.255.255", as_confed_set=True, ) ], default_metric=12, distance=dict(external=10, internal=10, local=100), network=[ dict( address="198.51.111.11", mask="255.255.255.255", route_map="test", ) ], table_map=dict(name="test_tableMap", filter=True), ), ], ), state="replaced", ) ) self.execute_module(changed=False, commands=[]) def test_ios_bgp_address_family_overridden_idempotent(self): set_module_args( dict( config=dict( as_number="65000", address_family=[ dict( afi="ipv4", safi="multicast", vrf="blue", aggregate_address=[ dict( address="192.0.2.1", netmask="255.255.255.255", as_confed_set=True, ) ], bgp=dict( aggregate_timer=10, dampening=dict( penalty_half_time=1, reuse_route_val=1, suppress_route_val=1, max_suppress=1, ), slow_peer=[ dict(detection=dict(threshold=150)) ], ), neighbor=[ dict( activate=True, address="198.51.100.1", aigp=dict( send=dict( cost_community=dict( id=100, poi=dict( igp_cost=True, transitive=True, ), ) ) ), nexthop_self=dict(all=True), prefix_lists=[ dict( name="AS65100-PREFIX-OUT", out="true", ) ], slow_peer=[ dict(detection=dict(threshold=150)) ], remote_as=10, route_maps=[ dict(name="test-out", out=True) ], route_server_client=True, ) ], network=[ dict( address="198.51.110.10", 
mask="255.255.255.255", backdoor=True, ) ], ), dict( afi="ipv4", safi="mdt", bgp=dict( dmzlink_bw=True, dampening=dict( penalty_half_time=1, reuse_route_val=10, suppress_route_val=100, max_suppress=5, ), soft_reconfig_backup=True, ), ), dict( afi="ipv4", safi="multicast", aggregate_address=[ dict( address="192.0.3.1", netmask="255.255.255.255", as_confed_set=True, ) ], default_metric=12, distance=dict(external=10, internal=10, local=100), network=[ dict( address="198.51.111.11", mask="255.255.255.255", route_map="test", ) ], table_map=dict(name="test_tableMap", filter=True), ), ], ), state="overridden", ) ) self.execute_module(changed=False, commands=[]) def test_ios_bgp_address_family_deleted(self): set_module_args(dict(state="deleted")) commands = [ "router bgp 65000", "no address-family ipv4 multicast vrf blue", "no address-family ipv4 mdt", "no address-family ipv4 multicast", ] result = self.execute_module(changed=True) self.assertEqual(sorted(result["commands"]), sorted(commands)) def test_ios_bgp_address_family_delete_without_config(self): set_module_args( dict( config=dict( as_number="65000", address_family=[ dict(afi="ipv4", safi="multicast"), dict(afi="ipv4", safi="mdt"), ], ), state="deleted", ) ) commands = [ "router bgp 65000", "no address-family ipv4 mdt", "no address-family ipv4 multicast", ] result = self.execute_module(changed=True) self.assertEqual(sorted(result["commands"]), sorted(commands)) def test_ios_bgp_address_family_rendered(self): set_module_args( dict( config=dict( as_number="65000", address_family=[ dict( afi="ipv4", safi="multicast", vrf="blue", aggregate_address=[ dict( address="192.0.2.1", netmask="255.255.255.255", as_confed_set=True, ) ], bgp=dict( dampening=dict( penalty_half_time=1, reuse_route_val=1, suppress_route_val=1, max_suppress=1, ) ), neighbor=[ dict( activate=True, address="198.51.100.1", aigp=dict( send=dict( cost_community=dict( id=100, poi=dict( igp_cost=True, transitive=True, ), ) ) ), slow_peer=[ 
dict(detection=dict(threshold=150)) ], remote_as=10, route_maps=[ dict(name="test-route", out=True) ], route_server_client=True, ) ], network=[ dict( address="198.51.110.10", mask="255.255.255.255", backdoor=True, ) ], ), dict( afi="ipv4", safi="multicast", aggregate_address=[ dict( address="192.0.3.1", netmask="255.255.255.255", as_confed_set=True, ) ], default_metric=12, distance=dict(external=10, internal=10, local=100), network=[ dict( address="198.51.111.11", mask="255.255.255.255", route_map="test", ) ], table_map=dict(name="test_tableMap", filter=True), ), ], ), state="rendered", ) ) commands = [ "router bgp 65000", "address-family ipv4 multicast vrf blue", "bgp dampening 1 1 1 1", "neighbor 198.51.100.1 remote-as 10", "neighbor 198.51.100.1 activate", "neighbor 198.51.100.1 aigp send cost-community 100 poi igp-cost transitive", "neighbor 198.51.100.1 route-map test-route out", "neighbor 198.51.100.1 route-server-client", "neighbor 198.51.100.1 slow-peer detection threshold 150", "network 198.51.110.10 mask 255.255.255.255 backdoor", "aggregate-address 192.0.2.1 255.255.255.255 as-confed-set", "address-family ipv4 multicast", "network 198.51.111.11 mask 255.255.255.255 route-map test", "aggregate-address 192.0.3.1 255.255.255.255 as-confed-set", "default-metric 12", "distance bgp 10 10 100", "table-map test_tableMap filter", ] result = self.execute_module(changed=False) self.assertEqual(sorted(result["rendered"]), sorted(commands)) def test_ios_bgp_address_family_parsed(self): set_module_args( dict( running_config="router bgp 65000\n address-family ipv4 multicast vrf blue\n bgp aggregate-timer 10\n bgp slow-peer detection threshold 150", state="parsed", ) ) result = self.execute_module(changed=False) parsed_list = { "address_family": [ { "afi": "ipv4", "bgp": { "aggregate_timer": 10, "slow_peer": [{"detection": {"threshold": 150}}], }, "safi": "multicast", "vrf": "blue", } ], "as_number": "65000", } self.assertEqual(parsed_list, result["parsed"]) def 
test_ios_bgp_address_family_merged_multiple_neighbor(self): set_module_args( dict( config=dict( as_number="65000", address_family=[ dict( afi="ipv4", neighbor=[ dict( address="192.31.39.212", soft_reconfiguration=True, activate=True, ), dict( address="192.31.47.206", soft_reconfiguration=True, activate=True, ), ], network=[ dict( address="192.0.3.1", mask="255.255.255.0" ), dict( address="192.0.2.1", mask="255.255.255.0" ), dict( address="192.0.4.1", mask="255.255.255.0" ), ], ) ], ), state="merged", ) ) commands = [ "router bgp 65000", "address-family ipv4", "neighbor 192.31.39.212 activate", "neighbor 192.31.39.212 soft-reconfiguration inbound", "neighbor 192.31.47.206 activate", "neighbor 192.31.47.206 soft-reconfiguration inbound", "network 192.0.3.1 mask 255.255.255.0", "network 192.0.2.1 mask 255.255.255.0", "network 192.0.4.1 mask 255.255.255.0", ] result = self.execute_module(changed=True) self.assertEqual(sorted(result["commands"]), sorted(commands)) def test_ios_bgp_address_family_overridden_multiple_neighbor(self): set_module_args( dict( config=dict( as_number="65000", address_family=[ dict( afi="ipv4", neighbor=[ dict( address="192.31.39.212", soft_reconfiguration=True, activate=True, ), dict( address="192.31.47.206", soft_reconfiguration=True, activate=True, ), ], network=[ dict( address="192.0.3.1", mask="255.255.255.0" ), dict( address="192.0.2.1", mask="255.255.255.0" ), dict( address="192.0.4.1", mask="255.255.255.0" ), ], ) ], ), state="overridden", ) ) commands = [ "router bgp 65000", "no address-family ipv4 multicast", "no address-family ipv4 mdt", "no address-family ipv4 multicast vrf blue", "address-family ipv4", "neighbor 192.31.39.212 activate", "neighbor 192.31.39.212 soft-reconfiguration inbound", "neighbor 192.31.47.206 activate", "neighbor 192.31.47.206 soft-reconfiguration inbound", "network 192.0.3.1 mask 255.255.255.0", "network 192.0.2.1 mask 255.255.255.0", "network 192.0.4.1 mask 255.255.255.0", ] result = 
self.execute_module(changed=True) self.assertEqual(sorted(result["commands"]), sorted(commands))
42.847319
156
0.323722
2,483
36,763
4.616996
0.084575
0.052338
0.048674
0.02835
0.845865
0.801727
0.763957
0.729676
0.699669
0.693562
0
0.099134
0.601039
36,763
857
157
42.897316
0.682484
0.003373
0
0.726061
0
0.006061
0.139215
0.019681
0
0
0
0
0.010909
1
0.019394
false
0
0.006061
0.001212
0.029091
0.001212
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d82d22e7b47203684156102a82e1b3876589bb62
36,681
py
Python
src/ebay_rest/api/commerce_notification/api/subscription_api.py
gbm001/ebay_rest
077d3478423ccd80ff35e0361821d6a11180bc54
[ "MIT" ]
3
2021-12-12T04:28:03.000Z
2022-03-10T03:29:18.000Z
src/ebay_rest/api/commerce_notification/api/subscription_api.py
jdavv/ebay_rest
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
[ "MIT" ]
33
2021-06-16T20:44:36.000Z
2022-03-30T14:55:06.000Z
src/ebay_rest/api/commerce_notification/api/subscription_api.py
jdavv/ebay_rest
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
[ "MIT" ]
7
2021-06-03T09:30:23.000Z
2022-03-08T19:51:33.000Z
# coding: utf-8 """ Notification API The eBay Notification API enables management of the entire end-to-end eBay notification experience by allowing users to:<ul><li>Browse for supported notification topics and retrieve topic details</li><li>Create, configure, and manage notification destination endpoints</li><li>Configure, manage, and test notification subscriptions</li><li>Process eBay notifications and verify the integrity of the message payload</li></ul> # noqa: E501 OpenAPI spec version: v1.2.0 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from ...commerce_notification.api_client import ApiClient class SubscriptionApi(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def create_subscription(self, **kwargs): # noqa: E501 """create_subscription # noqa: E501 This method allows applications to create a subscription for a topic and supported schema version. 
Subscriptions allow applications to express interest in notifications and keep receiving the information relevant to their business.<br/><br/>Each application and topic-schema pairing to a subscription should have a 1:1 cardinality.<br/><br/>You can create the subscription in disabled mode, test it (see the <b>test</b> method), and when everything is ready, you can enable the subscription (see the <b>enableSubscription</b> method).<br /><br /><span class=\"tablenote\"><b>Note:</b> If an application is not authorized to subscribe to a topic, for example, if your authorization does not include the list of scopes required for the topic, an error code of 195011 is returned.</span> # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_subscription(async_req=True) >>> result = thread.get() :param async_req bool :param CreateSubscriptionRequest body: The create subscription request. :return: object If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.create_subscription_with_http_info(**kwargs) # noqa: E501 else: (data) = self.create_subscription_with_http_info(**kwargs) # noqa: E501 return data def create_subscription_with_http_info(self, **kwargs): # noqa: E501 """create_subscription # noqa: E501 This method allows applications to create a subscription for a topic and supported schema version. 
Subscriptions allow applications to express interest in notifications and keep receiving the information relevant to their business.<br/><br/>Each application and topic-schema pairing to a subscription should have a 1:1 cardinality.<br/><br/>You can create the subscription in disabled mode, test it (see the <b>test</b> method), and when everything is ready, you can enable the subscription (see the <b>enableSubscription</b> method).<br /><br /><span class=\"tablenote\"><b>Note:</b> If an application is not authorized to subscribe to a topic, for example, if your authorization does not include the list of scopes required for the topic, an error code of 195011 is returned.</span> # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_subscription_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param CreateSubscriptionRequest body: The create subscription request. :return: object If the method is called asynchronously, returns the request thread. 
""" all_params = ['body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_subscription" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['api_auth'] # noqa: E501 return self.api_client.call_api( '/subscription', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_subscription(self, subscription_id, **kwargs): # noqa: E501 """delete_subscription # noqa: E501 This method allows applications to delete a subscription. Subscriptions can be deleted regardless of status. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_subscription(subscription_id, async_req=True) >>> result = thread.get() :param async_req bool :param str subscription_id: The unique identifier for the subscription. 
(required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_subscription_with_http_info(subscription_id, **kwargs) # noqa: E501 else: (data) = self.delete_subscription_with_http_info(subscription_id, **kwargs) # noqa: E501 return data def delete_subscription_with_http_info(self, subscription_id, **kwargs): # noqa: E501 """delete_subscription # noqa: E501 This method allows applications to delete a subscription. Subscriptions can be deleted regardless of status. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_subscription_with_http_info(subscription_id, async_req=True) >>> result = thread.get() :param async_req bool :param str subscription_id: The unique identifier for the subscription. (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['subscription_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_subscription" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'subscription_id' is set if ('subscription_id' not in params or params['subscription_id'] is None): raise ValueError("Missing the required parameter `subscription_id` when calling `delete_subscription`") # noqa: E501 collection_formats = {} path_params = {} if 'subscription_id' in params: path_params['subscription_id'] = params['subscription_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = ['api_auth'] # noqa: E501 return self.api_client.call_api( '/subscription/{subscription_id}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def disable_subscription(self, subscription_id, **kwargs): # noqa: E501 """disable_subscription # noqa: E501 This method disables a subscription, which prevents the subscription from providing notifications. To restart a subscription, call <strong>enableSubscription</strong>. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.disable_subscription(subscription_id, async_req=True) >>> result = thread.get() :param async_req bool :param str subscription_id: The unique identifier for the subscription. (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.disable_subscription_with_http_info(subscription_id, **kwargs) # noqa: E501 else: (data) = self.disable_subscription_with_http_info(subscription_id, **kwargs) # noqa: E501 return data def disable_subscription_with_http_info(self, subscription_id, **kwargs): # noqa: E501 """disable_subscription # noqa: E501 This method disables a subscription, which prevents the subscription from providing notifications. To restart a subscription, call <strong>enableSubscription</strong>. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.disable_subscription_with_http_info(subscription_id, async_req=True) >>> result = thread.get() :param async_req bool :param str subscription_id: The unique identifier for the subscription. (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['subscription_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method disable_subscription" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'subscription_id' is set if ('subscription_id' not in params or params['subscription_id'] is None): raise ValueError("Missing the required parameter `subscription_id` when calling `disable_subscription`") # noqa: E501 collection_formats = {} path_params = {} if 'subscription_id' in params: path_params['subscription_id'] = params['subscription_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = ['api_auth'] # noqa: E501 return self.api_client.call_api( '/subscription/{subscription_id}/disable', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def enable_subscription(self, subscription_id, **kwargs): # noqa: E501 """enable_subscription # noqa: E501 This method allows applications to enable a disabled subscription. To pause (or disable) an enabled subscription, call <strong>disableSubscription</strong>. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.enable_subscription(subscription_id, async_req=True) >>> result = thread.get() :param async_req bool :param str subscription_id: The unique identifier for the subscription. (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.enable_subscription_with_http_info(subscription_id, **kwargs) # noqa: E501 else: (data) = self.enable_subscription_with_http_info(subscription_id, **kwargs) # noqa: E501 return data def enable_subscription_with_http_info(self, subscription_id, **kwargs): # noqa: E501 """enable_subscription # noqa: E501 This method allows applications to enable a disabled subscription. To pause (or disable) an enabled subscription, call <strong>disableSubscription</strong>. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.enable_subscription_with_http_info(subscription_id, async_req=True) >>> result = thread.get() :param async_req bool :param str subscription_id: The unique identifier for the subscription. (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['subscription_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method enable_subscription" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'subscription_id' is set if ('subscription_id' not in params or params['subscription_id'] is None): raise ValueError("Missing the required parameter `subscription_id` when calling `enable_subscription`") # noqa: E501 collection_formats = {} path_params = {} if 'subscription_id' in params: path_params['subscription_id'] = params['subscription_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = ['api_auth'] # noqa: E501 return self.api_client.call_api( '/subscription/{subscription_id}/enable', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_subscription(self, subscription_id, **kwargs): # noqa: E501 """get_subscription # noqa: E501 This method allows applications to retrieve subscription details for the specified subscription.<br /><br />Specify the subscription to retrieve using the <strong>subscription_id</strong>. 
Use the <strong>getSubscriptions</strong> method to browse all subscriptions if you do not know the <strong>subscription_id</strong>.<br /><br />Subscriptions allow applications to express interest in notifications and keep receiving the information relevant to their business. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_subscription(subscription_id, async_req=True) >>> result = thread.get() :param async_req bool :param str subscription_id: The unique identifier for the subscription. (required) :return: Subscription If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_subscription_with_http_info(subscription_id, **kwargs) # noqa: E501 else: (data) = self.get_subscription_with_http_info(subscription_id, **kwargs) # noqa: E501 return data def get_subscription_with_http_info(self, subscription_id, **kwargs): # noqa: E501 """get_subscription # noqa: E501 This method allows applications to retrieve subscription details for the specified subscription.<br /><br />Specify the subscription to retrieve using the <strong>subscription_id</strong>. Use the <strong>getSubscriptions</strong> method to browse all subscriptions if you do not know the <strong>subscription_id</strong>.<br /><br />Subscriptions allow applications to express interest in notifications and keep receiving the information relevant to their business. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_subscription_with_http_info(subscription_id, async_req=True) >>> result = thread.get() :param async_req bool :param str subscription_id: The unique identifier for the subscription. (required) :return: Subscription If the method is called asynchronously, returns the request thread. 
""" all_params = ['subscription_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_subscription" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'subscription_id' is set if ('subscription_id' not in params or params['subscription_id'] is None): raise ValueError("Missing the required parameter `subscription_id` when calling `get_subscription`") # noqa: E501 collection_formats = {} path_params = {} if 'subscription_id' in params: path_params['subscription_id'] = params['subscription_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['api_auth'] # noqa: E501 return self.api_client.call_api( '/subscription/{subscription_id}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='Subscription', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_subscriptions(self, **kwargs): # noqa: E501 """get_subscriptions # noqa: E501 This method allows applications to retrieve a list of all subscriptions. The list returned is a paginated collection of subscription resources.<br /><br />Subscriptions allow applications to express interest in notifications and keep receiving the information relevant to their business. 
# noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_subscriptions(async_req=True) >>> result = thread.get() :param async_req bool :param str limit: The number of items, from the result set, returned in a single page. Range is from 10-100. If this parameter is omitted, the default value is used.<br/><br/><b>Default:</b> 20<br/><br/><b>Maximum:</b> 100 items per page :param str continuation_token: The continuation token for the next set of results. :return: SubscriptionSearchResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_subscriptions_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_subscriptions_with_http_info(**kwargs) # noqa: E501 return data def get_subscriptions_with_http_info(self, **kwargs): # noqa: E501 """get_subscriptions # noqa: E501 This method allows applications to retrieve a list of all subscriptions. The list returned is a paginated collection of subscription resources.<br /><br />Subscriptions allow applications to express interest in notifications and keep receiving the information relevant to their business. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_subscriptions_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param str limit: The number of items, from the result set, returned in a single page. Range is from 10-100. If this parameter is omitted, the default value is used.<br/><br/><b>Default:</b> 20<br/><br/><b>Maximum:</b> 100 items per page :param str continuation_token: The continuation token for the next set of results. :return: SubscriptionSearchResponse If the method is called asynchronously, returns the request thread. 
""" all_params = ['limit', 'continuation_token'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_subscriptions" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] if 'limit' in params: query_params.append(('limit', params['limit'])) # noqa: E501 if 'continuation_token' in params: query_params.append(('continuation_token', params['continuation_token'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['api_auth'] # noqa: E501 return self.api_client.call_api( '/subscription', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SubscriptionSearchResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def test(self, subscription_id, **kwargs): # noqa: E501 """test # noqa: E501 This method triggers a mocked test payload that includes a notification ID, publish date, and so on. 
Use this method to test your subscription end-to-end.<br /><br />You can create the subscription in disabled mode, test it using this method, and when everything is ready, you can enable the subscription (see the <strong>enableSubscription</strong> method).<br /><br /><span class=\"tablenote\"><b>Note:</b> Use the <strong>notificationId</strong> to tell the difference between a test payload and a real payload.</span> # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.test(subscription_id, async_req=True) >>> result = thread.get() :param async_req bool :param str subscription_id: The unique identifier for the subscription. (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.test_with_http_info(subscription_id, **kwargs) # noqa: E501 else: (data) = self.test_with_http_info(subscription_id, **kwargs) # noqa: E501 return data def test_with_http_info(self, subscription_id, **kwargs): # noqa: E501 """test # noqa: E501 This method triggers a mocked test payload that includes a notification ID, publish date, and so on. Use this method to test your subscription end-to-end.<br /><br />You can create the subscription in disabled mode, test it using this method, and when everything is ready, you can enable the subscription (see the <strong>enableSubscription</strong> method).<br /><br /><span class=\"tablenote\"><b>Note:</b> Use the <strong>notificationId</strong> to tell the difference between a test payload and a real payload.</span> # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.test_with_http_info(subscription_id, async_req=True) >>> result = thread.get() :param async_req bool :param str subscription_id: The unique identifier for the subscription. 
(required) :return: None If the method is called asynchronously, returns the request thread. """ all_params = ['subscription_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method test" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'subscription_id' is set if ('subscription_id' not in params or params['subscription_id'] is None): raise ValueError("Missing the required parameter `subscription_id` when calling `test`") # noqa: E501 collection_formats = {} path_params = {} if 'subscription_id' in params: path_params['subscription_id'] = params['subscription_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = ['api_auth'] # noqa: E501 return self.api_client.call_api( '/subscription/{subscription_id}/test', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def update_subscription(self, subscription_id, **kwargs): # noqa: E501 """update_subscription # noqa: E501 This method allows applications to update a subscription. 
Subscriptions allow applications to express interest in notifications and keep receiving the information relevant to their business.<br /><br /><span class=\"tablenote\"><b>Note:</b> This call returns an error if an application is not authorized to subscribe to a topic.</span><br/><br/>You can pause and restart a subscription. See the <b>disableSubscription</b> and <b>enableSubscription</b> methods. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_subscription(subscription_id, async_req=True) >>> result = thread.get() :param async_req bool :param str subscription_id: The unique identifier for the subscription. (required) :param UpdateSubscriptionRequest body: The create subscription request. :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.update_subscription_with_http_info(subscription_id, **kwargs) # noqa: E501 else: (data) = self.update_subscription_with_http_info(subscription_id, **kwargs) # noqa: E501 return data def update_subscription_with_http_info(self, subscription_id, **kwargs): # noqa: E501 """update_subscription # noqa: E501 This method allows applications to update a subscription. Subscriptions allow applications to express interest in notifications and keep receiving the information relevant to their business.<br /><br /><span class=\"tablenote\"><b>Note:</b> This call returns an error if an application is not authorized to subscribe to a topic.</span><br/><br/>You can pause and restart a subscription. See the <b>disableSubscription</b> and <b>enableSubscription</b> methods. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_subscription_with_http_info(subscription_id, async_req=True) >>> result = thread.get() :param async_req bool :param str subscription_id: The unique identifier for the subscription. (required) :param UpdateSubscriptionRequest body: The create subscription request. :return: None If the method is called asynchronously, returns the request thread. """ all_params = ['subscription_id', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_subscription" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'subscription_id' is set if ('subscription_id' not in params or params['subscription_id'] is None): raise ValueError("Missing the required parameter `subscription_id` when calling `update_subscription`") # noqa: E501 collection_formats = {} path_params = {} if 'subscription_id' in params: path_params['subscription_id'] = params['subscription_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['api_auth'] # noqa: E501 return self.api_client.call_api( '/subscription/{subscription_id}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), 
_preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
46.90665
806
0.646356
4,307
36,681
5.30137
0.064314
0.038541
0.019621
0.025227
0.95235
0.947707
0.947707
0.940437
0.939912
0.929795
0
0.013979
0.268668
36,681
781
807
46.966709
0.837173
0.446198
0
0.805755
0
0
0.188755
0.045376
0
0
0
0
0
1
0.040767
false
0
0.009592
0
0.110312
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
dc22263b520403eec348d8dfed0eb9bde4f70e28
67,078
py
Python
python/paddle/fluid/tests/unittests/auto_parallel/test_cluster.py
RangeKing/Paddle
2d87300809ae75d76f5b0b457d8112cb88dc3e27
[ "Apache-2.0" ]
8
2016-08-15T07:02:27.000Z
2016-08-24T09:34:00.000Z
python/paddle/fluid/tests/unittests/auto_parallel/test_cluster.py
RangeKing/Paddle
2d87300809ae75d76f5b0b457d8112cb88dc3e27
[ "Apache-2.0" ]
1
2022-01-28T07:23:22.000Z
2022-01-28T07:23:22.000Z
python/paddle/fluid/tests/unittests/auto_parallel/test_cluster.py
RangeKing/Paddle
2d87300809ae75d76f5b0b457d8112cb88dc3e27
[ "Apache-2.0" ]
null
null
null
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest import os import json import paddle from paddle.distributed.auto_parallel.cluster import Cluster cluster_json = """ { "alpha_latency": {"inter": {"ring": "NET", "tree": "NET"}, "intra": {"ring": "NVL", "tree": "PHB"}, "base": {"ring": 8.4, "tree": 0}, "switch": 10.0}, "machines": [ { "hostname": "yq01-sys-hic-v100-box-a225-0266", "addr": "10.127.9.147", "port": "60009", "devices": [ { "global_id": 0, "local_id": 0, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 1, "local_id": 1, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 2, "local_id": 2, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 3, "local_id": 3, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 4, "local_id": 4, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 5, "local_id": 5, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 6, "local_id": 6, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": 
"7800" }, { "global_id": 7, "local_id": 7, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 8, "local_id": 0, "type": "CPU", "arch": "x86_64", "vendor": "GenuineIntel", "model": "Intel(R) Xeon(R) Gold 6148 CPU @ 2.40GH", "memory": "502", "sp_gflops": "150", "dp_gflops": "75" }, { "global_id": 9, "local_id": 0, "type": "NIC", "width": 12.5, "ip": "10.127.9.147" } ], "links": [ { "source_global_id": 0, "target_global_id": 1, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 0, "target_global_id": 2, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 0, "target_global_id": 3, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 0, "target_global_id": 4, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 0, "target_global_id": 5, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 0, "target_global_id": 6, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 0, "target_global_id": 7, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 0, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 0, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 1, "target_global_id": 0, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 2, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 3, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 4, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 5, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 6, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 7, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 1, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { 
"source_global_id": 2, "target_global_id": 0, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 1, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 3, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 4, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 5, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 6, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 7, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 2, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 3, "target_global_id": 0, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 1, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 2, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 4, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 5, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 6, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 7, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 3, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 4, "target_global_id": 0, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 1, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 2, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 3, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 5, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 6, 
"type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 7, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 4, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 5, "target_global_id": 0, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 5, "target_global_id": 1, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 5, "target_global_id": 2, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 5, "target_global_id": 3, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 5, "target_global_id": 4, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 5, "target_global_id": 6, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 5, "target_global_id": 7, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 5, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 5, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 6, "target_global_id": 0, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 1, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 2, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 3, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 4, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 5, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 7, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 6, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 7, "target_global_id": 0, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 7, "target_global_id": 1, "type": "NVB", "bandwidth": 235.0 }, { 
"source_global_id": 7, "target_global_id": 2, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 7, "target_global_id": 3, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 7, "target_global_id": 4, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 7, "target_global_id": 5, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 7, "target_global_id": 6, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 7, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 7, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 0, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 1, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 2, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 3, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 4, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 5, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 6, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 7, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 0, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 1, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 2, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 3, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 4, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 5, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 6, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 7, "type": "PHB", 
"bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 } ] } ] } """ multi_cluster_json = """{ "machines": [ { "hostname": "yq01-sys-hic-v100-box-a225-0266", "addr": "10.127.9.147", "port": "60009", "devices": [ { "global_id": 0, "local_id": 0, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 1, "local_id": 1, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 2, "local_id": 2, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 3, "local_id": 3, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 4, "local_id": 4, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 5, "local_id": 5, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 6, "local_id": 6, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 7, "local_id": 7, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 8, "local_id": 0, "type": "CPU", "arch": "x86_64", "vendor": "GenuineIntel", "model": "Intel(R) Xeon(R) Gold 6148 CPU @ 2.40GH", "memory": "502", "sp_gflops": "150", "dp_gflops": "75" }, { "global_id": 9, "local_id": 0, "type": "NIC", "width": 12.5, "ip": "10.127.9.147" } ], "links": [ { "source_global_id": 0, "target_global_id": 1, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 0, "target_global_id": 2, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 0, "target_global_id": 3, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 0, 
"target_global_id": 4, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 0, "target_global_id": 5, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 0, "target_global_id": 6, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 0, "target_global_id": 7, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 0, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 0, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 1, "target_global_id": 0, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 2, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 3, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 4, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 5, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 6, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 7, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 1, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 1, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 2, "target_global_id": 0, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 1, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 3, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 4, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 5, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 6, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 7, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 2, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 2, "target_global_id": 9, "type": "PHB", 
"bandwidth": 24.0 }, { "source_global_id": 3, "target_global_id": 0, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 1, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 2, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 4, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 5, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 6, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 7, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 3, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 3, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 4, "target_global_id": 0, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 1, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 2, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 3, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 5, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 6, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 7, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 4, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 4, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 5, "target_global_id": 0, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 5, "target_global_id": 1, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 5, "target_global_id": 2, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 5, "target_global_id": 3, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 5, "target_global_id": 4, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 5, 
"target_global_id": 6, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 5, "target_global_id": 7, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 5, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 5, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 6, "target_global_id": 0, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 1, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 2, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 3, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 4, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 5, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 7, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 6, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 6, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 7, "target_global_id": 0, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 7, "target_global_id": 1, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 7, "target_global_id": 2, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 7, "target_global_id": 3, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 7, "target_global_id": 4, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 7, "target_global_id": 5, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 7, "target_global_id": 6, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 7, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 7, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 0, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 1, "type": "PHB", "bandwidth": 
24.0 }, { "source_global_id": 8, "target_global_id": 2, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 3, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 4, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 5, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 6, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 7, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 8, "target_global_id": 9, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 0, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 1, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 2, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 3, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 4, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 5, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 6, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 7, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 8, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 9, "target_global_id": 19, "type": "NET", "bandwidth": 24.0 } ] }, { "hostname": "yq01-sys-hic-k8s-v100-box-a225-0751", "addr": "10.127.43.24", "port": "60009", "devices": [ { "global_id": 10, "local_id": 0, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 11, "local_id": 1, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 12, "local_id": 2, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 13, 
"local_id": 3, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 14, "local_id": 4, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 15, "local_id": 5, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 16, "local_id": 6, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 17, "local_id": 7, "type": "GPU", "model": " Tesla V100-SXM2-32GB", "memory": "32", "sp_gflops": "15700", "dp_gflops": "7800" }, { "global_id": 18, "local_id": 0, "type": "CPU", "arch": "x86_64", "vendor": "GenuineIntel", "model": "Intel(R) Xeon(R) Gold 6271C CPU @ 2.60G", "memory": "503", "sp_gflops": "150", "dp_gflops": "75" }, { "global_id": 19, "local_id": 0, "type": "NIC", "width": 12.5, "ip": "10.127.43.24" } ], "links": [ { "source_global_id": 10, "target_global_id": 11, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 10, "target_global_id": 12, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 10, "target_global_id": 13, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 10, "target_global_id": 14, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 10, "target_global_id": 15, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 10, "target_global_id": 16, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 10, "target_global_id": 17, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 10, "target_global_id": 18, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 10, "target_global_id": 19, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 11, "target_global_id": 10, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 11, "target_global_id": 12, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 11, 
"target_global_id": 13, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 11, "target_global_id": 14, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 11, "target_global_id": 15, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 11, "target_global_id": 16, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 11, "target_global_id": 17, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 11, "target_global_id": 18, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 11, "target_global_id": 19, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 12, "target_global_id": 10, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 12, "target_global_id": 11, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 12, "target_global_id": 13, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 12, "target_global_id": 14, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 12, "target_global_id": 15, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 12, "target_global_id": 16, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 12, "target_global_id": 17, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 12, "target_global_id": 18, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 12, "target_global_id": 19, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 13, "target_global_id": 10, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 13, "target_global_id": 11, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 13, "target_global_id": 12, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 13, "target_global_id": 14, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 13, "target_global_id": 15, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 13, "target_global_id": 16, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 13, "target_global_id": 17, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 13, 
"target_global_id": 18, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 13, "target_global_id": 19, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 14, "target_global_id": 10, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 14, "target_global_id": 11, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 14, "target_global_id": 12, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 14, "target_global_id": 13, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 14, "target_global_id": 15, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 14, "target_global_id": 16, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 14, "target_global_id": 17, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 14, "target_global_id": 18, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 14, "target_global_id": 19, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 15, "target_global_id": 10, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 15, "target_global_id": 11, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 15, "target_global_id": 12, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 15, "target_global_id": 13, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 15, "target_global_id": 14, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 15, "target_global_id": 16, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 15, "target_global_id": 17, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 15, "target_global_id": 18, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 15, "target_global_id": 19, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 16, "target_global_id": 10, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 16, "target_global_id": 11, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 16, "target_global_id": 12, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 16, 
"target_global_id": 13, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 16, "target_global_id": 14, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 16, "target_global_id": 15, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 16, "target_global_id": 17, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 16, "target_global_id": 18, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 16, "target_global_id": 19, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 17, "target_global_id": 10, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 17, "target_global_id": 11, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 17, "target_global_id": 12, "type": "NVB", "bandwidth": 235.0 }, { "source_global_id": 17, "target_global_id": 13, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 17, "target_global_id": 14, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 17, "target_global_id": 15, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 17, "target_global_id": 16, "type": "NVL", "bandwidth": 235.0 }, { "source_global_id": 17, "target_global_id": 18, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 17, "target_global_id": 19, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 18, "target_global_id": 10, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 18, "target_global_id": 11, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 18, "target_global_id": 12, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 18, "target_global_id": 13, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 18, "target_global_id": 14, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 18, "target_global_id": 15, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 18, "target_global_id": 16, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 18, "target_global_id": 17, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 18, 
"target_global_id": 19, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 19, "target_global_id": 10, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 19, "target_global_id": 11, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 19, "target_global_id": 12, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 19, "target_global_id": 13, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 19, "target_global_id": 14, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 19, "target_global_id": 15, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 19, "target_global_id": 16, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 19, "target_global_id": 17, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 19, "target_global_id": 18, "type": "PHB", "bandwidth": 24.0 }, { "source_global_id": 19, "target_global_id": 9, "type": "NET", "bandwidth": 24.0 } ] } ] } """ class TestCluster(unittest.TestCase): def test_single_machine(self): # Build cluster file_dir = os.path.dirname(os.path.abspath(__file__)) cluster_json_path = os.path.join(file_dir, "auto_parallel_cluster.json") cluster_json_object = json.loads(cluster_json) with open(cluster_json_path, "w") as cluster_json_file: json.dump(cluster_json_object, cluster_json_file) cluster = Cluster() cluster.build_from_file(cluster_json_path) beta = cluster.get_beta(0, 1) hop = cluster.get_hop(0, 1) cross_machine = cluster.cross_machine([0, 1]) devices = cluster.convert_rank_to_device_id([0, 1, 2, 3]) involved_machine_count = cluster.get_involved_machine_count(devices) self.assertTrue(beta > 0) self.assertTrue(hop == 0) self.assertTrue(not cross_machine) self.assertTrue(devices == [0, 1, 2, 3]) self.assertTrue(involved_machine_count == 1) # Remove unnecessary files if os.path.exists(cluster_json_path): os.remove(cluster_json_path) def test_multi_machine(self): # Build cluster file_dir = os.path.dirname(os.path.abspath(__file__)) cluster_json_path = os.path.join(file_dir, 
"auto_parallel_cluster.json") cluster_json_object = json.loads(multi_cluster_json) with open(cluster_json_path, "w") as cluster_json_file: json.dump(cluster_json_object, cluster_json_file) cluster = Cluster() cluster.build_from_file(cluster_json_path) beta = cluster.get_beta(0, 11) hop = cluster.get_hop(0, 11) cross_machine = cluster.cross_machine([0, 11]) devices = cluster.convert_rank_to_device_id([5, 6, 7, 8]) involved_machine_count = cluster.get_involved_machine_count(devices) self.assertTrue(beta > 0) self.assertTrue(hop >= 0) self.assertTrue(cross_machine) self.assertTrue(devices == [5, 6, 7, 10]) self.assertTrue(involved_machine_count == 2) # Remove unnecessary files if os.path.exists(cluster_json_path): os.remove(cluster_json_path) if __name__ == "__main__": unittest.main()
33.157687
80
0.292674
4,664
67,078
3.927744
0.043739
0.250669
0.207872
0.220263
0.952072
0.939353
0.935859
0.931164
0.929472
0.929472
0
0.089037
0.596142
67,078
2,022
81
33.174085
0.587191
0.009854
0
0.665161
0
0
0.965798
0.00238
0
0
0
0
0.00502
1
0.001004
false
0
0.00251
0
0.004016
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
11
dc2b972249ec2aa30aeca24de7c1c755a711e488
26,626
py
Python
tb_rest_client/api/api_pe/asset_controller_api.py
maksonlee/python_tb_rest_client
a6cd17ef4de31f68c3226b7a9835292fbac4b1fa
[ "Apache-2.0" ]
1
2021-07-19T10:09:04.000Z
2021-07-19T10:09:04.000Z
tb_rest_client/api/api_pe/asset_controller_api.py
moravcik94/python_tb_rest_client
985361890cdf4ccce93d2b24905ad9003c8dfcaa
[ "Apache-2.0" ]
null
null
null
tb_rest_client/api/api_pe/asset_controller_api.py
moravcik94/python_tb_rest_client
985361890cdf4ccce93d2b24905ad9003c8dfcaa
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 # Copyright 2020. ThingsBoard # # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # # http://www.apache.org/licenses/LICENSE-2.0 # # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from tb_rest_client.api_client import ApiClient from tb_rest_client.api.api_ce import AssetControllerApi class AssetControllerApi(AssetControllerApi): """NOTE: This class is auto generated by the swagger code generator program. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): super().__init__(api_client) def get_assets_by_entity_group_id_using_get(self, entity_group_id, page_size, page, **kwargs): # noqa: E501 """getAssetsByEntityGroupId # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_assets_by_entity_group_id_using_get(entity_group_id, page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_group_id: entityGroupId (required) :param str page_size: Page size (required) :param str page: Page (required) :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataAsset If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_assets_by_entity_group_id_using_get_with_http_info(entity_group_id, page_size, page, **kwargs) # noqa: E501 else: (data) = self.get_assets_by_entity_group_id_using_get_with_http_info(entity_group_id, page_size, page, **kwargs) # noqa: E501 return data def get_assets_by_entity_group_id_using_get_with_http_info(self, entity_group_id, page_size, page, **kwargs): # noqa: E501 """getAssetsByEntityGroupId # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_assets_by_entity_group_id_using_get_with_http_info(entity_group_id, page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_group_id: entityGroupId (required) :param str page_size: Page size (required) :param str page: Page (required) :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataAsset If the method is called asynchronously, returns the request thread. 
""" all_params = ['entity_group_id', 'page_size', 'page', 'text_search', 'sort_property', 'sort_order'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): params[key] = val del params['kwargs'] # verify the required parameter 'entity_group_id' is set if ('entity_group_id' not in params or params['entity_group_id'] is None): raise ValueError("Missing the required parameter `entity_group_id` when calling `get_assets_by_entity_group_id_using_get`") # noqa: E501 # verify the required parameter 'page_size' is set if ('page_size' not in params or params['page_size'] is None): raise ValueError("Missing the required parameter `page_size` when calling `get_assets_by_entity_group_id_using_get`") # noqa: E501 # verify the required parameter 'page' is set if ('page' not in params or params['page'] is None): raise ValueError("Missing the required parameter `page` when calling `get_assets_by_entity_group_id_using_get`") # noqa: E501 if 'page_size' in params and params['page_size'] < 1.0: # noqa: E501 raise ValueError("Invalid value for parameter `page_size` when calling `get_assets_by_entity_group_id_using_get`, must be a value greater than or equal to `1.0`") # noqa: E501 if 'page' in params and params['page'] < 0.0: # noqa: E501 raise ValueError("Invalid value for parameter `page` when calling `get_assets_by_entity_group_id_using_get`, must be a value greater than or equal to `0.0`") # noqa: E501 collection_formats = {} path_params = {} if 'entity_group_id' in params: path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501 query_params = [] if 'text_search' in params: query_params.append(('textSearch', params['text_search'])) # noqa: E501 if 'sort_property' in params: query_params.append(('sortProperty', params['sort_property'])) # noqa: E501 if 'sort_order' in params: 
query_params.append(('sortOrder', params['sort_order'])) # noqa: E501 if 'page_size' in params: query_params.append(('pageSize', params['page_size'])) # noqa: E501 if 'page' in params: query_params.append(('page', params['page'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/entityGroup/{entityGroupId}/assets{?textSearch,sortProperty,sortOrder,pageSize,page}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PageDataAsset', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_customer_assets_using_get(self, customer_id, page_size, page, **kwargs): # noqa: E501 """getCustomerAssets # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_customer_assets_using_get(customer_id, page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str customer_id: customerId (required) :param str page_size: pageSize (required) :param str page: page (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataAsset If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_customer_assets_using_get_with_http_info(customer_id, page_size, page, **kwargs) # noqa: E501 else: (data) = self.get_customer_assets_using_get_with_http_info(customer_id, page_size, page, **kwargs) # noqa: E501 return data def get_customer_assets_using_get_with_http_info(self, customer_id, page_size, page, **kwargs): # noqa: E501 """getCustomerAssets # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_customer_assets_using_get_with_http_info(customer_id, page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str customer_id: customerId (required) :param str page_size: pageSize (required) :param str page: page (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataAsset If the method is called asynchronously, returns the request thread. 
""" all_params = ['customer_id', 'page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): params[key] = val del params['kwargs'] # verify the required parameter 'customer_id' is set if ('customer_id' not in params or params['customer_id'] is None): raise ValueError("Missing the required parameter `customer_id` when calling `get_customer_assets_using_get`") # noqa: E501 # verify the required parameter 'page_size' is set if ('page_size' not in params or params['page_size'] is None): raise ValueError("Missing the required parameter `page_size` when calling `get_customer_assets_using_get`") # noqa: E501 # verify the required parameter 'page' is set if ('page' not in params or params['page'] is None): raise ValueError("Missing the required parameter `page` when calling `get_customer_assets_using_get`") # noqa: E501 collection_formats = {} path_params = {} if 'customer_id' in params: path_params['customerId'] = params['customer_id'] # noqa: E501 query_params = [] if 'type' in params: query_params.append(('type', params['type'])) # noqa: E501 if 'text_search' in params: query_params.append(('textSearch', params['text_search'])) # noqa: E501 if 'sort_property' in params: query_params.append(('sortProperty', params['sort_property'])) # noqa: E501 if 'sort_order' in params: query_params.append(('sortOrder', params['sort_order'])) # noqa: E501 if 'page_size' in params: query_params.append(('pageSize', params['page_size'])) # noqa: E501 if 'page' in params: query_params.append(('page', params['page'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # HTTP header `Content-Type` 
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/customer/{customerId}/assets{?type,textSearch,sortProperty,sortOrder,pageSize,page}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PageDataAsset', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_tenant_assets_using_get(self, page_size, page, **kwargs): # noqa: E501 """getTenantAssets # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_tenant_assets_using_get(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str page_size: pageSize (required) :param str page: page (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataAsset If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_tenant_assets_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501 else: (data) = self.get_tenant_assets_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501 return data def get_tenant_assets_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501 """getTenantAssets # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_tenant_assets_using_get_with_http_info(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str page_size: pageSize (required) :param str page: page (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataAsset If the method is called asynchronously, returns the request thread. """ all_params = ['page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): params[key] = val del params['kwargs'] # verify the required parameter 'page_size' is set if ('page_size' not in params or params['page_size'] is None): raise ValueError("Missing the required parameter `page_size` when calling `get_tenant_assets_using_get`") # noqa: E501 # verify the required parameter 'page' is set if ('page' not in params or params['page'] is None): raise ValueError("Missing the required parameter `page` when calling `get_tenant_assets_using_get`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] if 'type' in params: query_params.append(('type', params['type'])) # noqa: E501 if 'text_search' in params: query_params.append(('textSearch', params['text_search'])) # noqa: E501 if 'sort_property' in params: query_params.append(('sortProperty', params['sort_property'])) # noqa: E501 if 'sort_order' in params: query_params.append(('sortOrder', params['sort_order'])) # noqa: E501 if 'page_size' in params: query_params.append(('pageSize', params['page_size'])) # noqa: E501 if 'page' in params: query_params.append(('page', params['page'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = 
None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/tenant/assets{?type,textSearch,sortProperty,sortOrder,pageSize,page}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PageDataAsset', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_user_assets_using_get(self, page_size, page, **kwargs): # noqa: E501 """getUserAssets # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_user_assets_using_get(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str page_size: pageSize (required) :param str page: page (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataAsset If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_user_assets_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501 else: (data) = self.get_user_assets_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501 return data def get_user_assets_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501 """getUserAssets # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_user_assets_using_get_with_http_info(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str page_size: pageSize (required) :param str page: page (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataAsset If the method is called asynchronously, returns the request thread. """ all_params = ['page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): params[key] = val del params['kwargs'] # verify the required parameter 'page_size' is set if ('page_size' not in params or params['page_size'] is None): raise ValueError("Missing the required parameter `page_size` when calling `get_user_assets_using_get`") # noqa: E501 # verify the required parameter 'page' is set if ('page' not in params or params['page'] is None): raise ValueError("Missing the required parameter `page` when calling `get_user_assets_using_get`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] if 'type' in params: query_params.append(('type', params['type'])) # noqa: E501 if 'text_search' in params: query_params.append(('textSearch', params['text_search'])) # noqa: E501 if 'sort_property' in params: query_params.append(('sortProperty', params['sort_property'])) # noqa: E501 if 'sort_order' in params: query_params.append(('sortOrder', params['sort_order'])) # noqa: E501 if 'page_size' in params: query_params.append(('pageSize', params['page_size'])) # noqa: E501 if 'page' in params: query_params.append(('page', params['page'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None 
# HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/user/assets{?type,textSearch,sortProperty,sortOrder,pageSize,page}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PageDataAsset', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def save_asset_using_post(self, asset, **kwargs): # noqa: E501 """saveAsset # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.save_asset_using_post(asset, async_req=True) >>> result = thread.get() :param async_req bool :param Asset asset: asset (required) :param str entity_group_id: entityGroupId :return: Asset If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.save_asset_using_post_with_http_info(asset, **kwargs) # noqa: E501 else: (data) = self.save_asset_using_post_with_http_info(asset, **kwargs) # noqa: E501 return data def save_asset_using_post_with_http_info(self, asset, **kwargs): # noqa: E501 """saveAsset # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.save_asset_using_post_with_http_info(asset, async_req=True) >>> result = thread.get() :param async_req bool :param Asset asset: asset (required) :param str entity_group_id: entityGroupId :return: Asset If the method is called asynchronously, returns the request thread. """ all_params = ['asset', 'entity_group_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): params[key] = val del params['kwargs'] # verify the required parameter 'asset' is set if ('asset' not in params or params['asset'] is None): raise ValueError("Missing the required parameter `asset` when calling `save_asset_using_post`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] if 'entity_group_id' in params: query_params.append(('entityGroupId', params['entity_group_id'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'asset' in params: body_params = params['asset'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/asset{?entityGroupId}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='Asset', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
43.224026
188
0.630436
3,189
26,626
4.989652
0.066792
0.050779
0.034188
0.028658
0.933886
0.921883
0.910005
0.897373
0.890837
0.874497
0
0.016795
0.273229
26,626
615
189
43.294309
0.805488
0.325509
0
0.799373
0
0.009404
0.235298
0.067004
0
0
0
0
0
1
0.034483
false
0
0.015674
0
0.100313
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
dc733ac0f7945f7164f6e9169f16c18ea6f4a084
433,911
py
Python
tests/test_nist_meta_10609.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
1
2021-08-14T17:59:21.000Z
2021-08-14T17:59:21.000Z
tests/test_nist_meta_10609.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
4
2020-02-12T21:30:44.000Z
2020-04-15T20:06:46.000Z
tests/test_nist_meta_10609.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
null
null
null
from tests.utils import assert_bindings def test_atomic_negative_integer_max_inclusive_4_nistxml_sv_iv_atomic_negative_integer_max_inclusive_5_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -1. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-5.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-5-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_4_nistxml_sv_iv_atomic_negative_integer_max_inclusive_5_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -1. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-5.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-5-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_4_nistxml_sv_iv_atomic_negative_integer_max_inclusive_5_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -1. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-5.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-5-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_3_nistxml_sv_iv_atomic_negative_integer_max_inclusive_4_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -666057423564200834. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-4-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_3_nistxml_sv_iv_atomic_negative_integer_max_inclusive_4_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -666057423564200834. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-4-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_3_nistxml_sv_iv_atomic_negative_integer_max_inclusive_4_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -666057423564200834. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-4-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_3_nistxml_sv_iv_atomic_negative_integer_max_inclusive_4_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -666057423564200834. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-4-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_3_nistxml_sv_iv_atomic_negative_integer_max_inclusive_4_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -666057423564200834. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-4-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_2_nistxml_sv_iv_atomic_negative_integer_max_inclusive_3_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -34749374507754505. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-3-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_2_nistxml_sv_iv_atomic_negative_integer_max_inclusive_3_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -34749374507754505. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-3-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_2_nistxml_sv_iv_atomic_negative_integer_max_inclusive_3_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -34749374507754505. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-3-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_2_nistxml_sv_iv_atomic_negative_integer_max_inclusive_3_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -34749374507754505. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-3-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_2_nistxml_sv_iv_atomic_negative_integer_max_inclusive_3_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -34749374507754505. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-3-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_1_nistxml_sv_iv_atomic_negative_integer_max_inclusive_2_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -922333322214573646. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-2-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_1_nistxml_sv_iv_atomic_negative_integer_max_inclusive_2_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -922333322214573646. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-2-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_1_nistxml_sv_iv_atomic_negative_integer_max_inclusive_2_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -922333322214573646. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-2-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_1_nistxml_sv_iv_atomic_negative_integer_max_inclusive_2_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -922333322214573646. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-2-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_1_nistxml_sv_iv_atomic_negative_integer_max_inclusive_2_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -922333322214573646. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-2-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_inclusive_nistxml_sv_iv_atomic_negative_integer_max_inclusive_1_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxInclusive-1.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxInclusive-1-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_4_nistxml_sv_iv_atomic_negative_integer_max_exclusive_5_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -1. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-5.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-5-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_4_nistxml_sv_iv_atomic_negative_integer_max_exclusive_5_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -1. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-5.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-5-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_4_nistxml_sv_iv_atomic_negative_integer_max_exclusive_5_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -1. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-5.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-5-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_4_nistxml_sv_iv_atomic_negative_integer_max_exclusive_5_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -1. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-5.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-5-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_4_nistxml_sv_iv_atomic_negative_integer_max_exclusive_5_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -1. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-5.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-5-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_3_nistxml_sv_iv_atomic_negative_integer_max_exclusive_4_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -572450131914860271. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-4-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_3_nistxml_sv_iv_atomic_negative_integer_max_exclusive_4_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -572450131914860271. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-4-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_3_nistxml_sv_iv_atomic_negative_integer_max_exclusive_4_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -572450131914860271. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-4-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_3_nistxml_sv_iv_atomic_negative_integer_max_exclusive_4_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -572450131914860271. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-4-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_3_nistxml_sv_iv_atomic_negative_integer_max_exclusive_4_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -572450131914860271. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-4-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_2_nistxml_sv_iv_atomic_negative_integer_max_exclusive_3_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -184935339155753553. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-3-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_2_nistxml_sv_iv_atomic_negative_integer_max_exclusive_3_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -184935339155753553. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-3-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_2_nistxml_sv_iv_atomic_negative_integer_max_exclusive_3_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -184935339155753553. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-3-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_2_nistxml_sv_iv_atomic_negative_integer_max_exclusive_3_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -184935339155753553. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-3-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_2_nistxml_sv_iv_atomic_negative_integer_max_exclusive_3_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -184935339155753553. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-3-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_1_nistxml_sv_iv_atomic_negative_integer_max_exclusive_2_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -866521354558973720. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-2-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_1_nistxml_sv_iv_atomic_negative_integer_max_exclusive_2_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -866521354558973720. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-2-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_1_nistxml_sv_iv_atomic_negative_integer_max_exclusive_2_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -866521354558973720. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-2-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_1_nistxml_sv_iv_atomic_negative_integer_max_exclusive_2_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -866521354558973720. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-2-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_1_nistxml_sv_iv_atomic_negative_integer_max_exclusive_2_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -866521354558973720. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-2-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_max_exclusive_nistxml_sv_iv_atomic_negative_integer_max_exclusive_1_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet maxExclusive with value -999999999999999998. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-maxExclusive-1.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-maxExclusive-1-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMaxExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_4_nistxml_sv_iv_atomic_negative_integer_min_inclusive_5_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -1. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-5.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-5-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_3_nistxml_sv_iv_atomic_negative_integer_min_inclusive_4_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -947674826094804355. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-4-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_3_nistxml_sv_iv_atomic_negative_integer_min_inclusive_4_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -947674826094804355. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-4-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_3_nistxml_sv_iv_atomic_negative_integer_min_inclusive_4_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -947674826094804355. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-4-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_3_nistxml_sv_iv_atomic_negative_integer_min_inclusive_4_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -947674826094804355. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-4-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_3_nistxml_sv_iv_atomic_negative_integer_min_inclusive_4_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -947674826094804355. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-4-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_2_nistxml_sv_iv_atomic_negative_integer_min_inclusive_3_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -539945622984702833. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-3-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_2_nistxml_sv_iv_atomic_negative_integer_min_inclusive_3_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -539945622984702833. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-3-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_2_nistxml_sv_iv_atomic_negative_integer_min_inclusive_3_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -539945622984702833. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-3-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_2_nistxml_sv_iv_atomic_negative_integer_min_inclusive_3_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -539945622984702833. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-3-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_2_nistxml_sv_iv_atomic_negative_integer_min_inclusive_3_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -539945622984702833. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-3-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_1_nistxml_sv_iv_atomic_negative_integer_min_inclusive_2_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -440277848538184635. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-2-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_1_nistxml_sv_iv_atomic_negative_integer_min_inclusive_2_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -440277848538184635. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-2-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_1_nistxml_sv_iv_atomic_negative_integer_min_inclusive_2_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -440277848538184635. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-2-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_1_nistxml_sv_iv_atomic_negative_integer_min_inclusive_2_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -440277848538184635. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-2-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_1_nistxml_sv_iv_atomic_negative_integer_min_inclusive_2_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -440277848538184635. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-2-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_nistxml_sv_iv_atomic_negative_integer_min_inclusive_1_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-1.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-1-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_nistxml_sv_iv_atomic_negative_integer_min_inclusive_1_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-1.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-1-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_nistxml_sv_iv_atomic_negative_integer_min_inclusive_1_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-1.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-1-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_nistxml_sv_iv_atomic_negative_integer_min_inclusive_1_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-1.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-1-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_inclusive_nistxml_sv_iv_atomic_negative_integer_min_inclusive_1_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minInclusive-1.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minInclusive-1-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_4_nistxml_sv_iv_atomic_negative_integer_min_exclusive_5_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -2. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-5.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-5-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_3_nistxml_sv_iv_atomic_negative_integer_min_exclusive_4_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -495295756372066909. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-4-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_3_nistxml_sv_iv_atomic_negative_integer_min_exclusive_4_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -495295756372066909. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-4-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_3_nistxml_sv_iv_atomic_negative_integer_min_exclusive_4_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -495295756372066909. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-4-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_3_nistxml_sv_iv_atomic_negative_integer_min_exclusive_4_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -495295756372066909. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-4-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_3_nistxml_sv_iv_atomic_negative_integer_min_exclusive_4_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -495295756372066909. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-4.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-4-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_2_nistxml_sv_iv_atomic_negative_integer_min_exclusive_3_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -900435039333670416. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-3-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_2_nistxml_sv_iv_atomic_negative_integer_min_exclusive_3_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -900435039333670416. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-3-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_2_nistxml_sv_iv_atomic_negative_integer_min_exclusive_3_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -900435039333670416. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-3-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_2_nistxml_sv_iv_atomic_negative_integer_min_exclusive_3_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -900435039333670416. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-3-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_2_nistxml_sv_iv_atomic_negative_integer_min_exclusive_3_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -900435039333670416. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-3.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-3-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_1_nistxml_sv_iv_atomic_negative_integer_min_exclusive_2_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -435976618086570511. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-2-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_1_nistxml_sv_iv_atomic_negative_integer_min_exclusive_2_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -435976618086570511. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-2-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_1_nistxml_sv_iv_atomic_negative_integer_min_exclusive_2_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -435976618086570511. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-2-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_1_nistxml_sv_iv_atomic_negative_integer_min_exclusive_2_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -435976618086570511. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-2-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_1_nistxml_sv_iv_atomic_negative_integer_min_exclusive_2_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -435976618086570511. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-2.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-2-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_nistxml_sv_iv_atomic_negative_integer_min_exclusive_1_1(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-1.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-1-1.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_nistxml_sv_iv_atomic_negative_integer_min_exclusive_1_2(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-1.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-1-2.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_nistxml_sv_iv_atomic_negative_integer_min_exclusive_1_3(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-1.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-1-3.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_nistxml_sv_iv_atomic_negative_integer_min_exclusive_1_4(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-1.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-1-4.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_negative_integer_min_exclusive_nistxml_sv_iv_atomic_negative_integer_min_exclusive_1_5(mode, save_output, output_format): """ Type atomic/negativeInteger is restricted by facet minExclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/negativeInteger/Schema+Instance/NISTSchema-SV-IV-atomic-negativeInteger-minExclusive-1.xsd", instance="nistData/atomic/negativeInteger/Schema+Instance/NISTXML-SV-IV-atomic-negativeInteger-minExclusive-1-5.xml", class_name="NistschemaSvIvAtomicNegativeIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_white_space_nistxml_sv_iv_atomic_non_positive_integer_white_space_1_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet whiteSpace with value collapse. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-whiteSpace-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-whiteSpace-1-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_white_space_nistxml_sv_iv_atomic_non_positive_integer_white_space_1_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet whiteSpace with value collapse. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-whiteSpace-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-whiteSpace-1-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_white_space_nistxml_sv_iv_atomic_non_positive_integer_white_space_1_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet whiteSpace with value collapse. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-whiteSpace-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-whiteSpace-1-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_white_space_nistxml_sv_iv_atomic_non_positive_integer_white_space_1_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet whiteSpace with value collapse. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-whiteSpace-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-whiteSpace-1-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_white_space_nistxml_sv_iv_atomic_non_positive_integer_white_space_1_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet whiteSpace with value collapse. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-whiteSpace-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-whiteSpace-1-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_4_nistxml_sv_iv_atomic_non_positive_integer_enumeration_5_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-5-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_4_nistxml_sv_iv_atomic_non_positive_integer_enumeration_5_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-5-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_4_nistxml_sv_iv_atomic_non_positive_integer_enumeration_5_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-5-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_4_nistxml_sv_iv_atomic_non_positive_integer_enumeration_5_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-5-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_4_nistxml_sv_iv_atomic_non_positive_integer_enumeration_5_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-5-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_3_nistxml_sv_iv_atomic_non_positive_integer_enumeration_4_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-4-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_3_nistxml_sv_iv_atomic_non_positive_integer_enumeration_4_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-4-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_3_nistxml_sv_iv_atomic_non_positive_integer_enumeration_4_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-4-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_3_nistxml_sv_iv_atomic_non_positive_integer_enumeration_4_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-4-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_3_nistxml_sv_iv_atomic_non_positive_integer_enumeration_4_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-4-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_2_nistxml_sv_iv_atomic_non_positive_integer_enumeration_3_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-3-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_2_nistxml_sv_iv_atomic_non_positive_integer_enumeration_3_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-3-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_2_nistxml_sv_iv_atomic_non_positive_integer_enumeration_3_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-3-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_2_nistxml_sv_iv_atomic_non_positive_integer_enumeration_3_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-3-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_2_nistxml_sv_iv_atomic_non_positive_integer_enumeration_3_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-3-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_1_nistxml_sv_iv_atomic_non_positive_integer_enumeration_2_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-2-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_1_nistxml_sv_iv_atomic_non_positive_integer_enumeration_2_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-2-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_1_nistxml_sv_iv_atomic_non_positive_integer_enumeration_2_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-2-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_1_nistxml_sv_iv_atomic_non_positive_integer_enumeration_2_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-2-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_1_nistxml_sv_iv_atomic_non_positive_integer_enumeration_2_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-2-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_nistxml_sv_iv_atomic_non_positive_integer_enumeration_1_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-1-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_nistxml_sv_iv_atomic_non_positive_integer_enumeration_1_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-1-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_nistxml_sv_iv_atomic_non_positive_integer_enumeration_1_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-1-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_nistxml_sv_iv_atomic_non_positive_integer_enumeration_1_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-1-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_enumeration_nistxml_sv_iv_atomic_non_positive_integer_enumeration_1_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-enumeration-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-enumeration-1-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_4_nistxml_sv_iv_atomic_non_positive_integer_pattern_5_1(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{18}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-5-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_4_nistxml_sv_iv_atomic_non_positive_integer_pattern_5_2(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{18}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-5-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_4_nistxml_sv_iv_atomic_non_positive_integer_pattern_5_3(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{18}. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-5-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_4_nistxml_sv_iv_atomic_non_positive_integer_pattern_5_4(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{18}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-5-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_4_nistxml_sv_iv_atomic_non_positive_integer_pattern_5_5(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{18}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-5-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_3_nistxml_sv_iv_atomic_non_positive_integer_pattern_4_1(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{13}. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-4-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_3_nistxml_sv_iv_atomic_non_positive_integer_pattern_4_2(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{13}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-4-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_3_nistxml_sv_iv_atomic_non_positive_integer_pattern_4_3(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{13}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-4-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_3_nistxml_sv_iv_atomic_non_positive_integer_pattern_4_4(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{13}. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-4-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_3_nistxml_sv_iv_atomic_non_positive_integer_pattern_4_5(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{13}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-4-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_2_nistxml_sv_iv_atomic_non_positive_integer_pattern_3_1(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{9}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-3-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_2_nistxml_sv_iv_atomic_non_positive_integer_pattern_3_2(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{9}. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-3-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_2_nistxml_sv_iv_atomic_non_positive_integer_pattern_3_3(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{9}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-3-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_2_nistxml_sv_iv_atomic_non_positive_integer_pattern_3_4(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{9}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-3-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_2_nistxml_sv_iv_atomic_non_positive_integer_pattern_3_5(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{9}. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-3-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_1_nistxml_sv_iv_atomic_non_positive_integer_pattern_2_1(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{5}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-2-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_1_nistxml_sv_iv_atomic_non_positive_integer_pattern_2_2(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{5}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-2-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_1_nistxml_sv_iv_atomic_non_positive_integer_pattern_2_3(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{5}. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-2-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_1_nistxml_sv_iv_atomic_non_positive_integer_pattern_2_4(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{5}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-2-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_1_nistxml_sv_iv_atomic_non_positive_integer_pattern_2_5(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{5}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-2-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_nistxml_sv_iv_atomic_non_positive_integer_pattern_1_1(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{1}. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-1-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_nistxml_sv_iv_atomic_non_positive_integer_pattern_1_2(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{1}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-1-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_nistxml_sv_iv_atomic_non_positive_integer_pattern_1_3(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{1}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-1-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_nistxml_sv_iv_atomic_non_positive_integer_pattern_1_4(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{1}. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-1-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_pattern_nistxml_sv_iv_atomic_non_positive_integer_pattern_1_5(mode, save_output, output_format): r""" Type atomic/nonPositiveInteger is restricted by facet pattern with value \-\d{1}. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-pattern-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-pattern-1-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_4_nistxml_sv_iv_atomic_non_positive_integer_total_digits_5_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 18. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-5-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_4_nistxml_sv_iv_atomic_non_positive_integer_total_digits_5_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 18. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-5-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_4_nistxml_sv_iv_atomic_non_positive_integer_total_digits_5_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 18. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-5-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_4_nistxml_sv_iv_atomic_non_positive_integer_total_digits_5_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 18. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-5-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_4_nistxml_sv_iv_atomic_non_positive_integer_total_digits_5_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 18. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-5-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_3_nistxml_sv_iv_atomic_non_positive_integer_total_digits_4_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 13. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-4-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_3_nistxml_sv_iv_atomic_non_positive_integer_total_digits_4_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 13. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-4-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_3_nistxml_sv_iv_atomic_non_positive_integer_total_digits_4_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 13. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-4-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_3_nistxml_sv_iv_atomic_non_positive_integer_total_digits_4_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 13. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-4-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_3_nistxml_sv_iv_atomic_non_positive_integer_total_digits_4_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 13. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-4-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_2_nistxml_sv_iv_atomic_non_positive_integer_total_digits_3_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 9. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-3-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_2_nistxml_sv_iv_atomic_non_positive_integer_total_digits_3_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 9. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-3-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_2_nistxml_sv_iv_atomic_non_positive_integer_total_digits_3_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 9. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-3-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_2_nistxml_sv_iv_atomic_non_positive_integer_total_digits_3_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 9. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-3-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_2_nistxml_sv_iv_atomic_non_positive_integer_total_digits_3_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 9. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-3-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_1_nistxml_sv_iv_atomic_non_positive_integer_total_digits_2_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 5. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-2-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_1_nistxml_sv_iv_atomic_non_positive_integer_total_digits_2_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 5. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-2-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_1_nistxml_sv_iv_atomic_non_positive_integer_total_digits_2_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 5. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-2-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_1_nistxml_sv_iv_atomic_non_positive_integer_total_digits_2_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 5. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-2-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_1_nistxml_sv_iv_atomic_non_positive_integer_total_digits_2_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 5. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-2-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_nistxml_sv_iv_atomic_non_positive_integer_total_digits_1_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 1. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-1-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_nistxml_sv_iv_atomic_non_positive_integer_total_digits_1_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 1. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-1-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_nistxml_sv_iv_atomic_non_positive_integer_total_digits_1_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 1. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-1-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_nistxml_sv_iv_atomic_non_positive_integer_total_digits_1_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 1. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-1-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_total_digits_nistxml_sv_iv_atomic_non_positive_integer_total_digits_1_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet totalDigits with value 1. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-totalDigits-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-totalDigits-1-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_fraction_digits_nistxml_sv_iv_atomic_non_positive_integer_fraction_digits_1_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet fractionDigits with value 0. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-fractionDigits-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-fractionDigits-1-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_fraction_digits_nistxml_sv_iv_atomic_non_positive_integer_fraction_digits_1_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet fractionDigits with value 0. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-fractionDigits-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-fractionDigits-1-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_fraction_digits_nistxml_sv_iv_atomic_non_positive_integer_fraction_digits_1_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet fractionDigits with value 0. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-fractionDigits-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-fractionDigits-1-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_fraction_digits_nistxml_sv_iv_atomic_non_positive_integer_fraction_digits_1_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet fractionDigits with value 0. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-fractionDigits-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-fractionDigits-1-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_fraction_digits_nistxml_sv_iv_atomic_non_positive_integer_fraction_digits_1_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet fractionDigits with value 0. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-fractionDigits-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-fractionDigits-1-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_4_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_5_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value 0. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-5-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_4_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_5_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value 0. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-5-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_4_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_5_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value 0. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-5-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_4_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_5_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value 0. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-5-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_4_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_5_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value 0. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-5-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_3_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_4_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -686635117591375964. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-4-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_3_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_4_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -686635117591375964. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-4-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_3_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_4_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -686635117591375964. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-4-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_3_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_4_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -686635117591375964. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-4-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_3_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_4_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -686635117591375964. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-4-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_2_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_3_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -78303033269241706. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-3-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_2_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_3_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -78303033269241706. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-3-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_2_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_3_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -78303033269241706. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-3-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_2_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_3_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -78303033269241706. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-3-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_2_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_3_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -78303033269241706. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-3-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_1_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_2_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -63404852978511949. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-2-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_1_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_2_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -63404852978511949. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-2-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_1_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_2_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -63404852978511949. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-2-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_1_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_2_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -63404852978511949. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-2-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_1_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_2_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -63404852978511949. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-2-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_inclusive_nistxml_sv_iv_atomic_non_positive_integer_max_inclusive_1_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxInclusive-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxInclusive-1-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_4_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_5_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value 0. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-5-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_4_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_5_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value 0. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-5-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_4_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_5_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value 0. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-5-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_4_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_5_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value 0. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-5-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_4_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_5_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value 0. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-5-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_3_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_4_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -398718969796236887. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-4-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_3_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_4_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -398718969796236887. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-4-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_3_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_4_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -398718969796236887. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-4-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_3_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_4_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -398718969796236887. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-4-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_3_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_4_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -398718969796236887. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-4-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_2_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_3_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -64116953963150757. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-3-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_2_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_3_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -64116953963150757. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-3-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_2_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_3_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -64116953963150757. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-3-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_2_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_3_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -64116953963150757. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-3-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_2_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_3_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -64116953963150757. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-3-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_1_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_2_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -267691436022826633. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-2-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_1_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_2_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -267691436022826633. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-2-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_1_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_2_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -267691436022826633. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-2-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_1_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_2_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -267691436022826633. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-2-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_1_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_2_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -267691436022826633. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-2-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_max_exclusive_nistxml_sv_iv_atomic_non_positive_integer_max_exclusive_1_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet maxExclusive with value -999999999999999998. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-maxExclusive-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-maxExclusive-1-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMaxExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_4_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_5_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value 0. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-5-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_3_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_4_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -911248228325171715. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-4-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_3_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_4_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -911248228325171715. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-4-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_3_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_4_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -911248228325171715. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-4-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_3_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_4_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -911248228325171715. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-4-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_3_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_4_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -911248228325171715. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-4-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_2_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_3_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -214379312213180406. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-3-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_2_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_3_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -214379312213180406. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-3-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_2_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_3_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -214379312213180406. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-3-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_2_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_3_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -214379312213180406. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-3-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_2_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_3_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -214379312213180406. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-3-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_1_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_2_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -927820889571802863. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-2-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_1_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_2_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -927820889571802863. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-2-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_1_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_2_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -927820889571802863. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-2-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_1_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_2_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -927820889571802863. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-2-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_1_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_2_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -927820889571802863. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-2-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_1_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-1-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_1_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-1-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_1_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-1-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_1_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-1-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_inclusive_nistxml_sv_iv_atomic_non_positive_integer_min_inclusive_1_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minInclusive-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minInclusive-1-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_4_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_5_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -1. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-5.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-5-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_3_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_4_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -594976296252018754. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-4-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_3_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_4_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -594976296252018754. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-4-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_3_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_4_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -594976296252018754. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-4-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_3_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_4_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -594976296252018754. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-4-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_3_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_4_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -594976296252018754. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-4.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-4-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_2_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_3_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -406392790344449528. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-3-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_2_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_3_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -406392790344449528. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-3-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_2_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_3_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -406392790344449528. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-3-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_2_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_3_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -406392790344449528. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-3-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_2_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_3_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -406392790344449528. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-3.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-3-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_1_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_2_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -482054947069493477. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-2-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_1_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_2_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -482054947069493477. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-2-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_1_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_2_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -482054947069493477. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-2-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_1_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_2_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -482054947069493477. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-2-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_1_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_2_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -482054947069493477. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-2.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-2-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_1_1(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-1-1.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_1_2(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-1-2.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_1_3(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-1-3.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_1_4(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-1-4.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_non_positive_integer_min_exclusive_nistxml_sv_iv_atomic_non_positive_integer_min_exclusive_1_5(mode, save_output, output_format): """ Type atomic/nonPositiveInteger is restricted by facet minExclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTSchema-SV-IV-atomic-nonPositiveInteger-minExclusive-1.xsd", instance="nistData/atomic/nonPositiveInteger/Schema+Instance/NISTXML-SV-IV-atomic-nonPositiveInteger-minExclusive-1-5.xml", class_name="NistschemaSvIvAtomicNonPositiveIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_white_space_nistxml_sv_iv_atomic_integer_white_space_1_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet whiteSpace with value collapse. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-whiteSpace-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-whiteSpace-1-1.xml", class_name="NistschemaSvIvAtomicIntegerWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_white_space_nistxml_sv_iv_atomic_integer_white_space_1_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet whiteSpace with value collapse. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-whiteSpace-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-whiteSpace-1-2.xml", class_name="NistschemaSvIvAtomicIntegerWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_white_space_nistxml_sv_iv_atomic_integer_white_space_1_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet whiteSpace with value collapse. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-whiteSpace-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-whiteSpace-1-3.xml", class_name="NistschemaSvIvAtomicIntegerWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_white_space_nistxml_sv_iv_atomic_integer_white_space_1_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet whiteSpace with value collapse. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-whiteSpace-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-whiteSpace-1-4.xml", class_name="NistschemaSvIvAtomicIntegerWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_white_space_nistxml_sv_iv_atomic_integer_white_space_1_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet whiteSpace with value collapse. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-whiteSpace-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-whiteSpace-1-5.xml", class_name="NistschemaSvIvAtomicIntegerWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_4_nistxml_sv_iv_atomic_integer_enumeration_5_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-5-1.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_4_nistxml_sv_iv_atomic_integer_enumeration_5_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-5-2.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_4_nistxml_sv_iv_atomic_integer_enumeration_5_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-5-3.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_4_nistxml_sv_iv_atomic_integer_enumeration_5_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-5-4.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_4_nistxml_sv_iv_atomic_integer_enumeration_5_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-5-5.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_3_nistxml_sv_iv_atomic_integer_enumeration_4_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-4-1.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_3_nistxml_sv_iv_atomic_integer_enumeration_4_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-4-2.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_3_nistxml_sv_iv_atomic_integer_enumeration_4_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-4-3.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_3_nistxml_sv_iv_atomic_integer_enumeration_4_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-4-4.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_3_nistxml_sv_iv_atomic_integer_enumeration_4_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-4-5.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_2_nistxml_sv_iv_atomic_integer_enumeration_3_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-3-1.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_2_nistxml_sv_iv_atomic_integer_enumeration_3_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-3-2.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_2_nistxml_sv_iv_atomic_integer_enumeration_3_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-3-3.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_2_nistxml_sv_iv_atomic_integer_enumeration_3_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-3-4.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_2_nistxml_sv_iv_atomic_integer_enumeration_3_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-3-5.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_1_nistxml_sv_iv_atomic_integer_enumeration_2_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-2-1.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_1_nistxml_sv_iv_atomic_integer_enumeration_2_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-2-2.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_1_nistxml_sv_iv_atomic_integer_enumeration_2_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-2-3.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_1_nistxml_sv_iv_atomic_integer_enumeration_2_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-2-4.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_1_nistxml_sv_iv_atomic_integer_enumeration_2_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-2-5.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_nistxml_sv_iv_atomic_integer_enumeration_1_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-1-1.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_nistxml_sv_iv_atomic_integer_enumeration_1_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-1-2.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_nistxml_sv_iv_atomic_integer_enumeration_1_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-1-3.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_nistxml_sv_iv_atomic_integer_enumeration_1_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-1-4.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_enumeration_nistxml_sv_iv_atomic_integer_enumeration_1_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-enumeration-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-enumeration-1-5.xml", class_name="NistschemaSvIvAtomicIntegerEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_4_nistxml_sv_iv_atomic_integer_pattern_5_1(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \d{18}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-5-1.xml", class_name="NistschemaSvIvAtomicIntegerPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_4_nistxml_sv_iv_atomic_integer_pattern_5_2(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \d{18}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-5-2.xml", class_name="NistschemaSvIvAtomicIntegerPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_4_nistxml_sv_iv_atomic_integer_pattern_5_3(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \d{18}. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-5-3.xml", class_name="NistschemaSvIvAtomicIntegerPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_4_nistxml_sv_iv_atomic_integer_pattern_5_4(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \d{18}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-5-4.xml", class_name="NistschemaSvIvAtomicIntegerPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_4_nistxml_sv_iv_atomic_integer_pattern_5_5(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \d{18}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-5-5.xml", class_name="NistschemaSvIvAtomicIntegerPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_3_nistxml_sv_iv_atomic_integer_pattern_4_1(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \d{5}. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-4-1.xml", class_name="NistschemaSvIvAtomicIntegerPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_3_nistxml_sv_iv_atomic_integer_pattern_4_2(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \d{5}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-4-2.xml", class_name="NistschemaSvIvAtomicIntegerPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_3_nistxml_sv_iv_atomic_integer_pattern_4_3(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \d{5}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-4-3.xml", class_name="NistschemaSvIvAtomicIntegerPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_3_nistxml_sv_iv_atomic_integer_pattern_4_4(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \d{5}. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-4-4.xml", class_name="NistschemaSvIvAtomicIntegerPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_3_nistxml_sv_iv_atomic_integer_pattern_4_5(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \d{5}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-4-5.xml", class_name="NistschemaSvIvAtomicIntegerPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_2_nistxml_sv_iv_atomic_integer_pattern_3_1(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{1}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-3-1.xml", class_name="NistschemaSvIvAtomicIntegerPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_2_nistxml_sv_iv_atomic_integer_pattern_3_2(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{1}. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-3-2.xml", class_name="NistschemaSvIvAtomicIntegerPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_2_nistxml_sv_iv_atomic_integer_pattern_3_3(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{1}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-3-3.xml", class_name="NistschemaSvIvAtomicIntegerPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_2_nistxml_sv_iv_atomic_integer_pattern_3_4(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{1}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-3-4.xml", class_name="NistschemaSvIvAtomicIntegerPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_2_nistxml_sv_iv_atomic_integer_pattern_3_5(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{1}. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-3-5.xml", class_name="NistschemaSvIvAtomicIntegerPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_1_nistxml_sv_iv_atomic_integer_pattern_2_1(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{9}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-2-1.xml", class_name="NistschemaSvIvAtomicIntegerPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_1_nistxml_sv_iv_atomic_integer_pattern_2_2(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{9}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-2-2.xml", class_name="NistschemaSvIvAtomicIntegerPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_1_nistxml_sv_iv_atomic_integer_pattern_2_3(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{9}. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-2-3.xml", class_name="NistschemaSvIvAtomicIntegerPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_1_nistxml_sv_iv_atomic_integer_pattern_2_4(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{9}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-2-4.xml", class_name="NistschemaSvIvAtomicIntegerPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_1_nistxml_sv_iv_atomic_integer_pattern_2_5(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{9}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-2-5.xml", class_name="NistschemaSvIvAtomicIntegerPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_nistxml_sv_iv_atomic_integer_pattern_1_1(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{18}. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-1-1.xml", class_name="NistschemaSvIvAtomicIntegerPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_nistxml_sv_iv_atomic_integer_pattern_1_2(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{18}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-1-2.xml", class_name="NistschemaSvIvAtomicIntegerPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_nistxml_sv_iv_atomic_integer_pattern_1_3(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{18}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-1-3.xml", class_name="NistschemaSvIvAtomicIntegerPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_nistxml_sv_iv_atomic_integer_pattern_1_4(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{18}. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-1-4.xml", class_name="NistschemaSvIvAtomicIntegerPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_pattern_nistxml_sv_iv_atomic_integer_pattern_1_5(mode, save_output, output_format): r""" Type atomic/integer is restricted by facet pattern with value \-\d{18}. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-pattern-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-pattern-1-5.xml", class_name="NistschemaSvIvAtomicIntegerPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_4_nistxml_sv_iv_atomic_integer_total_digits_5_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 18. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-5-1.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_4_nistxml_sv_iv_atomic_integer_total_digits_5_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 18. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-5-2.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_4_nistxml_sv_iv_atomic_integer_total_digits_5_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 18. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-5-3.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_4_nistxml_sv_iv_atomic_integer_total_digits_5_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 18. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-5-4.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_4_nistxml_sv_iv_atomic_integer_total_digits_5_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 18. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-5-5.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_3_nistxml_sv_iv_atomic_integer_total_digits_4_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 13. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-4-1.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_3_nistxml_sv_iv_atomic_integer_total_digits_4_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 13. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-4-2.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_3_nistxml_sv_iv_atomic_integer_total_digits_4_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 13. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-4-3.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_3_nistxml_sv_iv_atomic_integer_total_digits_4_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 13. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-4-4.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_3_nistxml_sv_iv_atomic_integer_total_digits_4_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 13. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-4-5.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_2_nistxml_sv_iv_atomic_integer_total_digits_3_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 9. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-3-1.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_2_nistxml_sv_iv_atomic_integer_total_digits_3_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 9. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-3-2.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_2_nistxml_sv_iv_atomic_integer_total_digits_3_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 9. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-3-3.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_2_nistxml_sv_iv_atomic_integer_total_digits_3_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 9. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-3-4.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_2_nistxml_sv_iv_atomic_integer_total_digits_3_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 9. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-3-5.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_1_nistxml_sv_iv_atomic_integer_total_digits_2_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 5. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-2-1.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_1_nistxml_sv_iv_atomic_integer_total_digits_2_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 5. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-2-2.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_1_nistxml_sv_iv_atomic_integer_total_digits_2_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 5. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-2-3.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_1_nistxml_sv_iv_atomic_integer_total_digits_2_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 5. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-2-4.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_1_nistxml_sv_iv_atomic_integer_total_digits_2_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 5. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-2-5.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_nistxml_sv_iv_atomic_integer_total_digits_1_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 1. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-1-1.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_nistxml_sv_iv_atomic_integer_total_digits_1_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 1. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-1-2.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_nistxml_sv_iv_atomic_integer_total_digits_1_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 1. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-1-3.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_nistxml_sv_iv_atomic_integer_total_digits_1_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 1. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-1-4.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_total_digits_nistxml_sv_iv_atomic_integer_total_digits_1_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet totalDigits with value 1. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-totalDigits-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-totalDigits-1-5.xml", class_name="NistschemaSvIvAtomicIntegerTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_fraction_digits_nistxml_sv_iv_atomic_integer_fraction_digits_1_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet fractionDigits with value 0. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-fractionDigits-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-fractionDigits-1-1.xml", class_name="NistschemaSvIvAtomicIntegerFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_fraction_digits_nistxml_sv_iv_atomic_integer_fraction_digits_1_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet fractionDigits with value 0. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-fractionDigits-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-fractionDigits-1-2.xml", class_name="NistschemaSvIvAtomicIntegerFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_fraction_digits_nistxml_sv_iv_atomic_integer_fraction_digits_1_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet fractionDigits with value 0. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-fractionDigits-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-fractionDigits-1-3.xml", class_name="NistschemaSvIvAtomicIntegerFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_fraction_digits_nistxml_sv_iv_atomic_integer_fraction_digits_1_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet fractionDigits with value 0. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-fractionDigits-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-fractionDigits-1-4.xml", class_name="NistschemaSvIvAtomicIntegerFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_fraction_digits_nistxml_sv_iv_atomic_integer_fraction_digits_1_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet fractionDigits with value 0. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-fractionDigits-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-fractionDigits-1-5.xml", class_name="NistschemaSvIvAtomicIntegerFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_4_nistxml_sv_iv_atomic_integer_max_inclusive_5_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-5-1.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_4_nistxml_sv_iv_atomic_integer_max_inclusive_5_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 999999999999999999. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-5-2.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_4_nistxml_sv_iv_atomic_integer_max_inclusive_5_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-5-3.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_4_nistxml_sv_iv_atomic_integer_max_inclusive_5_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-5-4.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_4_nistxml_sv_iv_atomic_integer_max_inclusive_5_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 999999999999999999. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-5-5.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_3_nistxml_sv_iv_atomic_integer_max_inclusive_4_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value -2761698266856349. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-4-1.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_3_nistxml_sv_iv_atomic_integer_max_inclusive_4_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value -2761698266856349. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-4-2.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_3_nistxml_sv_iv_atomic_integer_max_inclusive_4_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value -2761698266856349. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-4-3.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_3_nistxml_sv_iv_atomic_integer_max_inclusive_4_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value -2761698266856349. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-4-4.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_3_nistxml_sv_iv_atomic_integer_max_inclusive_4_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value -2761698266856349. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-4-5.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_2_nistxml_sv_iv_atomic_integer_max_inclusive_3_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 705179181121327491. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-3-1.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_2_nistxml_sv_iv_atomic_integer_max_inclusive_3_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 705179181121327491. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-3-2.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_2_nistxml_sv_iv_atomic_integer_max_inclusive_3_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 705179181121327491. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-3-3.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_2_nistxml_sv_iv_atomic_integer_max_inclusive_3_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 705179181121327491. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-3-4.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_2_nistxml_sv_iv_atomic_integer_max_inclusive_3_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 705179181121327491. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-3-5.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_1_nistxml_sv_iv_atomic_integer_max_inclusive_2_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 828008406281169228. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-2-1.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_1_nistxml_sv_iv_atomic_integer_max_inclusive_2_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 828008406281169228. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-2-2.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_1_nistxml_sv_iv_atomic_integer_max_inclusive_2_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 828008406281169228. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-2-3.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_1_nistxml_sv_iv_atomic_integer_max_inclusive_2_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 828008406281169228. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-2-4.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_1_nistxml_sv_iv_atomic_integer_max_inclusive_2_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value 828008406281169228. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-2-5.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_inclusive_nistxml_sv_iv_atomic_integer_max_inclusive_1_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxInclusive-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxInclusive-1-1.xml", class_name="NistschemaSvIvAtomicIntegerMaxInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_4_nistxml_sv_iv_atomic_integer_max_exclusive_5_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-5-1.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_4_nistxml_sv_iv_atomic_integer_max_exclusive_5_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value 999999999999999999. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-5-2.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_4_nistxml_sv_iv_atomic_integer_max_exclusive_5_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-5-3.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_4_nistxml_sv_iv_atomic_integer_max_exclusive_5_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-5-4.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_4_nistxml_sv_iv_atomic_integer_max_exclusive_5_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value 999999999999999999. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-5-5.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_3_nistxml_sv_iv_atomic_integer_max_exclusive_4_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value -839533034801862807. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-4-1.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_3_nistxml_sv_iv_atomic_integer_max_exclusive_4_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value -839533034801862807. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-4-2.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_3_nistxml_sv_iv_atomic_integer_max_exclusive_4_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value -839533034801862807. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-4-3.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_3_nistxml_sv_iv_atomic_integer_max_exclusive_4_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value -839533034801862807. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-4-4.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_3_nistxml_sv_iv_atomic_integer_max_exclusive_4_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value -839533034801862807. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-4-5.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_2_nistxml_sv_iv_atomic_integer_max_exclusive_3_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value 549869808681548999. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-3-1.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_2_nistxml_sv_iv_atomic_integer_max_exclusive_3_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value 549869808681548999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-3-2.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_2_nistxml_sv_iv_atomic_integer_max_exclusive_3_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value 549869808681548999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-3-3.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_2_nistxml_sv_iv_atomic_integer_max_exclusive_3_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value 549869808681548999. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-3-4.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_2_nistxml_sv_iv_atomic_integer_max_exclusive_3_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value 549869808681548999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-3-5.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_1_nistxml_sv_iv_atomic_integer_max_exclusive_2_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value -863230876206589446. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-2-1.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_1_nistxml_sv_iv_atomic_integer_max_exclusive_2_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value -863230876206589446. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-2-2.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_1_nistxml_sv_iv_atomic_integer_max_exclusive_2_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value -863230876206589446. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-2-3.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_1_nistxml_sv_iv_atomic_integer_max_exclusive_2_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value -863230876206589446. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-2-4.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_1_nistxml_sv_iv_atomic_integer_max_exclusive_2_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value -863230876206589446. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-2-5.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_max_exclusive_nistxml_sv_iv_atomic_integer_max_exclusive_1_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet maxExclusive with value -999999999999999998. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-maxExclusive-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-maxExclusive-1-1.xml", class_name="NistschemaSvIvAtomicIntegerMaxExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_4_nistxml_sv_iv_atomic_integer_min_inclusive_5_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-5-1.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_3_nistxml_sv_iv_atomic_integer_min_inclusive_4_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -183640263935870295. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-4-1.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_3_nistxml_sv_iv_atomic_integer_min_inclusive_4_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -183640263935870295. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-4-2.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_3_nistxml_sv_iv_atomic_integer_min_inclusive_4_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -183640263935870295. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-4-3.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_3_nistxml_sv_iv_atomic_integer_min_inclusive_4_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -183640263935870295. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-4-4.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_3_nistxml_sv_iv_atomic_integer_min_inclusive_4_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -183640263935870295. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-4-5.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_2_nistxml_sv_iv_atomic_integer_min_inclusive_3_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -362471093580558400. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-3-1.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_2_nistxml_sv_iv_atomic_integer_min_inclusive_3_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -362471093580558400. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-3-2.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_2_nistxml_sv_iv_atomic_integer_min_inclusive_3_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -362471093580558400. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-3-3.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_2_nistxml_sv_iv_atomic_integer_min_inclusive_3_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -362471093580558400. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-3-4.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_2_nistxml_sv_iv_atomic_integer_min_inclusive_3_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -362471093580558400. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-3-5.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_1_nistxml_sv_iv_atomic_integer_min_inclusive_2_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value 156487900906511434. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-2-1.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_1_nistxml_sv_iv_atomic_integer_min_inclusive_2_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value 156487900906511434. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-2-2.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_1_nistxml_sv_iv_atomic_integer_min_inclusive_2_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value 156487900906511434. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-2-3.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_1_nistxml_sv_iv_atomic_integer_min_inclusive_2_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value 156487900906511434. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-2-4.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_1_nistxml_sv_iv_atomic_integer_min_inclusive_2_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value 156487900906511434. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-2-5.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_nistxml_sv_iv_atomic_integer_min_inclusive_1_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-1-1.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_nistxml_sv_iv_atomic_integer_min_inclusive_1_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-1-2.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_nistxml_sv_iv_atomic_integer_min_inclusive_1_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-1-3.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_nistxml_sv_iv_atomic_integer_min_inclusive_1_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-1-4.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_inclusive_nistxml_sv_iv_atomic_integer_min_inclusive_1_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minInclusive-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minInclusive-1-5.xml", class_name="NistschemaSvIvAtomicIntegerMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_4_nistxml_sv_iv_atomic_integer_min_exclusive_5_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 999999999999999998. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-5.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-5-1.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_3_nistxml_sv_iv_atomic_integer_min_exclusive_4_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 470740450062970382. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-4-1.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_3_nistxml_sv_iv_atomic_integer_min_exclusive_4_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 470740450062970382. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-4-2.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_3_nistxml_sv_iv_atomic_integer_min_exclusive_4_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 470740450062970382. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-4-3.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_3_nistxml_sv_iv_atomic_integer_min_exclusive_4_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 470740450062970382. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-4-4.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_3_nistxml_sv_iv_atomic_integer_min_exclusive_4_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 470740450062970382. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-4.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-4-5.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_2_nistxml_sv_iv_atomic_integer_min_exclusive_3_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 389578809107570477. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-3-1.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_2_nistxml_sv_iv_atomic_integer_min_exclusive_3_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 389578809107570477. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-3-2.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_2_nistxml_sv_iv_atomic_integer_min_exclusive_3_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 389578809107570477. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-3-3.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_2_nistxml_sv_iv_atomic_integer_min_exclusive_3_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 389578809107570477. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-3-4.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_2_nistxml_sv_iv_atomic_integer_min_exclusive_3_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 389578809107570477. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-3.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-3-5.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_1_nistxml_sv_iv_atomic_integer_min_exclusive_2_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 511594901568435787. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-2-1.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_1_nistxml_sv_iv_atomic_integer_min_exclusive_2_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 511594901568435787. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-2-2.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_1_nistxml_sv_iv_atomic_integer_min_exclusive_2_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 511594901568435787. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-2-3.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_1_nistxml_sv_iv_atomic_integer_min_exclusive_2_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 511594901568435787. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-2-4.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_1_nistxml_sv_iv_atomic_integer_min_exclusive_2_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value 511594901568435787. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-2.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-2-5.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_nistxml_sv_iv_atomic_integer_min_exclusive_1_1(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-1-1.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_nistxml_sv_iv_atomic_integer_min_exclusive_1_2(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-1-2.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_nistxml_sv_iv_atomic_integer_min_exclusive_1_3(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-1-3.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_nistxml_sv_iv_atomic_integer_min_exclusive_1_4(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-1-4.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_integer_min_exclusive_nistxml_sv_iv_atomic_integer_min_exclusive_1_5(mode, save_output, output_format): """ Type atomic/integer is restricted by facet minExclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-minExclusive-1.xsd", instance="nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-minExclusive-1-5.xml", class_name="NistschemaSvIvAtomicIntegerMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_white_space_nistxml_sv_iv_atomic_decimal_white_space_1_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet whiteSpace with value collapse. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-whiteSpace-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-whiteSpace-1-1.xml", class_name="NistschemaSvIvAtomicDecimalWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_white_space_nistxml_sv_iv_atomic_decimal_white_space_1_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet whiteSpace with value collapse. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-whiteSpace-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-whiteSpace-1-2.xml", class_name="NistschemaSvIvAtomicDecimalWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_white_space_nistxml_sv_iv_atomic_decimal_white_space_1_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet whiteSpace with value collapse. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-whiteSpace-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-whiteSpace-1-3.xml", class_name="NistschemaSvIvAtomicDecimalWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_white_space_nistxml_sv_iv_atomic_decimal_white_space_1_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet whiteSpace with value collapse. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-whiteSpace-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-whiteSpace-1-4.xml", class_name="NistschemaSvIvAtomicDecimalWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_white_space_nistxml_sv_iv_atomic_decimal_white_space_1_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet whiteSpace with value collapse. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-whiteSpace-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-whiteSpace-1-5.xml", class_name="NistschemaSvIvAtomicDecimalWhiteSpace1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_4_nistxml_sv_iv_atomic_decimal_enumeration_5_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-5-1.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_4_nistxml_sv_iv_atomic_decimal_enumeration_5_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-5-2.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_4_nistxml_sv_iv_atomic_decimal_enumeration_5_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-5-3.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_4_nistxml_sv_iv_atomic_decimal_enumeration_5_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-5-4.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_4_nistxml_sv_iv_atomic_decimal_enumeration_5_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-5-5.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_3_nistxml_sv_iv_atomic_decimal_enumeration_4_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-4-1.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_3_nistxml_sv_iv_atomic_decimal_enumeration_4_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-4-2.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_3_nistxml_sv_iv_atomic_decimal_enumeration_4_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-4-3.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_3_nistxml_sv_iv_atomic_decimal_enumeration_4_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-4-4.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_3_nistxml_sv_iv_atomic_decimal_enumeration_4_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-4-5.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_2_nistxml_sv_iv_atomic_decimal_enumeration_3_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-3-1.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_2_nistxml_sv_iv_atomic_decimal_enumeration_3_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-3-2.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_2_nistxml_sv_iv_atomic_decimal_enumeration_3_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-3-3.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_2_nistxml_sv_iv_atomic_decimal_enumeration_3_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-3-4.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_2_nistxml_sv_iv_atomic_decimal_enumeration_3_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-3-5.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_1_nistxml_sv_iv_atomic_decimal_enumeration_2_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-2-1.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_1_nistxml_sv_iv_atomic_decimal_enumeration_2_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-2-2.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_1_nistxml_sv_iv_atomic_decimal_enumeration_2_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-2-3.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_1_nistxml_sv_iv_atomic_decimal_enumeration_2_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-2-4.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_1_nistxml_sv_iv_atomic_decimal_enumeration_2_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-2-5.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_nistxml_sv_iv_atomic_decimal_enumeration_1_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-1-1.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_nistxml_sv_iv_atomic_decimal_enumeration_1_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-1-2.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_nistxml_sv_iv_atomic_decimal_enumeration_1_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-1-3.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_nistxml_sv_iv_atomic_decimal_enumeration_1_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-1-4.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_enumeration_nistxml_sv_iv_atomic_decimal_enumeration_1_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet enumeration. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-enumeration-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-enumeration-1-5.xml", class_name="NistschemaSvIvAtomicDecimalEnumeration1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_4_nistxml_sv_iv_atomic_decimal_pattern_5_1(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \d{5}\.\d{13}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-5-1.xml", class_name="NistschemaSvIvAtomicDecimalPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_4_nistxml_sv_iv_atomic_decimal_pattern_5_2(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \d{5}\.\d{13}. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-5-2.xml", class_name="NistschemaSvIvAtomicDecimalPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_4_nistxml_sv_iv_atomic_decimal_pattern_5_3(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \d{5}\.\d{13}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-5-3.xml", class_name="NistschemaSvIvAtomicDecimalPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_4_nistxml_sv_iv_atomic_decimal_pattern_5_4(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \d{5}\.\d{13}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-5-4.xml", class_name="NistschemaSvIvAtomicDecimalPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_4_nistxml_sv_iv_atomic_decimal_pattern_5_5(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \d{5}\.\d{13}. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-5-5.xml", class_name="NistschemaSvIvAtomicDecimalPattern5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_3_nistxml_sv_iv_atomic_decimal_pattern_4_1(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \.\d{13}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-4-1.xml", class_name="NistschemaSvIvAtomicDecimalPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_3_nistxml_sv_iv_atomic_decimal_pattern_4_2(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \.\d{13}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-4-2.xml", class_name="NistschemaSvIvAtomicDecimalPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_3_nistxml_sv_iv_atomic_decimal_pattern_4_3(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \.\d{13}. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-4-3.xml", class_name="NistschemaSvIvAtomicDecimalPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_3_nistxml_sv_iv_atomic_decimal_pattern_4_4(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \.\d{13}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-4-4.xml", class_name="NistschemaSvIvAtomicDecimalPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_3_nistxml_sv_iv_atomic_decimal_pattern_4_5(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \.\d{13}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-4-5.xml", class_name="NistschemaSvIvAtomicDecimalPattern4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_2_nistxml_sv_iv_atomic_decimal_pattern_3_1(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \-\d{1}\.\d{8}. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-3-1.xml", class_name="NistschemaSvIvAtomicDecimalPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_2_nistxml_sv_iv_atomic_decimal_pattern_3_2(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \-\d{1}\.\d{8}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-3-2.xml", class_name="NistschemaSvIvAtomicDecimalPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_2_nistxml_sv_iv_atomic_decimal_pattern_3_3(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \-\d{1}\.\d{8}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-3-3.xml", class_name="NistschemaSvIvAtomicDecimalPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_2_nistxml_sv_iv_atomic_decimal_pattern_3_4(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \-\d{1}\.\d{8}. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-3-4.xml", class_name="NistschemaSvIvAtomicDecimalPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_2_nistxml_sv_iv_atomic_decimal_pattern_3_5(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \-\d{1}\.\d{8}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-3-5.xml", class_name="NistschemaSvIvAtomicDecimalPattern3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_1_nistxml_sv_iv_atomic_decimal_pattern_2_1(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \-\d{2}\.\d{3}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-2-1.xml", class_name="NistschemaSvIvAtomicDecimalPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_1_nistxml_sv_iv_atomic_decimal_pattern_2_2(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \-\d{2}\.\d{3}. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-2-2.xml", class_name="NistschemaSvIvAtomicDecimalPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_1_nistxml_sv_iv_atomic_decimal_pattern_2_3(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \-\d{2}\.\d{3}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-2-3.xml", class_name="NistschemaSvIvAtomicDecimalPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_1_nistxml_sv_iv_atomic_decimal_pattern_2_4(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \-\d{2}\.\d{3}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-2-4.xml", class_name="NistschemaSvIvAtomicDecimalPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_1_nistxml_sv_iv_atomic_decimal_pattern_2_5(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \-\d{2}\.\d{3}. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-2-5.xml", class_name="NistschemaSvIvAtomicDecimalPattern2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_nistxml_sv_iv_atomic_decimal_pattern_1_1(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \d{1}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-1-1.xml", class_name="NistschemaSvIvAtomicDecimalPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_nistxml_sv_iv_atomic_decimal_pattern_1_2(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \d{1}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-1-2.xml", class_name="NistschemaSvIvAtomicDecimalPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_nistxml_sv_iv_atomic_decimal_pattern_1_3(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \d{1}. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-1-3.xml", class_name="NistschemaSvIvAtomicDecimalPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_nistxml_sv_iv_atomic_decimal_pattern_1_4(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \d{1}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-1-4.xml", class_name="NistschemaSvIvAtomicDecimalPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_pattern_nistxml_sv_iv_atomic_decimal_pattern_1_5(mode, save_output, output_format): r""" Type atomic/decimal is restricted by facet pattern with value \d{1}. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-pattern-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-pattern-1-5.xml", class_name="NistschemaSvIvAtomicDecimalPattern1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_4_nistxml_sv_iv_atomic_decimal_total_digits_5_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 18. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-5-1.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_4_nistxml_sv_iv_atomic_decimal_total_digits_5_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 18. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-5-2.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_4_nistxml_sv_iv_atomic_decimal_total_digits_5_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 18. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-5-3.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_4_nistxml_sv_iv_atomic_decimal_total_digits_5_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 18. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-5-4.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_4_nistxml_sv_iv_atomic_decimal_total_digits_5_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 18. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-5-5.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_3_nistxml_sv_iv_atomic_decimal_total_digits_4_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 13. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-4-1.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_3_nistxml_sv_iv_atomic_decimal_total_digits_4_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 13. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-4-2.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_3_nistxml_sv_iv_atomic_decimal_total_digits_4_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 13. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-4-3.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_3_nistxml_sv_iv_atomic_decimal_total_digits_4_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 13. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-4-4.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_3_nistxml_sv_iv_atomic_decimal_total_digits_4_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 13. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-4-5.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_2_nistxml_sv_iv_atomic_decimal_total_digits_3_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 9. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-3-1.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_2_nistxml_sv_iv_atomic_decimal_total_digits_3_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 9. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-3-2.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_2_nistxml_sv_iv_atomic_decimal_total_digits_3_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 9. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-3-3.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_2_nistxml_sv_iv_atomic_decimal_total_digits_3_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 9. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-3-4.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_2_nistxml_sv_iv_atomic_decimal_total_digits_3_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 9. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-3-5.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_1_nistxml_sv_iv_atomic_decimal_total_digits_2_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 5. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-2-1.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_1_nistxml_sv_iv_atomic_decimal_total_digits_2_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 5. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-2-2.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_1_nistxml_sv_iv_atomic_decimal_total_digits_2_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 5. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-2-3.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_1_nistxml_sv_iv_atomic_decimal_total_digits_2_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 5. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-2-4.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_1_nistxml_sv_iv_atomic_decimal_total_digits_2_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 5. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-2-5.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_nistxml_sv_iv_atomic_decimal_total_digits_1_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 1. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-1-1.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_nistxml_sv_iv_atomic_decimal_total_digits_1_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 1. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-1-2.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_nistxml_sv_iv_atomic_decimal_total_digits_1_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 1. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-1-3.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_nistxml_sv_iv_atomic_decimal_total_digits_1_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 1. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-1-4.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_total_digits_nistxml_sv_iv_atomic_decimal_total_digits_1_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet totalDigits with value 1. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-totalDigits-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-totalDigits-1-5.xml", class_name="NistschemaSvIvAtomicDecimalTotalDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_4_nistxml_sv_iv_atomic_decimal_fraction_digits_5_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 18. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-5-1.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_4_nistxml_sv_iv_atomic_decimal_fraction_digits_5_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 18. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-5-2.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_4_nistxml_sv_iv_atomic_decimal_fraction_digits_5_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 18. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-5-3.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_4_nistxml_sv_iv_atomic_decimal_fraction_digits_5_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 18. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-5-4.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_4_nistxml_sv_iv_atomic_decimal_fraction_digits_5_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 18. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-5-5.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_3_nistxml_sv_iv_atomic_decimal_fraction_digits_4_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 12. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-4-1.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_3_nistxml_sv_iv_atomic_decimal_fraction_digits_4_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 12. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-4-2.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_3_nistxml_sv_iv_atomic_decimal_fraction_digits_4_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 12. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-4-3.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_3_nistxml_sv_iv_atomic_decimal_fraction_digits_4_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 12. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-4-4.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_3_nistxml_sv_iv_atomic_decimal_fraction_digits_4_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 12. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-4-5.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_2_nistxml_sv_iv_atomic_decimal_fraction_digits_3_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 8. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-3-1.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_2_nistxml_sv_iv_atomic_decimal_fraction_digits_3_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 8. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-3-2.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_2_nistxml_sv_iv_atomic_decimal_fraction_digits_3_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 8. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-3-3.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_2_nistxml_sv_iv_atomic_decimal_fraction_digits_3_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 8. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-3-4.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_2_nistxml_sv_iv_atomic_decimal_fraction_digits_3_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 8. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-3-5.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_1_nistxml_sv_iv_atomic_decimal_fraction_digits_2_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 4. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-2-1.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_1_nistxml_sv_iv_atomic_decimal_fraction_digits_2_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 4. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-2-2.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_1_nistxml_sv_iv_atomic_decimal_fraction_digits_2_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 4. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-2-3.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_1_nistxml_sv_iv_atomic_decimal_fraction_digits_2_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 4. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-2-4.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_1_nistxml_sv_iv_atomic_decimal_fraction_digits_2_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 4. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-2-5.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_nistxml_sv_iv_atomic_decimal_fraction_digits_1_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 0. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-1-1.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_nistxml_sv_iv_atomic_decimal_fraction_digits_1_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 0. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-1-2.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_nistxml_sv_iv_atomic_decimal_fraction_digits_1_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 0. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-1-3.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_nistxml_sv_iv_atomic_decimal_fraction_digits_1_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 0. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-1-4.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_fraction_digits_nistxml_sv_iv_atomic_decimal_fraction_digits_1_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet fractionDigits with value 0. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-fractionDigits-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-fractionDigits-1-5.xml", class_name="NistschemaSvIvAtomicDecimalFractionDigits1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_4_nistxml_sv_iv_atomic_decimal_max_inclusive_5_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-5-1.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_4_nistxml_sv_iv_atomic_decimal_max_inclusive_5_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value 999999999999999999. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-5-2.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_4_nistxml_sv_iv_atomic_decimal_max_inclusive_5_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-5-3.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_4_nistxml_sv_iv_atomic_decimal_max_inclusive_5_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-5-4.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_4_nistxml_sv_iv_atomic_decimal_max_inclusive_5_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value 999999999999999999. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-5-5.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_3_nistxml_sv_iv_atomic_decimal_max_inclusive_4_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value -95776055693671313. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-4-1.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_3_nistxml_sv_iv_atomic_decimal_max_inclusive_4_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value -95776055693671313. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-4-2.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_3_nistxml_sv_iv_atomic_decimal_max_inclusive_4_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value -95776055693671313. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-4-3.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_3_nistxml_sv_iv_atomic_decimal_max_inclusive_4_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value -95776055693671313. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-4-4.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_3_nistxml_sv_iv_atomic_decimal_max_inclusive_4_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value -95776055693671313. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-4-5.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_2_nistxml_sv_iv_atomic_decimal_max_inclusive_3_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value -888403528420030673. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-3-1.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_2_nistxml_sv_iv_atomic_decimal_max_inclusive_3_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value -888403528420030673. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-3-2.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_2_nistxml_sv_iv_atomic_decimal_max_inclusive_3_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value -888403528420030673. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-3-3.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_2_nistxml_sv_iv_atomic_decimal_max_inclusive_3_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value -888403528420030673. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-3-4.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_2_nistxml_sv_iv_atomic_decimal_max_inclusive_3_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value -888403528420030673. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-3-5.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_1_nistxml_sv_iv_atomic_decimal_max_inclusive_2_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value 625897845365533055. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-2-1.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_1_nistxml_sv_iv_atomic_decimal_max_inclusive_2_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value 625897845365533055. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-2-2.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_1_nistxml_sv_iv_atomic_decimal_max_inclusive_2_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value 625897845365533055. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-2-3.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_1_nistxml_sv_iv_atomic_decimal_max_inclusive_2_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value 625897845365533055. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-2-4.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_1_nistxml_sv_iv_atomic_decimal_max_inclusive_2_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value 625897845365533055. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-2-5.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_inclusive_nistxml_sv_iv_atomic_decimal_max_inclusive_1_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxInclusive-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxInclusive-1-1.xml", class_name="NistschemaSvIvAtomicDecimalMaxInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_4_nistxml_sv_iv_atomic_decimal_max_exclusive_5_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-5-1.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_4_nistxml_sv_iv_atomic_decimal_max_exclusive_5_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 999999999999999999. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-5-2.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_4_nistxml_sv_iv_atomic_decimal_max_exclusive_5_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-5-3.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_4_nistxml_sv_iv_atomic_decimal_max_exclusive_5_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-5-4.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_4_nistxml_sv_iv_atomic_decimal_max_exclusive_5_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 999999999999999999. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-5-5.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_3_nistxml_sv_iv_atomic_decimal_max_exclusive_4_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value -214771926190724381. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-4-1.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_3_nistxml_sv_iv_atomic_decimal_max_exclusive_4_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value -214771926190724381. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-4-2.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_3_nistxml_sv_iv_atomic_decimal_max_exclusive_4_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value -214771926190724381. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-4-3.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_3_nistxml_sv_iv_atomic_decimal_max_exclusive_4_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value -214771926190724381. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-4-4.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_3_nistxml_sv_iv_atomic_decimal_max_exclusive_4_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value -214771926190724381. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-4-5.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_2_nistxml_sv_iv_atomic_decimal_max_exclusive_3_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 171942968603657986. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-3-1.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_2_nistxml_sv_iv_atomic_decimal_max_exclusive_3_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 171942968603657986. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-3-2.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_2_nistxml_sv_iv_atomic_decimal_max_exclusive_3_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 171942968603657986. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-3-3.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_2_nistxml_sv_iv_atomic_decimal_max_exclusive_3_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 171942968603657986. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-3-4.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_2_nistxml_sv_iv_atomic_decimal_max_exclusive_3_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 171942968603657986. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-3-5.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_1_nistxml_sv_iv_atomic_decimal_max_exclusive_2_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 78119693427168402. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-2-1.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_1_nistxml_sv_iv_atomic_decimal_max_exclusive_2_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 78119693427168402. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-2-2.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_1_nistxml_sv_iv_atomic_decimal_max_exclusive_2_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 78119693427168402. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-2-3.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_1_nistxml_sv_iv_atomic_decimal_max_exclusive_2_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 78119693427168402. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-2-4.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_1_nistxml_sv_iv_atomic_decimal_max_exclusive_2_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value 78119693427168402. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-2-5.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_max_exclusive_nistxml_sv_iv_atomic_decimal_max_exclusive_1_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet maxExclusive with value -999999999999999998. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-maxExclusive-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-maxExclusive-1-1.xml", class_name="NistschemaSvIvAtomicDecimalMaxExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_4_nistxml_sv_iv_atomic_decimal_min_inclusive_5_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value 999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-5-1.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_3_nistxml_sv_iv_atomic_decimal_min_inclusive_4_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value 325207740352921658. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-4-1.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_3_nistxml_sv_iv_atomic_decimal_min_inclusive_4_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value 325207740352921658. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-4-2.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_3_nistxml_sv_iv_atomic_decimal_min_inclusive_4_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value 325207740352921658. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-4-3.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_3_nistxml_sv_iv_atomic_decimal_min_inclusive_4_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value 325207740352921658. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-4-4.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_3_nistxml_sv_iv_atomic_decimal_min_inclusive_4_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value 325207740352921658. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-4-5.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_2_nistxml_sv_iv_atomic_decimal_min_inclusive_3_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value -785368448026986020. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-3-1.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_2_nistxml_sv_iv_atomic_decimal_min_inclusive_3_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value -785368448026986020. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-3-2.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_2_nistxml_sv_iv_atomic_decimal_min_inclusive_3_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value -785368448026986020. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-3-3.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_2_nistxml_sv_iv_atomic_decimal_min_inclusive_3_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value -785368448026986020. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-3-4.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_2_nistxml_sv_iv_atomic_decimal_min_inclusive_3_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value -785368448026986020. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-3-5.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_1_nistxml_sv_iv_atomic_decimal_min_inclusive_2_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value 229822855408968073. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-2-1.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_1_nistxml_sv_iv_atomic_decimal_min_inclusive_2_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value 229822855408968073. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-2-2.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_1_nistxml_sv_iv_atomic_decimal_min_inclusive_2_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value 229822855408968073. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-2-3.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_1_nistxml_sv_iv_atomic_decimal_min_inclusive_2_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value 229822855408968073. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-2-4.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_1_nistxml_sv_iv_atomic_decimal_min_inclusive_2_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value 229822855408968073. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-2-5.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_nistxml_sv_iv_atomic_decimal_min_inclusive_1_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-1-1.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_nistxml_sv_iv_atomic_decimal_min_inclusive_1_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-1-2.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_nistxml_sv_iv_atomic_decimal_min_inclusive_1_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-1-3.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_nistxml_sv_iv_atomic_decimal_min_inclusive_1_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-1-4.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_inclusive_nistxml_sv_iv_atomic_decimal_min_inclusive_1_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minInclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minInclusive-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minInclusive-1-5.xml", class_name="NistschemaSvIvAtomicDecimalMinInclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_4_nistxml_sv_iv_atomic_decimal_min_exclusive_5_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value 999999999999999998. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-5.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-5-1.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive5", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_3_nistxml_sv_iv_atomic_decimal_min_exclusive_4_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -294253147230818967. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-4-1.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_3_nistxml_sv_iv_atomic_decimal_min_exclusive_4_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -294253147230818967. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-4-2.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_3_nistxml_sv_iv_atomic_decimal_min_exclusive_4_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -294253147230818967. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-4-3.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_3_nistxml_sv_iv_atomic_decimal_min_exclusive_4_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -294253147230818967. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-4-4.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_3_nistxml_sv_iv_atomic_decimal_min_exclusive_4_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -294253147230818967. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-4.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-4-5.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive4", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_2_nistxml_sv_iv_atomic_decimal_min_exclusive_3_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -67428259604688900. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-3-1.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_2_nistxml_sv_iv_atomic_decimal_min_exclusive_3_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -67428259604688900. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-3-2.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_2_nistxml_sv_iv_atomic_decimal_min_exclusive_3_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -67428259604688900. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-3-3.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_2_nistxml_sv_iv_atomic_decimal_min_exclusive_3_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -67428259604688900. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-3-4.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_2_nistxml_sv_iv_atomic_decimal_min_exclusive_3_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -67428259604688900. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-3.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-3-5.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive3", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_1_nistxml_sv_iv_atomic_decimal_min_exclusive_2_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value 631308414640570968. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-2-1.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_1_nistxml_sv_iv_atomic_decimal_min_exclusive_2_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value 631308414640570968. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-2-2.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_1_nistxml_sv_iv_atomic_decimal_min_exclusive_2_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value 631308414640570968. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-2-3.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_1_nistxml_sv_iv_atomic_decimal_min_exclusive_2_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value 631308414640570968. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-2-4.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_1_nistxml_sv_iv_atomic_decimal_min_exclusive_2_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value 631308414640570968. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-2.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-2-5.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive2", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_nistxml_sv_iv_atomic_decimal_min_exclusive_1_1(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-1-1.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_nistxml_sv_iv_atomic_decimal_min_exclusive_1_2(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-1-2.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_nistxml_sv_iv_atomic_decimal_min_exclusive_1_3(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-1-3.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_nistxml_sv_iv_atomic_decimal_min_exclusive_1_4(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -999999999999999999. 
""" assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-1-4.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", ) def test_atomic_decimal_min_exclusive_nistxml_sv_iv_atomic_decimal_min_exclusive_1_5(mode, save_output, output_format): """ Type atomic/decimal is restricted by facet minExclusive with value -999999999999999999. """ assert_bindings( schema="nistData/atomic/decimal/Schema+Instance/NISTSchema-SV-IV-atomic-decimal-minExclusive-1.xsd", instance="nistData/atomic/decimal/Schema+Instance/NISTXML-SV-IV-atomic-decimal-minExclusive-1-5.xml", class_name="NistschemaSvIvAtomicDecimalMinExclusive1", version="1.1", mode=mode, save_output=save_output, output_format=output_format, structure_style="filenames", )
42.519451
149
0.743563
50,316
433,911
6.125586
0.005147
0.023711
0.059277
0.06718
0.997563
0.997499
0.995237
0.994809
0.994037
0.993667
0
0.030819
0.160915
433,911
10,204
150
42.523618
0.81572
0.114924
0
0.714391
0
0.179779
0.406889
0.387301
0
0
0
0
0.090037
1
0.089889
false
0
0.000148
0
0.090037
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
dc9ce4fab2e970720267a662df0bca01bd98647e
59
py
Python
gym-number-link/gym_number_link/envs/__init__.py
EmpereurCC/Spinningup_uptodate
f93e7edf778237a0e79bfdb45f7529522bd34ed4
[ "MIT" ]
null
null
null
gym-number-link/gym_number_link/envs/__init__.py
EmpereurCC/Spinningup_uptodate
f93e7edf778237a0e79bfdb45f7529522bd34ed4
[ "MIT" ]
2
2019-06-18T14:12:26.000Z
2019-06-18T14:55:10.000Z
gym-number-link/gym_number_link/envs/__init__.py
EmpereurCC/Spinningup_uptodate
f93e7edf778237a0e79bfdb45f7529522bd34ed4
[ "MIT" ]
1
2019-06-11T15:07:59.000Z
2019-06-11T15:07:59.000Z
from gym_number_link.envs.gym_number_link import NumberLink
59
59
0.915254
10
59
5
0.7
0.36
0.52
0
0
0
0
0
0
0
0
0
0.050847
59
1
59
59
0.892857
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
dcb5e9cc6724b53d26e396d8f3bac72a88a02169
113
py
Python
_package/xms/grid/ugrid/__init__.py
kwryankrattiger/xmsgrid
a7b0058b7ae783db37638f83433cf7127efbcea4
[ "BSD-2-Clause" ]
1
2018-07-19T14:53:52.000Z
2018-07-19T14:53:52.000Z
_package/xms/grid/ugrid/__init__.py
kwryankrattiger/xmsgrid
a7b0058b7ae783db37638f83433cf7127efbcea4
[ "BSD-2-Clause" ]
11
2018-10-09T12:39:24.000Z
2022-03-16T18:16:27.000Z
_package/xms/grid/ugrid/__init__.py
kwryankrattiger/xmsgrid
a7b0058b7ae783db37638f83433cf7127efbcea4
[ "BSD-2-Clause" ]
2
2020-09-24T22:38:54.000Z
2021-04-14T21:05:34.000Z
from . import ugrid # NOQA: F401 from . import ugrid_utils # NOQA: F401 from .ugrid import UGrid # NOQA: F401
28.25
39
0.707965
17
113
4.647059
0.352941
0.417722
0.379747
0.481013
0
0
0
0
0
0
0
0.101124
0.212389
113
3
40
37.666667
0.786517
0.283186
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
f4b6cc6bf760c38dadb69536b36255dde31922a0
2,370
py
Python
src/draw.py
Sayeem2004/Porcelain
ead84fb85807dad102fe53d426dae19ce0f6654d
[ "MIT" ]
null
null
null
src/draw.py
Sayeem2004/Porcelain
ead84fb85807dad102fe53d426dae19ce0f6654d
[ "MIT" ]
null
null
null
src/draw.py
Sayeem2004/Porcelain
ead84fb85807dad102fe53d426dae19ce0f6654d
[ "MIT" ]
null
null
null
# Imports import var # Function for M1 button click def get_coords(event): # Drawing Point var.prev_x, var.prev_y = event.x, event.y curr = var.canvas.create_line((var.prev_x, var.prev_y, var.prev_x+1, var.prev_y+1), fill=var.line_color, width=var.line_width) # Adding point to list of canvas objects if (len(var.lines) == 0): var.lines.append([curr, var.canvas.coords(curr)]) elif (var.line_ind == len(var.lines)-1): var.line_ind += 1 var.lines.append([curr, var.canvas.coords(curr)]) else: var.line_ind += 1 var.lines = var.lines[:var.line_ind] var.lines.append([curr, var.canvas.coords(curr)]) # Function for M1 button movement def draw_line(event): # Drawing line curr = var.canvas.create_line((var.prev_x, var.prev_y, event.x, event.y), fill=var.line_color, width=var.line_width) var.prev_x, var.prev_y = event.x, event.y # Adding line to list of canvas objects if (len(var.lines) == 0): var.lines.append([curr, var.canvas.coords(curr)]) elif (var.line_ind == len(var.lines)-1): var.line_ind += 1 var.lines.append([curr, var.canvas.coords(curr)]) else: var.line_ind += 1 var.lines = var.lines[:var.line_ind] var.lines.append([curr, var.canvas.coords(curr)]) # Function for Ctrl-Z def undo_line(event): # Undoing multiple lines for _ in range(var.ch_line): # Casework for undoing lines if (var.line_ind > 0): var.canvas.delete(var.lines[var.line_ind][0]) var.line_ind -= 1 else: var.canvas.delete(var.lines[var.line_ind][0]) # Function for Ctrl-Y def redo_line(event): # Redoing multiple lines for _ in range(var.ch_line): # Casework for redoing lines if (var.line_ind < len(var.lines)-1): var.line_ind += 1 crds = var.lines[var.line_ind][1] var.canvas.delete(var.lines[var.line_ind][0]) curr = var.canvas.create_line(crds, fill=var.line_color, width=var.line_width) var.lines[var.line_ind][0] = curr else: crds = var.lines[var.line_ind][1] var.canvas.delete(var.lines[var.line_ind][0]) curr = var.canvas.create_line(crds, fill=var.line_color, width=var.line_width) var.lines[var.line_ind][0] = 
curr
36.461538
130
0.622363
372
2,370
3.830645
0.142473
0.137544
0.140351
0.105263
0.825965
0.806316
0.806316
0.806316
0.783158
0.712281
0
0.013274
0.237131
2,370
64
131
37.03125
0.774889
0.131646
0
0.777778
0
0
0
0
0
0
0
0
0
1
0.088889
false
0
0.022222
0
0.111111
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
52079f658ac8b57446fdd45c2c3fb3dbfb2827fc
140
py
Python
exceptions.py
kelvin-jose/stackX
329246225f847dbe8045992222fec2ddacdbcbfc
[ "MIT" ]
null
null
null
exceptions.py
kelvin-jose/stackX
329246225f847dbe8045992222fec2ddacdbcbfc
[ "MIT" ]
null
null
null
exceptions.py
kelvin-jose/stackX
329246225f847dbe8045992222fec2ddacdbcbfc
[ "MIT" ]
null
null
null
class StackOverflow(Exception): def __init__(self): pass class StackUnderflow(Exception): def __init__(self): pass
17.5
32
0.664286
14
140
6.071429
0.571429
0.282353
0.376471
0.470588
0.564706
0
0
0
0
0
0
0
0.25
140
8
33
17.5
0.809524
0
0
0.666667
0
0
0
0
0
0
0
0
0
1
0.333333
false
0.333333
0
0
0.666667
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
8
520a44a5e393aaff00752a66a623a6c3b5959e0e
40,185
py
Python
limonero/schema.py
eubr-bigsea/limonero
54851b73bb1e4f5626b3d38ea7eeb50f3ed2e3c5
[ "Apache-2.0" ]
1
2018-01-01T20:35:43.000Z
2018-01-01T20:35:43.000Z
limonero/schema.py
eubr-bigsea/limonero
54851b73bb1e4f5626b3d38ea7eeb50f3ed2e3c5
[ "Apache-2.0" ]
37
2017-02-24T17:07:25.000Z
2021-09-02T14:49:19.000Z
limonero/schema.py
eubr-bigsea/limonero
54851b73bb1e4f5626b3d38ea7eeb50f3ed2e3c5
[ "Apache-2.0" ]
2
2019-11-05T13:45:45.000Z
2020-11-13T22:02:37.000Z
# -*- coding: utf-8 -*- import datetime import json from copy import deepcopy from marshmallow import Schema, fields, post_load, post_dump, EXCLUDE, INCLUDE from marshmallow.validate import OneOf from flask_babel import gettext from limonero.models import * def partial_schema_factory(schema_cls): schema = schema_cls(partial=True) for field_name, field in list(schema.fields.items()): if isinstance(field, fields.Nested): new_field = deepcopy(field) new_field.schema.partial = True schema.fields[field_name] = new_field return schema def translate_validation(validation_errors): for field, errors in list(validation_errors.items()): if isinstance(errors, dict): validation_errors[field] = translate_validation(errors) else: validation_errors[field] = [gettext(error) for error in errors] return validation_errors def load_json(str_value): try: return json.loads(str_value) except BaseException: return None # region Protected def generate_download_token(identifier, expires=None): from flask import current_app from cryptography.fernet import Fernet import time f = current_app.fernet if expires is None: f_expires = 0 else: f_expires = time.time() + expires return f.encrypt('{{"id": {}, "expires": {} }}'.format( identifier, f_expires).encode('utf8')).decode('utf8') # endregion class BaseSchema(Schema): @post_dump def remove_skip_values(self, data, **kwargs): return { key: value for key, value in data.items() if value is not None # Empty lists must be kept! 
} class AttributeListResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) name = fields.String(required=True) description = fields.String(required=False, allow_none=True) type = fields.String(required=True, validate=[OneOf(list(DataType.__dict__.keys()))]) size = fields.Integer(required=False, allow_none=True) precision = fields.Integer(required=False, allow_none=True) scale = fields.Integer(required=False, allow_none=True) nullable = fields.Boolean( required=False, allow_none=True, missing=False, default=False) enumeration = fields.Boolean( required=False, allow_none=True, missing=False, default=False) missing_representation = fields.String(required=False, allow_none=True) feature = fields.Boolean( required=False, allow_none=True, missing=True, default=True) label = fields.Boolean( required=False, allow_none=True, missing=True, default=True) distinct_values = fields.Integer(required=False, allow_none=True) mean_value = fields.Float(required=False, allow_none=True) median_value = fields.String(required=False, allow_none=True) max_value = fields.String(required=False, allow_none=True) min_value = fields.String(required=False, allow_none=True) std_deviation = fields.Float(required=False, allow_none=True) missing_total = fields.String(required=False, allow_none=True) deciles = fields.String(required=False, allow_none=True) format = fields.String(required=False, allow_none=True) key = fields.Boolean( required=False, allow_none=True, missing=False, default=False) attribute_privacy = fields.Nested( 'limonero.schema.AttributePrivacyListResponseSchema', allow_none=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of Attribute""" return Attribute(**data) class Meta: ordered = True unknown = EXCLUDE class AttributeItemResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) name = fields.String(required=True) 
description = fields.String(required=False, allow_none=True) type = fields.String(required=True, validate=[OneOf(list(DataType.__dict__.keys()))]) size = fields.Integer(required=False, allow_none=True) precision = fields.Integer(required=False, allow_none=True) scale = fields.Integer(required=False, allow_none=True) nullable = fields.Boolean( required=False, allow_none=True, missing=False, default=False) enumeration = fields.Boolean( required=False, allow_none=True, missing=False, default=False) missing_representation = fields.String(required=False, allow_none=True) feature = fields.Boolean( required=False, allow_none=True, missing=True, default=True) label = fields.Boolean( required=False, allow_none=True, missing=True, default=True) distinct_values = fields.Integer(required=False, allow_none=True) mean_value = fields.Float(required=False, allow_none=True) median_value = fields.String(required=False, allow_none=True) max_value = fields.String(required=False, allow_none=True) min_value = fields.String(required=False, allow_none=True) std_deviation = fields.Float(required=False, allow_none=True) missing_total = fields.String(required=False, allow_none=True) deciles = fields.String(required=False, allow_none=True) format = fields.String(required=False, allow_none=True) key = fields.Boolean( required=False, allow_none=True, missing=False, default=False) attribute_privacy = fields.Nested( 'limonero.schema.AttributePrivacyItemResponseSchema', allow_none=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of Attribute""" return Attribute(**data) class Meta: ordered = True unknown = EXCLUDE class AttributeCreateRequestSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(allow_none=True) name = fields.String(required=True) description = fields.String(required=False, allow_none=True) type = fields.String(required=True, validate=[OneOf(list(DataType.__dict__.keys()))]) size = 
fields.Integer(required=False, allow_none=True) precision = fields.Integer(required=False, allow_none=True) scale = fields.Integer(required=False, allow_none=True) nullable = fields.Boolean( required=False, allow_none=True, missing=False, default=False) enumeration = fields.Boolean( required=False, allow_none=True, missing=False, default=False) missing_representation = fields.String(required=False, allow_none=True) feature = fields.Boolean( required=False, allow_none=True, missing=True, default=True) label = fields.Boolean( required=False, allow_none=True, missing=True, default=True) distinct_values = fields.Integer(required=False, allow_none=True) mean_value = fields.Float(required=False, allow_none=True) median_value = fields.String(required=False, allow_none=True) max_value = fields.String(required=False, allow_none=True) min_value = fields.String(required=False, allow_none=True) std_deviation = fields.Float(required=False, allow_none=True) missing_total = fields.String(required=False, allow_none=True) deciles = fields.String(required=False, allow_none=True) format = fields.String(required=False, allow_none=True) key = fields.Boolean( required=False, allow_none=True, missing=False, default=False) attribute_privacy = fields.Nested( 'limonero.schema.AttributePrivacyCreateRequestSchema', allow_none=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of Attribute""" return Attribute(**data) class Meta: ordered = True unknown = EXCLUDE class AttributePrivacyResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) name = fields.String(required=True) attribute_privacy = fields.Nested( 'limonero.schema.AttributePrivacyItemResponseSchema', allow_none=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of Attribute""" return Attribute(**data) class Meta: ordered = True 
unknown = EXCLUDE class AttributePrivacyListResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) attribute_name = fields.String(required=True) data_type = fields.String(required=False, allow_none=True, validate=[OneOf(list(DataType.__dict__.keys()))]) privacy_type = fields.String(required=True, validate=[OneOf(list(PrivacyType.__dict__.keys()))]) category_technique = fields.String(required=False, allow_none=True) anonymization_technique = fields.String(required=True, validate=[OneOf(list(AnonymizationTechnique.__dict__.keys()))]) hierarchical_structure_type = fields.String( required=False, allow_none=True) privacy_model_technique = fields.String(required=False, allow_none=True) hierarchy = fields.String(required=False, allow_none=True) category_model = fields.String(required=False, allow_none=True) privacy_model = fields.String(required=False, allow_none=True) privacy_model_parameters = fields.String(required=False, allow_none=True) unlock_privacy_key = fields.String(required=False, allow_none=True) is_global_law = fields.Boolean( required=False, allow_none=True, missing=False, default=False) attribute_privacy_group = fields.Nested( 'limonero.schema.AttributePrivacyGroupListResponseSchema', allow_none=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of AttributePrivacy""" return AttributePrivacy(**data) class Meta: ordered = True unknown = EXCLUDE class AttributePrivacyItemResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) attribute_name = fields.String(required=True) data_type = fields.String(required=False, allow_none=True, validate=[OneOf(list(DataType.__dict__.keys()))]) privacy_type = fields.String(required=True, validate=[OneOf(list(PrivacyType.__dict__.keys()))]) category_technique = fields.String(required=False, allow_none=True) anonymization_technique = fields.String(required=True, 
validate=[OneOf(list(AnonymizationTechnique.__dict__.keys()))]) hierarchical_structure_type = fields.String( required=False, allow_none=True) privacy_model_technique = fields.String(required=False, allow_none=True) hierarchy = fields.String(required=False, allow_none=True) category_model = fields.String(required=False, allow_none=True) privacy_model = fields.String(required=False, allow_none=True) privacy_model_parameters = fields.String(required=False, allow_none=True) unlock_privacy_key = fields.String(required=False, allow_none=True) is_global_law = fields.Boolean( required=False, allow_none=True, missing=False, default=False) attribute_privacy_group_id = fields.Integer( required=False, allow_none=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of AttributePrivacy""" return AttributePrivacy(**data) class Meta: ordered = True unknown = EXCLUDE class AttributePrivacyCreateRequestSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(allow_none=True) attribute_name = fields.String(required=True) data_type = fields.String(required=False, allow_none=True, validate=[OneOf(list(DataType.__dict__.keys()))]) privacy_type = fields.String(required=True, validate=[OneOf(list(PrivacyType.__dict__.keys()))]) category_technique = fields.String(required=False, allow_none=True) anonymization_technique = fields.String(required=True, validate=[OneOf(list(AnonymizationTechnique.__dict__.keys()))]) hierarchical_structure_type = fields.String( required=False, allow_none=True) privacy_model_technique = fields.String(required=False, allow_none=True) hierarchy = fields.String(required=False, allow_none=True) category_model = fields.String(required=False, allow_none=True) privacy_model = fields.String(required=False, allow_none=True) privacy_model_parameters = fields.String(required=False, allow_none=True) unlock_privacy_key = fields.String(required=False, allow_none=True) attribute_id 
= fields.Integer(required=False, allow_none=True) attribute_privacy_group_id = fields.Integer( required=False, allow_none=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of AttributePrivacy""" return AttributePrivacy(**data) class Meta: ordered = True unknown = EXCLUDE class AttributePrivacyPrivacyResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) data_type = fields.String(required=False, allow_none=True, validate=[OneOf(list(DataType.__dict__.keys()))]) privacy_type = fields.String(required=True, validate=[OneOf(list(PrivacyType.__dict__.keys()))]) category_technique = fields.String(required=False, allow_none=True) anonymization_technique = fields.String(required=True, validate=[OneOf(list(AnonymizationTechnique.__dict__.keys()))]) hierarchical_structure_type = fields.String( required=False, allow_none=True) privacy_model_technique = fields.String(required=False, allow_none=True) hierarchy = fields.String(required=False, allow_none=True) category_model = fields.String(required=False, allow_none=True) privacy_model = fields.String(required=False, allow_none=True) privacy_model_parameters = fields.String(required=False, allow_none=True) unlock_privacy_key = fields.String(required=False, allow_none=True) is_global_law = fields.Boolean( required=False, allow_none=True, missing=False, default=False) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of AttributePrivacy""" return AttributePrivacy(**data) class Meta: ordered = True unknown = EXCLUDE class AttributePrivacyGroupListResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) name = fields.String(required=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of AttributePrivacyGroup""" return 
AttributePrivacyGroup(**data) class Meta: ordered = True unknown = EXCLUDE class AttributePrivacyGroupItemResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) name = fields.String(required=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of AttributePrivacyGroup""" return AttributePrivacyGroup(**data) class Meta: ordered = True unknown = EXCLUDE class AttributePrivacyGroupCreateRequestSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(allow_none=True) name = fields.String(required=True) user_id = fields.Integer(required=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of AttributePrivacyGroup""" return AttributePrivacyGroup(**data) class Meta: ordered = True unknown = EXCLUDE class DataSourceExecuteRequestSchema(BaseSchema): """ JSON schema for executing tasks """ id = fields.Integer(required=True) name = fields.String(required=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of DataSource""" return DataSource(**data) class Meta: ordered = True unknown = EXCLUDE class DataSourceListResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) name = fields.String(required=True) description = fields.String(required=False, allow_none=True) enabled = fields.Boolean( required=False, allow_none=True, missing=True, default=True) statistics_process_counter = fields.Integer( required=False, allow_none=True, missing=0, default=0) read_only = fields.Boolean( required=False, allow_none=True, missing=True, default=True) privacy_aware = fields.Boolean( required=False, allow_none=True, missing=False, default=False) url = fields.String(required=True) created = fields.DateTime(required=False, allow_none=True) updated = fields.DateTime( 
required=False, allow_none=True, missing=datetime.datetime.utcnow, default=datetime.datetime.utcnow) format = fields.String(required=True, validate=[OneOf(list(DataSourceFormat.__dict__.keys()))]) initialization = fields.String(required=False, allow_none=True, missing=DataSourceInitialization.INITIALIZED, default=DataSourceInitialization.INITIALIZED, validate=[OneOf(list(DataSourceInitialization.__dict__.keys()))]) initialization_job_id = fields.String(required=False, allow_none=True) provenience = fields.String(required=False, allow_none=True) estimated_rows = fields.Integer( required=False, allow_none=True, missing=0, default=0) estimated_size_in_mega_bytes = fields.Decimal( required=False, allow_none=True) expiration = fields.String(required=False, allow_none=True) user_id = fields.Integer(required=False, allow_none=True) user_login = fields.String(required=False, allow_none=True) user_name = fields.String(required=False, allow_none=True) tags = fields.String(required=False, allow_none=True) temporary = fields.Boolean( required=False, allow_none=True, missing=False, default=False) workflow_id = fields.Integer(required=False, allow_none=True) task_id = fields.String(required=False, allow_none=True) attribute_delimiter = fields.String(required=False, allow_none=True) record_delimiter = fields.String(required=False, allow_none=True) text_delimiter = fields.String(required=False, allow_none=True) is_public = fields.Boolean( required=False, allow_none=True, missing=False, default=False) treat_as_missing = fields.String(required=False, allow_none=True) encoding = fields.String(required=False, allow_none=True) is_first_line_header = fields.Boolean( required=False, allow_none=True, missing=0, default=0) is_multiline = fields.Boolean( required=False, allow_none=True, missing=0, default=0) command = fields.String(required=False, allow_none=True) is_lookup = fields.Boolean( required=False, allow_none=True, missing=0, default=0) use_in_workflow = fields.Boolean( 
required=False, allow_none=True, missing=0, default=0) attributes = fields.Nested( 'limonero.schema.AttributeListResponseSchema', allow_none=True, many=True) permissions = fields.Nested( 'limonero.schema.DataSourcePermissionListResponseSchema', allow_none=True, many=True) storage = fields.Nested( 'limonero.schema.StorageListResponseSchema', required=True) download_token = fields.Function( lambda d: generate_download_token(d.id, 600)) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of DataSource""" return DataSource(**data) class Meta: ordered = True unknown = EXCLUDE class DataSourceCreateRequestSchema(BaseSchema): """ JSON serialization schema """ name = fields.String(required=True) description = fields.String(required=False, allow_none=True) enabled = fields.Boolean( required=False, allow_none=True, missing=True, default=True) statistics_process_counter = fields.Integer( required=False, allow_none=True, missing=0, default=0) read_only = fields.Boolean( required=False, allow_none=True, missing=True, default=True) privacy_aware = fields.Boolean( required=False, allow_none=True, missing=False, default=False) url = fields.String(required=True) format = fields.String(required=True, validate=[OneOf(list(DataSourceFormat.__dict__.keys()))]) initialization = fields.String(required=False, allow_none=True, missing=DataSourceInitialization.INITIALIZED, default=DataSourceInitialization.INITIALIZED, validate=[OneOf(list(DataSourceInitialization.__dict__.keys()))]) initialization_job_id = fields.String(required=False, allow_none=True) provenience = fields.String(required=False, allow_none=True) expiration = fields.String(required=False, allow_none=True) user_id = fields.Integer(required=False, allow_none=True) user_login = fields.String(required=False, allow_none=True) user_name = fields.String(required=False, allow_none=True) tags = fields.String(required=False, allow_none=True) temporary = 
fields.Boolean( required=False, allow_none=True, missing=False, default=False) workflow_id = fields.Integer(required=False, allow_none=True) task_id = fields.String(required=False, allow_none=True) attribute_delimiter = fields.String(required=False, allow_none=True) record_delimiter = fields.String(required=False, allow_none=True) text_delimiter = fields.String(required=False, allow_none=True) is_public = fields.Boolean( required=False, allow_none=True, missing=False, default=False) treat_as_missing = fields.String(required=False, allow_none=True) encoding = fields.String(required=False, allow_none=True) is_first_line_header = fields.Boolean( required=False, allow_none=True, missing=0, default=0) is_multiline = fields.Boolean( required=False, allow_none=True, missing=0, default=0) command = fields.String(required=False, allow_none=True) is_lookup = fields.Boolean( required=False, allow_none=True, missing=0, default=0) use_in_workflow = fields.Boolean( required=False, allow_none=True, missing=0, default=0) attributes = fields.Nested( 'limonero.schema.AttributeCreateRequestSchema', allow_none=True, many=True) permissions = fields.Nested( 'limonero.schema.DataSourcePermissionCreateRequestSchema', allow_none=True, many=True) storage_id = fields.Integer(required=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of DataSource""" return DataSource(**data) class Meta: ordered = True unknown = EXCLUDE class DataSourceItemResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) name = fields.String(required=True) description = fields.String(required=False, allow_none=True) enabled = fields.Boolean( required=False, allow_none=True, missing=True, default=True) statistics_process_counter = fields.Integer( required=False, allow_none=True, missing=0, default=0) read_only = fields.Boolean( required=False, allow_none=True, missing=True, default=True) 
privacy_aware = fields.Boolean( required=False, allow_none=True, missing=False, default=False) url = fields.String(required=True) created = fields.DateTime(required=False, allow_none=True) updated = fields.DateTime( required=False, allow_none=True, missing=datetime.datetime.utcnow, default=datetime.datetime.utcnow) format = fields.String(required=True, validate=[OneOf(list(DataSourceFormat.__dict__.keys()))]) initialization = fields.String(required=False, allow_none=True, missing=DataSourceInitialization.INITIALIZED, default=DataSourceInitialization.INITIALIZED, validate=[OneOf(list(DataSourceInitialization.__dict__.keys()))]) initialization_job_id = fields.String(required=False, allow_none=True) provenience = fields.String(required=False, allow_none=True) estimated_rows = fields.Integer( required=False, allow_none=True, missing=0, default=0) estimated_size_in_mega_bytes = fields.Decimal( required=False, allow_none=True) expiration = fields.String(required=False, allow_none=True) user_id = fields.Integer(required=False, allow_none=True) user_login = fields.String(required=False, allow_none=True) user_name = fields.String(required=False, allow_none=True) tags = fields.String(required=False, allow_none=True) temporary = fields.Boolean( required=False, allow_none=True, missing=False, default=False) workflow_id = fields.Integer(required=False, allow_none=True) task_id = fields.String(required=False, allow_none=True) attribute_delimiter = fields.String(required=False, allow_none=True) record_delimiter = fields.String(required=False, allow_none=True) text_delimiter = fields.String(required=False, allow_none=True) is_public = fields.Boolean( required=False, allow_none=True, missing=False, default=False) treat_as_missing = fields.String(required=False, allow_none=True) encoding = fields.String(required=False, allow_none=True) is_first_line_header = fields.Boolean( required=False, allow_none=True, missing=0, default=0) is_multiline = fields.Boolean( required=False, 
allow_none=True, missing=0, default=0) command = fields.String(required=False, allow_none=True) is_lookup = fields.Boolean( required=False, allow_none=True, missing=0, default=0) use_in_workflow = fields.Boolean( required=False, allow_none=True, missing=0, default=0) attributes = fields.Nested( 'limonero.schema.AttributeItemResponseSchema', allow_none=True, many=True) permissions = fields.Nested( 'limonero.schema.DataSourcePermissionItemResponseSchema', allow_none=True, many=True) storage = fields.Nested( 'limonero.schema.StorageItemResponseSchema', required=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of DataSource""" return DataSource(**data) class Meta: ordered = True unknown = EXCLUDE class DataSourcePrivacyResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) name = fields.String(required=True) privacy_aware = fields.Boolean( required=False, allow_none=True, missing=False, default=False) attributes = fields.Nested( 'limonero.schema.AttributePrivacyResponseSchema', allow_none=True, many=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of DataSource""" return DataSource(**data) class Meta: ordered = True unknown = EXCLUDE class DataSourcePermissionListResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) permission = fields.String(required=True, validate=[OneOf(list(PermissionType.__dict__.keys()))]) user_id = fields.Integer(required=True) user_login = fields.String(required=True) user_name = fields.String(required=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of DataSourcePermission""" return DataSourcePermission(**data) class Meta: ordered = True unknown = EXCLUDE class DataSourcePermissionItemResponseSchema(BaseSchema): """ 
JSON serialization schema """ id = fields.Integer(required=True) permission = fields.String(required=True, validate=[OneOf(list(PermissionType.__dict__.keys()))]) user_id = fields.Integer(required=True) user_login = fields.String(required=True) user_name = fields.String(required=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of DataSourcePermission""" return DataSourcePermission(**data) class Meta: ordered = True unknown = EXCLUDE class DataSourcePermissionCreateRequestSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) permission = fields.String(required=True, validate=[OneOf(list(PermissionType.__dict__.keys()))]) user_id = fields.Integer(required=True) user_login = fields.String(required=True) user_name = fields.String(required=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of DataSourcePermission""" return DataSourcePermission(**data) class Meta: ordered = True unknown = EXCLUDE class ModelListResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) name = fields.String(required=True) enabled = fields.Boolean( required=False, allow_none=True, missing=True, default=True) created = fields.DateTime(required=False, allow_none=True) path = fields.String(required=True) class_name = fields.String(required=True) type = fields.String(required=False, allow_none=True, missing=ModelType.UNSPECIFIED, default=ModelType.UNSPECIFIED, validate=[OneOf(list(ModelType.__dict__.keys()))]) deployment_status = fields.String(required=False, allow_none=True, missing=DeploymentStatus.NOT_DEPLOYED, default=DeploymentStatus.NOT_DEPLOYED, validate=[OneOf(list(DeploymentStatus.__dict__.keys()))]) user_id = fields.Integer(required=True) user_login = fields.String(required=True) user_name = fields.String(required=True) workflow_id = 
fields.Integer(required=False, allow_none=True) workflow_name = fields.String(required=False, allow_none=True) task_id = fields.String(required=False, allow_none=True) job_id = fields.Integer(required=False, allow_none=True) storage = fields.Nested( 'limonero.schema.StorageListResponseSchema', required=True) download_token = fields.Function( lambda d: generate_download_token(d.id, 600)) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of Model""" return Model(**data) class Meta: ordered = True unknown = EXCLUDE class ModelCreateRequestSchema(BaseSchema): """ JSON serialization schema """ name = fields.String(required=True) enabled = fields.Boolean( required=False, allow_none=True, missing=True, default=True) path = fields.String(required=True) class_name = fields.String(required=True) type = fields.String(required=False, allow_none=True, missing=ModelType.UNSPECIFIED, default=ModelType.UNSPECIFIED, validate=[OneOf(list(ModelType.__dict__.keys()))]) deployment_status = fields.String(required=False, allow_none=True, missing=DeploymentStatus.NOT_DEPLOYED, default=DeploymentStatus.NOT_DEPLOYED, validate=[OneOf(list(DeploymentStatus.__dict__.keys()))]) user_id = fields.Integer(required=True) user_login = fields.String(required=True) user_name = fields.String(required=True) workflow_id = fields.Integer(required=False, allow_none=True) workflow_name = fields.String(required=False, allow_none=True) task_id = fields.String(required=False, allow_none=True) job_id = fields.Integer(required=False, allow_none=True) storage_id = fields.Integer(required=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of Model""" return Model(**data) class Meta: ordered = True unknown = EXCLUDE class ModelItemResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) name = fields.String(required=True) 
enabled = fields.Boolean( required=False, allow_none=True, missing=True, default=True) created = fields.DateTime(required=False, allow_none=True) path = fields.String(required=True) class_name = fields.String(required=True) type = fields.String(required=False, allow_none=True, missing=ModelType.UNSPECIFIED, default=ModelType.UNSPECIFIED, validate=[OneOf(list(ModelType.__dict__.keys()))]) deployment_status = fields.String(required=False, allow_none=True, missing=DeploymentStatus.NOT_DEPLOYED, default=DeploymentStatus.NOT_DEPLOYED, validate=[OneOf(list(DeploymentStatus.__dict__.keys()))]) user_id = fields.Integer(required=True) user_login = fields.String(required=True) user_name = fields.String(required=True) workflow_id = fields.Integer(required=False, allow_none=True) workflow_name = fields.String(required=False, allow_none=True) task_id = fields.String(required=False, allow_none=True) job_id = fields.Integer(required=False, allow_none=True) storage = fields.Nested( 'limonero.schema.StorageItemResponseSchema', required=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of Model""" return Model(**data) class Meta: ordered = True unknown = EXCLUDE class ModelPermissionListResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) permission = fields.String(required=True, validate=[OneOf(list(PermissionType.__dict__.keys()))]) user_id = fields.Integer(required=True) user_login = fields.String(required=True) user_name = fields.String(required=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of ModelPermission""" return ModelPermission(**data) class Meta: ordered = True unknown = EXCLUDE class ModelPermissionItemResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) permission = fields.String(required=True, 
validate=[OneOf(list(PermissionType.__dict__.keys()))]) user_id = fields.Integer(required=True) user_login = fields.String(required=True) user_name = fields.String(required=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of ModelPermission""" return ModelPermission(**data) class Meta: ordered = True unknown = EXCLUDE class StorageListResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) name = fields.String(required=True) type = fields.String(required=True, validate=[OneOf(list(StorageType.__dict__.keys()))]) enabled = fields.Boolean( required=False, allow_none=True, missing=True, default=True) url = fields.String(required=True) client_url = fields.String(required=False, allow_none=True) extra_params = fields.String(required=False, allow_none=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of Storage""" return Storage(**data) class Meta: ordered = True unknown = EXCLUDE class StorageItemResponseSchema(BaseSchema): """ JSON serialization schema """ id = fields.Integer(required=True) name = fields.String(required=True) type = fields.String(required=True, validate=[OneOf(list(StorageType.__dict__.keys()))]) enabled = fields.Boolean( required=False, allow_none=True, missing=True, default=True) url = fields.String(required=True) client_url = fields.String(required=False, allow_none=True) extra_params = fields.String(required=False, allow_none=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of Storage""" return Storage(**data) class Meta: ordered = True unknown = EXCLUDE class StorageCreateRequestSchema(BaseSchema): """ JSON serialization schema """ name = fields.String(required=True) type = fields.String(required=True, validate=[OneOf(list(StorageType.__dict__.keys()))]) enabled = 
fields.Boolean( required=False, allow_none=True, missing=True, default=True) url = fields.String(required=True) client_url = fields.String(required=False, allow_none=True) extra_params = fields.String(required=False, allow_none=True) # noinspection PyUnresolvedReferences @post_load def make_object(self, data, **kwargs): """ Deserialize data into an instance of Storage""" return Storage(**data) class Meta: ordered = True unknown = EXCLUDE
37.002762
159
0.677666
4,307
40,185
6.157883
0.057349
0.080084
0.115678
0.18332
0.910753
0.908755
0.907963
0.907963
0.906078
0.900535
0
0.00141
0.223342
40,185
1,085
160
37.036866
0.848382
0.078313
0
0.906924
0
0
0.021673
0.020691
0
0
0
0
0
1
0.036322
false
0
0.011351
0.001135
0.53916
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
9
52139b7b13caef6b61235dc05744f6fb1327b120
60,165
py
Python
socialNetwork/gen-py/social_network/UserService.py
rodrigo-bruno/DeathStarBench
c9ce09aaf7c1298a7c88efacd1010a71db0fa59d
[ "Apache-2.0" ]
364
2019-04-28T01:45:37.000Z
2022-03-31T15:08:03.000Z
socialNetwork/gen-py/social_network/UserService.py
rodrigo-bruno/DeathStarBench
c9ce09aaf7c1298a7c88efacd1010a71db0fa59d
[ "Apache-2.0" ]
111
2019-04-15T11:08:49.000Z
2022-03-31T17:39:16.000Z
socialNetwork/gen-py/social_network/UserService.py
rodrigo-bruno/DeathStarBench
c9ce09aaf7c1298a7c88efacd1010a71db0fa59d
[ "Apache-2.0" ]
229
2019-05-14T08:55:57.000Z
2022-03-31T03:14:55.000Z
# # Autogenerated by Thrift Compiler (0.12.0) # # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING # # options string: py # from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException from thrift.protocol.TProtocol import TProtocolException from thrift.TRecursive import fix_spec import sys import logging from .ttypes import * from thrift.Thrift import TProcessor from thrift.transport import TTransport all_structs = [] class Iface(object): def RegisterUser(self, req_id, first_name, last_name, username, password, carrier): """ Parameters: - req_id - first_name - last_name - username - password - carrier """ pass def RegisterUserWithId(self, req_id, first_name, last_name, username, password, user_id, carrier): """ Parameters: - req_id - first_name - last_name - username - password - user_id - carrier """ pass def Login(self, req_id, username, password, carrier): """ Parameters: - req_id - username - password - carrier """ pass def ComposeCreatorWithUserId(self, req_id, user_id, username, carrier): """ Parameters: - req_id - user_id - username - carrier """ pass def ComposeCreatorWithUsername(self, req_id, username, carrier): """ Parameters: - req_id - username - carrier """ pass def GetUserId(self, req_id, username, carrier): """ Parameters: - req_id - username - carrier """ pass class Client(Iface): def __init__(self, iprot, oprot=None): self._iprot = self._oprot = iprot if oprot is not None: self._oprot = oprot self._seqid = 0 def RegisterUser(self, req_id, first_name, last_name, username, password, carrier): """ Parameters: - req_id - first_name - last_name - username - password - carrier """ self.send_RegisterUser(req_id, first_name, last_name, username, password, carrier) self.recv_RegisterUser() def send_RegisterUser(self, req_id, first_name, last_name, username, password, carrier): self._oprot.writeMessageBegin('RegisterUser', TMessageType.CALL, self._seqid) args = RegisterUser_args() args.req_id = req_id 
        # Tail of send_RegisterUser: finish populating the args struct, then
        # serialize it and flush the outgoing transport.
        args.first_name = first_name
        args.last_name = last_name
        args.username = username
        args.password = password
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_RegisterUser(self):
        # Read the RegisterUser reply frame. Re-raises a transported
        # TApplicationException, or the service-declared exception carried
        # in result.se. RegisterUser is void, so success returns None.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = RegisterUser_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.se is not None:
            raise result.se
        return

    def RegisterUserWithId(self, req_id, first_name, last_name, username, password, user_id, carrier):
        """
        Parameters:
         - req_id
         - first_name
         - last_name
         - username
         - password
         - user_id
         - carrier

        """
        # Synchronous RPC: write the call, then block on the reply.
        self.send_RegisterUserWithId(req_id, first_name, last_name, username, password, user_id, carrier)
        self.recv_RegisterUserWithId()

    def send_RegisterUserWithId(self, req_id, first_name, last_name, username, password, user_id, carrier):
        # Serialize a RegisterUserWithId CALL frame onto the output protocol.
        self._oprot.writeMessageBegin('RegisterUserWithId', TMessageType.CALL, self._seqid)
        args = RegisterUserWithId_args()
        args.req_id = req_id
        args.first_name = first_name
        args.last_name = last_name
        args.username = username
        args.password = password
        args.user_id = user_id
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_RegisterUserWithId(self):
        # Read the RegisterUserWithId reply; void on success, raises on error.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = RegisterUserWithId_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.se is not None:
            raise result.se
        return

    def Login(self, req_id, username, password, carrier):
        """
        Parameters:
         - req_id
         - username
         - password
         - carrier

        """
        # Synchronous RPC returning the Login result string.
        self.send_Login(req_id, username, password, carrier)
        return self.recv_Login()

    def send_Login(self, req_id, username, password, carrier):
        # Serialize a Login CALL frame onto the output protocol.
        self._oprot.writeMessageBegin('Login', TMessageType.CALL, self._seqid)
        args = Login_args()
        args.req_id = req_id
        args.username = username
        args.password = password
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_Login(self):
        # Read the Login reply. Returns result.success; raises the service
        # exception, or MISSING_RESULT if the reply carried neither.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = Login_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.se is not None:
            raise result.se
        raise TApplicationException(TApplicationException.MISSING_RESULT, "Login failed: unknown result")

    def ComposeCreatorWithUserId(self, req_id, user_id, username, carrier):
        """
        Parameters:
         - req_id
         - user_id
         - username
         - carrier

        """
        # Synchronous RPC returning a Creator struct (see *_result fid 0).
        self.send_ComposeCreatorWithUserId(req_id, user_id, username, carrier)
        return self.recv_ComposeCreatorWithUserId()

    def send_ComposeCreatorWithUserId(self, req_id, user_id, username, carrier):
        # Serialize a ComposeCreatorWithUserId CALL frame.
        self._oprot.writeMessageBegin('ComposeCreatorWithUserId', TMessageType.CALL, self._seqid)
        args = ComposeCreatorWithUserId_args()
        args.req_id = req_id
        args.user_id = user_id
        args.username = username
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_ComposeCreatorWithUserId(self):
        # Read the ComposeCreatorWithUserId reply or raise.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = ComposeCreatorWithUserId_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.se is not None:
            raise result.se
        raise TApplicationException(TApplicationException.MISSING_RESULT, "ComposeCreatorWithUserId failed: unknown result")

    def ComposeCreatorWithUsername(self, req_id, username, carrier):
        """
        Parameters:
         - req_id
         - username
         - carrier

        """
        # Synchronous RPC returning a Creator struct, resolved by username.
        self.send_ComposeCreatorWithUsername(req_id, username, carrier)
        return self.recv_ComposeCreatorWithUsername()

    def send_ComposeCreatorWithUsername(self, req_id, username, carrier):
        # Serialize a ComposeCreatorWithUsername CALL frame.
        self._oprot.writeMessageBegin('ComposeCreatorWithUsername', TMessageType.CALL, self._seqid)
        args = ComposeCreatorWithUsername_args()
        args.req_id = req_id
        args.username = username
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_ComposeCreatorWithUsername(self):
        # Read the ComposeCreatorWithUsername reply or raise.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = ComposeCreatorWithUsername_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.se is not None:
            raise result.se
        raise TApplicationException(TApplicationException.MISSING_RESULT, "ComposeCreatorWithUsername failed: unknown result")

    def GetUserId(self, req_id, username, carrier):
        """
        Parameters:
         - req_id
         - username
         - carrier

        """
        # Synchronous RPC returning the user id for a username.
        self.send_GetUserId(req_id, username, carrier)
        return self.recv_GetUserId()

    def send_GetUserId(self, req_id, username, carrier):
        # Serialize a GetUserId CALL frame.
        self._oprot.writeMessageBegin('GetUserId', TMessageType.CALL, self._seqid)
        args = GetUserId_args()
        args.req_id = req_id
        args.username = username
        args.carrier = carrier
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_GetUserId(self):
        # Read the GetUserId reply or raise.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = GetUserId_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.se is not None:
            raise result.se
        raise TApplicationException(TApplicationException.MISSING_RESULT, "GetUserId failed: unknown result")


class Processor(Iface, TProcessor):
    # Server-side dispatcher: routes each incoming message to the matching
    # process_* method, which invokes the user-supplied handler.
    def __init__(self, handler):
        self._handler = handler
        self._processMap = {}
        # Method-name -> unbound handler mapping used by process().
        self._processMap["RegisterUser"] = Processor.process_RegisterUser
        self._processMap["RegisterUserWithId"] = Processor.process_RegisterUserWithId
        self._processMap["Login"] = Processor.process_Login
        self._processMap["ComposeCreatorWithUserId"] = Processor.process_ComposeCreatorWithUserId
        self._processMap["ComposeCreatorWithUsername"] = Processor.process_ComposeCreatorWithUsername
        self._processMap["GetUserId"] = Processor.process_GetUserId

    def process(self, iprot, oprot):
        # Dispatch one incoming message. Unknown method names get their
        # payload skipped and are answered with UNKNOWN_METHOD.
        (name, type, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True

    def process_RegisterUser(self, seqid, iprot, oprot):
        # Deserialize args, invoke the handler, and write back REPLY (with
        # result.se set for the declared ServiceException) or EXCEPTION.
        # Transport errors propagate so the server loop can drop the client.
        args = RegisterUser_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = RegisterUser_result()
        try:
            self._handler.RegisterUser(args.req_id, args.first_name, args.last_name, args.username, args.password, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as se:
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("RegisterUser", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_RegisterUserWithId(self, seqid, iprot, oprot):
        # Same handler-invocation pattern as process_RegisterUser.
        args = RegisterUserWithId_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = RegisterUserWithId_result()
        try:
            self._handler.RegisterUserWithId(args.req_id, args.first_name, args.last_name, args.username, args.password, args.user_id, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as se:
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("RegisterUserWithId", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_Login(self, seqid, iprot, oprot):
        # Non-void method: the handler's return value is sent as result.success.
        args = Login_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = Login_result()
        try:
            result.success = self._handler.Login(args.req_id, args.username, args.password, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as se:
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("Login", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_ComposeCreatorWithUserId(self, seqid, iprot, oprot):
        # Non-void method: handler return value goes into result.success.
        args = ComposeCreatorWithUserId_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = ComposeCreatorWithUserId_result()
        try:
            result.success = self._handler.ComposeCreatorWithUserId(args.req_id, args.user_id, args.username, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as se:
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("ComposeCreatorWithUserId", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_ComposeCreatorWithUsername(self, seqid, iprot, oprot):
        # Non-void method: handler return value goes into result.success.
        args = ComposeCreatorWithUsername_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = ComposeCreatorWithUsername_result()
        try:
            result.success = self._handler.ComposeCreatorWithUsername(args.req_id, args.username, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as se:
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("ComposeCreatorWithUsername", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_GetUserId(self, seqid, iprot, oprot):
        # Non-void method: handler return value goes into result.success.
        args = GetUserId_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = GetUserId_result()
        try:
            result.success = self._handler.GetUserId(args.req_id, args.username, args.carrier)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as se:
            msg_type = TMessageType.REPLY
            result.se = se
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("GetUserId", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

# HELPER FUNCTIONS AND STRUCTURES


class RegisterUser_args(object):
    """
    Attributes:
     - req_id
     - first_name
     - last_name
     - username
     - password
     - carrier

    """

    def __init__(self, req_id=None, first_name=None, last_name=None, username=None, password=None, carrier=None,):
        self.req_id = req_id
        self.first_name = first_name
        self.last_name = last_name
        self.username = username
        self.password = password
        self.carrier = carrier

    def read(self, iprot):
        # Deserialize fields by id; unknown ids/types are skipped so the
        # struct tolerates schema evolution.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.first_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.last_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.username = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.password = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.MAP:
                    # carrier: map<string, string> (tracing/context metadata).
                    self.carrier = {}
                    (_ktype40, _vtype41, _size39) = iprot.readMapBegin()
                    for _i43 in range(_size39):
                        _key44 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val45 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key44] = _val45
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize only the fields that are set (None fields are omitted).
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('RegisterUser_args')
        if self.req_id is not None:
            oprot.writeFieldBegin('req_id', TType.I64, 1)
            oprot.writeI64(self.req_id)
            oprot.writeFieldEnd()
        if self.first_name is not None:
            oprot.writeFieldBegin('first_name', TType.STRING, 2)
            oprot.writeString(self.first_name.encode('utf-8') if sys.version_info[0] == 2 else self.first_name)
            oprot.writeFieldEnd()
        if self.last_name is not None:
            oprot.writeFieldBegin('last_name', TType.STRING, 3)
            oprot.writeString(self.last_name.encode('utf-8') if sys.version_info[0] == 2 else self.last_name)
            oprot.writeFieldEnd()
        if self.username is not None:
            oprot.writeFieldBegin('username', TType.STRING, 4)
            oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username)
            oprot.writeFieldEnd()
        if self.password is not None:
            oprot.writeFieldBegin('password', TType.STRING, 5)
            oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password)
            oprot.writeFieldEnd()
        if self.carrier is not None:
            oprot.writeFieldBegin('carrier', TType.MAP, 6)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier))
            for kiter46, viter47 in self.carrier.items():
                oprot.writeString(kiter46.encode('utf-8') if sys.version_info[0] == 2 else kiter46)
                oprot.writeString(viter47.encode('utf-8') if sys.version_info[0] == 2 else viter47)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(RegisterUser_args)
RegisterUser_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'req_id', None, None, ),  # 1
    (2, TType.STRING, 'first_name', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'last_name', 'UTF8', None, ),  # 3
    (4, TType.STRING, 'username', 'UTF8', None, ),  # 4
    (5, TType.STRING, 'password', 'UTF8', None, ),  # 5
    (6, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 6
)


class RegisterUser_result(object):
    """
    Attributes:
     - se

    """

    def __init__(self, se=None,):
        self.se = se

    def read(self, iprot):
        # Reply struct for the void RegisterUser call: only an optional
        # ServiceException at field id 1.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.se = ServiceException()
                    self.se.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('RegisterUser_result')
        if self.se is not None:
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(RegisterUser_result)
RegisterUser_result.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'se', [ServiceException, None], None, ),  # 1
)


class RegisterUserWithId_args(object):
    """
    Attributes:
     - req_id
     - first_name
     - last_name
     - username
     - password
     - user_id
     - carrier

    """

    def __init__(self, req_id=None, first_name=None, last_name=None, username=None, password=None, user_id=None, carrier=None,):
        self.req_id = req_id
        self.first_name = first_name
        self.last_name = last_name
        self.username = username
        self.password = password
        self.user_id = user_id
        self.carrier = carrier

    def read(self, iprot):
        # Like RegisterUser_args.read, plus an explicit user_id at field 6;
        # carrier moves to field 7.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.first_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.last_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.username = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.password = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.I64:
                    self.user_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.MAP:
                    self.carrier = {}
                    (_ktype49, _vtype50, _size48) = iprot.readMapBegin()
                    for _i52 in range(_size48):
                        _key53 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val54 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key53] = _val54
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize only the fields that are set (None fields are omitted).
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('RegisterUserWithId_args')
        if self.req_id is not None:
            oprot.writeFieldBegin('req_id', TType.I64, 1)
            oprot.writeI64(self.req_id)
            oprot.writeFieldEnd()
        if self.first_name is not None:
            oprot.writeFieldBegin('first_name', TType.STRING, 2)
            oprot.writeString(self.first_name.encode('utf-8') if sys.version_info[0] == 2 else self.first_name)
            oprot.writeFieldEnd()
        if self.last_name is not None:
            oprot.writeFieldBegin('last_name', TType.STRING, 3)
            oprot.writeString(self.last_name.encode('utf-8') if sys.version_info[0] == 2 else self.last_name)
            oprot.writeFieldEnd()
        if self.username is not None:
            oprot.writeFieldBegin('username', TType.STRING, 4)
            oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username)
            oprot.writeFieldEnd()
        if self.password is not None:
            oprot.writeFieldBegin('password', TType.STRING, 5)
            oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password)
            oprot.writeFieldEnd()
        if self.user_id is not None:
            oprot.writeFieldBegin('user_id', TType.I64, 6)
            oprot.writeI64(self.user_id)
            oprot.writeFieldEnd()
        if self.carrier is not None:
            oprot.writeFieldBegin('carrier', TType.MAP, 7)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier))
            for kiter55, viter56 in self.carrier.items():
                oprot.writeString(kiter55.encode('utf-8') if sys.version_info[0] == 2 else kiter55)
                oprot.writeString(viter56.encode('utf-8') if sys.version_info[0] == 2 else viter56)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(RegisterUserWithId_args)
RegisterUserWithId_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'req_id', None, None, ),  # 1
    (2, TType.STRING, 'first_name', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'last_name', 'UTF8', None, ),  # 3
    (4, TType.STRING, 'username', 'UTF8', None, ),  # 4
    (5, TType.STRING, 'password', 'UTF8', None, ),  # 5
    (6, TType.I64, 'user_id', None, None, ),  # 6
    (7, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 7
)


class RegisterUserWithId_result(object):
    """
    Attributes:
     - se

    """

    def __init__(self, se=None,):
        self.se = se

    def read(self, iprot):
        # Reply struct for the void RegisterUserWithId call: only an
        # optional ServiceException at field id 1.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.se = ServiceException()
                    self.se.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('RegisterUserWithId_result')
        if self.se is not None:
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(RegisterUserWithId_result)
RegisterUserWithId_result.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'se', [ServiceException, None], None, ),  # 1
)


class Login_args(object):
    """
    Attributes:
     - req_id
     - username
     - password
     - carrier

    """

    def __init__(self, req_id=None, username=None, password=None, carrier=None,):
        self.req_id = req_id
        self.username = username
        self.password = password
        self.carrier = carrier

    def read(self, iprot):
        # Deserialize fields by id; unknown ids/types are skipped.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.username = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.password = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.MAP:
                    self.carrier = {}
                    (_ktype58, _vtype59, _size57) = iprot.readMapBegin()
                    for _i61 in range(_size57):
                        _key62 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val63 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key62] = _val63
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize only the fields that are set (None fields are omitted).
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('Login_args')
        if self.req_id is not None:
            oprot.writeFieldBegin('req_id', TType.I64, 1)
            oprot.writeI64(self.req_id)
            oprot.writeFieldEnd()
        if self.username is not None:
            oprot.writeFieldBegin('username', TType.STRING, 2)
            oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username)
            oprot.writeFieldEnd()
        if self.password is not None:
            oprot.writeFieldBegin('password', TType.STRING, 3)
            oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password)
            oprot.writeFieldEnd()
        if self.carrier is not None:
            oprot.writeFieldBegin('carrier', TType.MAP, 4)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier))
            for kiter64, viter65 in self.carrier.items():
                oprot.writeString(kiter64.encode('utf-8') if sys.version_info[0] == 2 else kiter64)
                oprot.writeString(viter65.encode('utf-8') if sys.version_info[0] == 2 else viter65)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(Login_args)
Login_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'req_id', None, None, ),  # 1
    (2, TType.STRING, 'username', 'UTF8', None, ),  # 2
    (3, TType.STRING, 'password', 'UTF8', None, ),  # 3
    (4, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 4
)


class Login_result(object):
    """
    Attributes:
     - success
     - se

    """

    def __init__(self, success=None, se=None,):
        self.success = success
        self.se = se

    def read(self, iprot):
        # Reply struct: string return value at field 0, optional
        # ServiceException at field 1.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.se = ServiceException()
                    self.se.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('Login_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        if self.se is not None:
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(Login_result)
Login_result.thrift_spec = (
    (0, TType.STRING, 'success', 'UTF8', None, ),  # 0
    (1, TType.STRUCT, 'se', [ServiceException, None], None, ),  # 1
)


class ComposeCreatorWithUserId_args(object):
    """
    Attributes:
     - req_id
     - user_id
     - username
     - carrier

    """

    def __init__(self, req_id=None, user_id=None, username=None, carrier=None,):
        self.req_id = req_id
        self.user_id = user_id
        self.username = username
        self.carrier = carrier

    def read(self, iprot):
        # Deserialize fields by id; unknown ids/types are skipped.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I64:
                    self.user_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.username = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.MAP:
                    self.carrier = {}
                    (_ktype67, _vtype68, _size66) = iprot.readMapBegin()
                    for _i70 in range(_size66):
                        _key71 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val72 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key71] = _val72
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize only the fields that are set (None fields are omitted).
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ComposeCreatorWithUserId_args')
        if self.req_id is not None:
            oprot.writeFieldBegin('req_id', TType.I64, 1)
            oprot.writeI64(self.req_id)
            oprot.writeFieldEnd()
        if self.user_id is not None:
            oprot.writeFieldBegin('user_id', TType.I64, 2)
            oprot.writeI64(self.user_id)
            oprot.writeFieldEnd()
        if self.username is not None:
            oprot.writeFieldBegin('username', TType.STRING, 3)
            oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username)
            oprot.writeFieldEnd()
        if self.carrier is not None:
            oprot.writeFieldBegin('carrier', TType.MAP, 4)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier))
            for kiter73, viter74 in self.carrier.items():
                oprot.writeString(kiter73.encode('utf-8') if sys.version_info[0] == 2 else kiter73)
                oprot.writeString(viter74.encode('utf-8') if sys.version_info[0] == 2 else viter74)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(ComposeCreatorWithUserId_args)
ComposeCreatorWithUserId_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'req_id', None, None, ),  # 1
    (2, TType.I64, 'user_id', None, None, ),  # 2
    (3, TType.STRING, 'username', 'UTF8', None, ),  # 3
    (4, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 4
)


class ComposeCreatorWithUserId_result(object):
    """
    Attributes:
     - success
     - se

    """

    def __init__(self, success=None, se=None,):
        self.success = success
        self.se = se

    def read(self, iprot):
        # Reply struct: Creator struct return value at field 0, optional
        # ServiceException at field 1.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Creator()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.se = ServiceException()
                    self.se.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ComposeCreatorWithUserId_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.se is not None:
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(ComposeCreatorWithUserId_result)
ComposeCreatorWithUserId_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Creator, None], None, ),  # 0
    (1, TType.STRUCT, 'se', [ServiceException, None], None, ),  # 1
)


class ComposeCreatorWithUsername_args(object):
    """
    Attributes:
     - req_id
     - username
     - carrier

    """

    def __init__(self, req_id=None, username=None, carrier=None,):
        self.req_id = req_id
        self.username = username
        self.carrier = carrier

    def read(self, iprot):
        # Deserialize fields by id; unknown ids/types are skipped.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.req_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.username = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.MAP:
                    self.carrier = {}
                    (_ktype76, _vtype77, _size75) = iprot.readMapBegin()
                    for _i79 in range(_size75):
                        _key80 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val81 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.carrier[_key80] = _val81
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize only the fields that are set (None fields are omitted).
        if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('ComposeCreatorWithUsername_args') if self.req_id is not None: oprot.writeFieldBegin('req_id', TType.I64, 1) oprot.writeI64(self.req_id) oprot.writeFieldEnd() if self.username is not None: oprot.writeFieldBegin('username', TType.STRING, 2) oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username) oprot.writeFieldEnd() if self.carrier is not None: oprot.writeFieldBegin('carrier', TType.MAP, 3) oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier)) for kiter82, viter83 in self.carrier.items(): oprot.writeString(kiter82.encode('utf-8') if sys.version_info[0] == 2 else kiter82) oprot.writeString(viter83.encode('utf-8') if sys.version_info[0] == 2 else viter83) oprot.writeMapEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) all_structs.append(ComposeCreatorWithUsername_args) ComposeCreatorWithUsername_args.thrift_spec = ( None, # 0 (1, TType.I64, 'req_id', None, None, ), # 1 (2, TType.STRING, 'username', 'UTF8', None, ), # 2 (3, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 3 ) class ComposeCreatorWithUsername_result(object): """ Attributes: - success - se """ def __init__(self, success=None, se=None,): self.success = success self.se = se def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = 
iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.STRUCT: self.success = Creator() self.success.read(iprot) else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.se = ServiceException() self.se.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('ComposeCreatorWithUsername_result') if self.success is not None: oprot.writeFieldBegin('success', TType.STRUCT, 0) self.success.write(oprot) oprot.writeFieldEnd() if self.se is not None: oprot.writeFieldBegin('se', TType.STRUCT, 1) self.se.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) all_structs.append(ComposeCreatorWithUsername_result) ComposeCreatorWithUsername_result.thrift_spec = ( (0, TType.STRUCT, 'success', [Creator, None], None, ), # 0 (1, TType.STRUCT, 'se', [ServiceException, None], None, ), # 1 ) class GetUserId_args(object): """ Attributes: - req_id - username - carrier """ def __init__(self, req_id=None, username=None, carrier=None,): self.req_id = req_id self.username = username self.carrier = carrier def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == 
TType.I64: self.req_id = iprot.readI64() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.username = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.MAP: self.carrier = {} (_ktype85, _vtype86, _size84) = iprot.readMapBegin() for _i88 in range(_size84): _key89 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() _val90 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() self.carrier[_key89] = _val90 iprot.readMapEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('GetUserId_args') if self.req_id is not None: oprot.writeFieldBegin('req_id', TType.I64, 1) oprot.writeI64(self.req_id) oprot.writeFieldEnd() if self.username is not None: oprot.writeFieldBegin('username', TType.STRING, 2) oprot.writeString(self.username.encode('utf-8') if sys.version_info[0] == 2 else self.username) oprot.writeFieldEnd() if self.carrier is not None: oprot.writeFieldBegin('carrier', TType.MAP, 3) oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.carrier)) for kiter91, viter92 in self.carrier.items(): oprot.writeString(kiter91.encode('utf-8') if sys.version_info[0] == 2 else kiter91) oprot.writeString(viter92.encode('utf-8') if sys.version_info[0] == 2 else viter92) oprot.writeMapEnd() oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return 
not (self == other) all_structs.append(GetUserId_args) GetUserId_args.thrift_spec = ( None, # 0 (1, TType.I64, 'req_id', None, None, ), # 1 (2, TType.STRING, 'username', 'UTF8', None, ), # 2 (3, TType.MAP, 'carrier', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 3 ) class GetUserId_result(object): """ Attributes: - success - se """ def __init__(self, success=None, se=None,): self.success = success self.se = se def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.I64: self.success = iprot.readI64() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.se = ServiceException() self.se.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('GetUserId_result') if self.success is not None: oprot.writeFieldBegin('success', TType.I64, 0) oprot.writeI64(self.success) oprot.writeFieldEnd() if self.se is not None: oprot.writeFieldBegin('se', TType.STRUCT, 1) self.se.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) all_structs.append(GetUserId_result) GetUserId_result.thrift_spec = ( (0, TType.I64, 'success', None, None, ), # 0 (1, 
TType.STRUCT, 'se', [ServiceException, None], None, ), # 1 ) fix_spec(all_structs) del all_structs
36.775672
148
0.581368
6,431
60,165
5.238688
0.036542
0.016029
0.025646
0.013891
0.881092
0.866637
0.847225
0.834402
0.829475
0.825913
0
0.014831
0.313039
60,165
1,635
149
36.798165
0.800295
0.025713
0
0.826563
1
0
0.038067
0.006469
0
0
0
0
0
1
0.091406
false
0.035156
0.00625
0.028125
0.175
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
525656edf5262b1bb430a084e763a49e90eafe72
8,392
py
Python
design_bench/oracles/feature_extractors/feature_extractor.py
brandontrabucco/design_bench
824516ec59396aded3ca55ec7c1c313626ecaceb
[ "MIT" ]
27
2020-06-30T00:57:12.000Z
2022-03-25T16:24:11.000Z
design_bench/oracles/feature_extractors/feature_extractor.py
brandontrabucco/design_bench
824516ec59396aded3ca55ec7c1c313626ecaceb
[ "MIT" ]
7
2021-02-16T06:25:02.000Z
2022-03-31T17:21:17.000Z
design_bench/oracles/feature_extractors/feature_extractor.py
brandontrabucco/design_bench
824516ec59396aded3ca55ec7c1c313626ecaceb
[ "MIT" ]
5
2021-07-19T12:16:32.000Z
2022-03-01T16:56:16.000Z
import abc class FeatureExtractor(abc.ABC): """An abstract class for managing transformations applied to model-based optimization datasets when constructing the oracle; for example, if the oracle is intended to learn from molecule fingerprints max_x { y = f(x) } Public Methods: dataset_to_oracle_x(np.ndarray) -> np.ndarray Helper function for converting from designs contained in the dataset format into a format the oracle is expecting to process, such as from integers to logits of a categorical distribution dataset_to_oracle_y(np.ndarray) -> np.ndarray Helper function for converting from predictions contained in the dataset format into a format the oracle is expecting to process, such as from normalized to denormalized predictions oracle_to_dataset_x(np.ndarray) -> np.ndarray Helper function for converting from designs in the format of the oracle into the design format the dataset contains, such as from categorical logits to integers oracle_to_dataset_y(np.ndarray) -> np.ndarray Helper function for converting from predictions in the format of the oracle into a format the dataset contains, such as from normalized to denormalized predictions """ name = "feature_extractor" @abc.abstractmethod def dataset_to_oracle_x(self, x_batch, dataset): """Helper function for converting from designs contained in the dataset format into a format the oracle is expecting to process, such as from integers to logits of a categorical distribution Arguments: x_batch: np.ndarray a batch of design values 'x' that will be given as input to the oracle model in order to obtain a prediction value 'y' for each 'x' which is then returned dataset: DatasetBuilder an instance of a subclass of the DatasetBuilder class representing the source of the batch, must be provided Returns: x_batch: np.ndarray a batch of design values 'x' that have been converted from the format of designs contained in the dataset to the format expected by the oracle score function """ raise NotImplementedError("cannot run 
base class") @abc.abstractmethod def dataset_to_oracle_y(self, y_batch, dataset): """Helper function for converting from predictions contained in the dataset format into a format the oracle is expecting to process, such as from normalized to denormalized predictions Arguments: y_batch: np.ndarray a batch of prediction values 'y' that are from the dataset and will be processed into a format expected by the oracle score function, which is useful when training the oracle dataset: DatasetBuilder an instance of a subclass of the DatasetBuilder class representing the source of the batch, must be provided Returns: y_batch: np.ndarray a batch of prediction values 'y' that have been converted from the format of predictions contained in the dataset to the format expected by the oracle score function """ raise NotImplementedError("cannot run base class") @abc.abstractmethod def oracle_to_dataset_x(self, x_batch, dataset): """Helper function for converting from designs in the format of the oracle into the design format the dataset contains, such as from categorical logits to integers Arguments: x_batch: np.ndarray a batch of design values 'x' that have been converted from the format of designs contained in the dataset to the format expected by the oracle score function dataset: DatasetBuilder an instance of a subclass of the DatasetBuilder class representing the source of the batch, must be provided Returns: x_batch: np.ndarray a batch of design values 'x' that have been converted from the format of the oracle to the format of designs contained in the dataset """ raise NotImplementedError("cannot run base class") @abc.abstractmethod def oracle_to_dataset_y(self, y_batch, dataset): """Helper function for converting from predictions in the format of the oracle into a format the dataset contains, such as from normalized to denormalized predictions Arguments: y_batch: np.ndarray a batch of prediction values 'y' that have been converted from the format of predictions contained in 
the dataset to the format expected by the oracle score function dataset: DatasetBuilder an instance of a subclass of the DatasetBuilder class representing the source of the batch, must be provided Returns: y_batch: np.ndarray a batch of prediction values 'y' that have been converted from the format of the oracle to the format of predictions contained in the dataset """ raise NotImplementedError("cannot run base class") @abc.abstractmethod def input_shape(self, dataset): """Helper function for converting from predictions in the format of the oracle into a format the dataset contains, such as from normalized to denormalized predictions Arguments: dataset: DatasetBuilder an instance of a subclass of the DatasetBuilder class representing the source of the batch, must be provided Returns: input_shape: List[int] the shape of input tensors that were sampled from the dataset and are transformed into features using subclasses of this class """ raise NotImplementedError("cannot run base class") @abc.abstractmethod def input_dtype(self, dataset): """Helper function that returns the data type of the features returned by running the feature extractor from dataset to oracle Arguments: dataset: DatasetBuilder an instance of a subclass of the DatasetBuilder class representing the source of the batch, must be provided Returns: input_dtype: List[int] the type of input tensors that were sampled from the dataset and are transformed into features using subclasses of this class """ raise NotImplementedError("cannot run base class") @abc.abstractmethod def is_discrete(self, dataset): """Helper function that specifies whether the transformation applied by the feature extractor returns a discrete or continuous set of features, which is required for building predictive models Arguments: dataset: DatasetBuilder an instance of a subclass of the DatasetBuilder class representing the source of the batch, must be provided Returns: is_discrete: bool a boolean that indicates whether the dataset 
has been transformed into a discrete or continuous representation """ raise NotImplementedError("cannot run base class") @abc.abstractmethod def num_classes(self, dataset): """Helper function for determining the number of classes in the discrete representation intended for the oracle, if it is discrete, otherwise this function may not be implemented and will raise an error Arguments: dataset: DatasetBuilder an instance of a subclass of the DatasetBuilder class representing the source of the batch, must be provided Returns: num_classes: int the number of classes in the discrete representation for the model based optimization dataset used for training the oracle """ raise NotImplementedError("cannot run base class")
36.329004
81
0.654075
1,039
8,392
5.238691
0.141482
0.022047
0.026272
0.038582
0.806357
0.789822
0.772919
0.765938
0.746096
0.734705
0
0
0.314109
8,392
230
82
36.486957
0.945622
0.711034
0
0.592593
0
0
0.155332
0
0
0
0
0
0
1
0.296296
false
0
0.037037
0
0.407407
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
7
5e4df04ad40401ba698b908ddf0ce77a96fe8c0f
26,783
py
Python
GradientDescentMethods.py
bbdamodaran/RFF_PCA
33b7af8ca15c850fb6df44ea66c8656ff2132d2f
[ "MIT" ]
null
null
null
GradientDescentMethods.py
bbdamodaran/RFF_PCA
33b7af8ca15c850fb6df44ea66c8656ff2132d2f
[ "MIT" ]
null
null
null
GradientDescentMethods.py
bbdamodaran/RFF_PCA
33b7af8ca15c850fb6df44ea66c8656ff2132d2f
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Thu Jun 30 14:22:25 2016 @author: damodara minimization by Gradient descent method """ def Gradientdescent(SubsetData, W2,wnn,alphaN, RegW,gamma, K=None, update_b=True, philippe_update=False): import numpy as np from grad import gradcoswx1coswx2, Lossfunction2,gradcoswx1coswx2WithOutLoop import time import copy err2=2 tol =1e-10 WTMP = wnn woldnn = wnn gradnorm=[] loss =[] intloss=[] display = False it=0 max_iter = 150 Ncheck =50 beta = np.zeros((max_iter,1)) sigma_inv2 = 2*gamma intloss = np.zeros((max_iter,1)) loss = np.zeros((max_iter,1)) while(it<max_iter): # randomNoise = np.random.normal(0, 1,1) randomNoise = 0.0 r,s = np.shape(W2) if (s==1): # intloss.append(Lossfunction2(SubsetData,wnn,gamma,K)) intloss[it] =Lossfunction2(SubsetData,wnn,gamma,K) else: # intloss.append(Lossfunction2(SubsetData,W2,gamma,K, wnn)) intloss[it] =Lossfunction2(SubsetData,W2,gamma,K, wnn) if update_b: lw = len(wnn) intloss[it] = intloss[it]+RegW*np.sum(np.square(wnn[0:lw-1])) else: intloss[it] = intloss[it]+RegW*np.sum(np.square(wnn)) if display: print('Initial Loss =', intloss[it]) if (it==0): old_loss = intloss[0] if (it == Ncheck): old_loss = intloss[0] min_index = np.argmin(loss[0:Ncheck]) alphaN = beta[np.abs(min_index-1)] if display: print('min_index', min_index) print('alp', alphaN) print('min_loss', loss[min_index]) if (it<Ncheck): old_loss =0.0 #np.random.shuffle(SubsetData) GNew = gradcoswx1coswx2WithOutLoop(SubsetData,W2,wnn,gamma, K) GNew1 = GNew+randomNoise GNew = (RegW * wnn)+GNew1 if update_b: GNew[-1] = GNew1[-1].copy() #gradnorm.append(np.linalg.norm(GNew)) # alpha = alphaN*np.sqrt(100/(100+ita)) if it>=Ncheck: alpha = alphaN/(1+(alphaN*RegW*it)) else: alpha = alphaN wnew = wnn - (alpha)*(GNew) if philippe_update: if update_b: wnew[0:len(wnew)-1] = wnew[0:len(wnew)-1]*(sigma_inv2/np.std(wnew[0:len(wnew)-1])) wnew[len(wnew)-1] = wnew[len(wnew)-1]/(2*np.pi) else: wnew = wnew*(sigma_inv2/np.std(wnew)) WTMP = np.concatenate((WTMP, 
wnn), axis =1) iternorm2 = np.linalg.norm(wnn) if display: print('Learning rate is=', alpha) r,s = np.shape(W2) if (s==1): # loss.append(Lossfunction2(SubsetData,wnew,gamma,K)) loss[it]=Lossfunction2(SubsetData,wnew,gamma,K) else: # loss.append(Lossfunction2(SubsetData,W2,gamma,K, wnew)) loss[it]= Lossfunction2(SubsetData,W2,gamma,K, wnew) if update_b: lw = len(wnew) loss[it] = loss[it]+RegW*np.sum(np.square(wnew[0:lw-1])) else: loss[it] = loss[it]+RegW*np.sum(np.square(wnew)) new_loss =loss[it] if display: print('Loss is' , new_loss) time.sleep(0.7) print('Old Loss is', old_loss) if new_loss<old_loss: wnn = np.copy(wnew) old_loss = np.copy(new_loss) if display: print('update') else: alphaN = alphaN*0.7 beta[it]= np.copy(alphaN) err2=np.linalg.norm(woldnn-wnn) # print('The error2 is', err2) if (err2<=tol) and (it>=75): if display: print('done') break if (it>250): # print('break') break woldnn=np.copy(wnn) it=it+1 if display: print('over') print('==========================') return wnn, loss def NaturalGradientdescent(SubsetData, W2,wnn, alphaN, RegW,sigma, K=None): import numpy as np from grad import gradcoswx1coswx2, Lossfunction2 err2=2 tol =1e-5 WTMP = wnn woldnn = wnn gradnorm=[] loss =[] it=0 while(err2>tol): #np.random.shuffle(SubsetData) r,s = np.shape(W2) if (s==1): loss.append(Lossfunction2(SubsetData,wnn,sigma,K)) else: loss.append(Lossfunction2(SubsetData,W2,sigma,K, wnn)) print('Loss is', loss[it]) GNew = gradcoswx1coswx2(SubsetData,W2,wnn,sigma, K) FM = np.dot(GNew, GNew.T)+0.001 alpha = alphaN /(1+(alphaN*RegW*it)) wnn = wnn- (alpha)* (np.dot(np.linalg.inv(FM), GNew)) WTMP = np.concatenate((WTMP, wnn), axis =1) iternorm2 = np.linalg.norm(wnn) err2=np.linalg.norm(woldnn-wnn) # print('The error2 is', err2) if (err2<=tol): # print('done') break elif (it>250): # print('break') break woldnn=wnn it=it+1 return wnn, loss def BoldDrive(SubsetData, W2,wnn, alphaN, RegW,sigma, K=None): import numpy as np from grad import gradcoswx1coswx2, Lossfunction2 err2=2 
tol =1e-5 WTMP = wnn woldnn = wnn gradnorm=[] loss =[] it=0 while(err2>tol): #np.random.shuffle(SubsetData) r,s = np.shape(W2) if (s==1): loss.append(Lossfunction2(SubsetData,wnn,sigma,K)) else: loss.append(Lossfunction2(SubsetData,W2,sigma,K, wnn)) print('Loss is', loss[it]) GNew = gradcoswx1coswx2(SubsetData,W2,wnn,sigma, K) gradnorm.append(np.linalg.norm(GNew)) # alpha = alphaN*np.sqrt(100/(100+ita)) alpha = alphaN /(1+(alphaN*RegW*it)) wnn = wnn - (alpha)*(RegW*wnn+GNew) WTMP = np.concatenate((WTMP, wnn), axis =1) iternorm2 = np.linalg.norm(wnn) err2=np.linalg.norm(woldnn-wnn) # print('The error2 is', err2) if (err2<=tol): # print('done') break elif (it>250): # print('break') break woldnn=wnn it=it+1 return wnn, loss def Adam(SubsetData,W2, wnn, alpha, RegW, sigma, K=None): import numpy as np from grad import gradcoswx1coswx2, Lossfunction2 import copy if K is None: from sklearn.metrics import pairwise K=pairwise.rbf_kernel(SubsetData,gamma=sigma**2/2) Nfeat = np.shape(W2)[0] err2=2 tol =1e-5 WTMP = copy.copy(wnn) woldnn = copy.copy(wnn) gradnorm=[] loss =[] it=0 m = np.zeros((Nfeat,1)) v = np.zeros((Nfeat,1)) b1 = 0.9 b2 = 0.999 eps = 1e-8 alpha=0.1 while(err2>tol): #np.random.shuffle(SubsetData) it = it+1 # print('iteration', it) GNew = gradcoswx1coswx2(SubsetData,W2,wnn,sigma, K) m = b1*m + (1-b1)*GNew v = b2*v + (1-b2)*(GNew**2) alphaN = alpha*(np.sqrt((1-b2**it)/(1-b1**it))) # alpha = alphaN /(1+(alphaN*RegW*it)) wnn = wnn - (alphaN)* (RegW*wnn+ (m/(np.sqrt(v)+eps))) # wnn = wnn - (alpha)*(0.1*wnn+GNew) # wnn = wnn - (alpha)*(0.0*wnn+NG) WTMP = np.concatenate((WTMP, wnn), axis =1) iternorm2 = np.linalg.norm(wnn) r,s = np.shape(W2) if (s==1): loss.append(Lossfunction2(SubsetData,wnn,sigma,K)) else: loss.append(Lossfunction2(SubsetData,W2,sigma,K, wnn)) print('Loss is', loss[it-1]) err2=np.linalg.norm(woldnn-wnn) # print('The error2 is', err2) if (err2<=tol): # print('done') break elif (it>250): # print('break') break woldnn=wnn return wnn, loss def 
Adagrad(SubsetData,W2, wnn, alpha, RegW, sigma, K=None): import numpy as np from grad import gradcoswx1coswx2, Lossfunction2 import copy if K is None: from sklearn.metrics import pairwise K=pairwise.rbf_kernel(SubsetData,gamma=sigma**2/2) Nfeat = np.shape(W2)[0] err2=2 tol =1e-5 WTMP = copy.copy(wnn) woldnn = copy.copy(wnn) gradnorm=[] loss =[] it=0 CumGrad = np.zeros((Nfeat,1)) eps = 1e-5 alpha=0.1 while(err2>tol): #np.random.shuffle(SubsetData) it = it+1 # print('iteration', it) GNew = gradcoswx1coswx2(SubsetData,W2,wnn,sigma, K) gradnorm.append(np.linalg.norm(GNew)) CumGrad = CumGrad + GNew**2 alphaN = alpha/ (eps+np.sqrt(CumGrad)) wnn = wnn - (alphaN)* (RegW*wnn+GNew ) WTMP = np.concatenate((WTMP, wnn), axis =1) iternorm2 = np.linalg.norm(wnn) r,s = np.shape(W2) if (s==1): loss.append(Lossfunction2(SubsetData,wnn,sigma,K)) else: loss.append(Lossfunction2(SubsetData,W2,sigma,K, wnn)) print('Loss is', loss[it-1]) err2=np.linalg.norm(woldnn-wnn) # print('The error2 is', err2) if (err2<=tol): # print('done') break elif (it>250): # print('break') break woldnn=wnn # minloss = np.argmin() return wnn, gradnorm, loss def Rprop(SubsetData, W2,wnn, alphaN, RegW, sigma, K=None): import numpy as np from grad import gradcoswx1coswx2, Lossfunction2 from math import copysign import copy if K is None: from sklearn.metrics import pairwise K=pairwise.rbf_kernel(SubsetData,gamma=sigma**2/2) err2=2 tol =1e-5 WTMP = wnn woldnn = wnn gradnorm=[] loss =[] it=0 deltaold = 0.01 delta = deltaold deltaMin = np.tile(0.0, (np.size(wnn),1)) deltaMax = np.tile(0.9, (np.size(wnn),1)) nplus = 1.2 nminus = 0.5 GOld = 0.0 deltaW = 0.0 alphaN = 0.1 while(err2>tol): #np.random.shuffle(SubsetData) if (it==51): index = np.argmin(loss) wnn = WTMP[:, index] wnn = wnn[:, np.newaxis] deltaold = alpha print('Index', index) # r, s = np.shape(W2) if (s==1): loss.append(Lossfunction2(SubsetData,wnn,sigma,K)) else: loss.append(Lossfunction2(SubsetData,W2,sigma,K, wnn)) print('Loss is ', loss[it]) GNew = 
gradcoswx1coswx2(SubsetData,W2,wnn,sigma, K) if (it<51): gradnorm.append(np.linalg.norm(GNew)) alpha = alphaN /(1+(alphaN*RegW*it)) wnn = wnn - (alpha)*(RegW*wnn+GNew) # WTMP = np.concatenate((WTMP, wnn), axis =1) ### print('The error2 is', err2) else: sigGrad = GNew*GOld deltaPlus = np.minimum(deltaold*nplus, deltaMax)*(sigGrad>0) deltaminus = np.maximum(deltaold*nminus, deltaMin)*(sigGrad<0) deltaequal = deltaold*(sigGrad==0) delta = deltaPlus+deltaminus+deltaequal signG = np.sign(sigGrad) deltaW = -(signG)*delta*(sigGrad>=0) - deltaW*(sigGrad<0) #wnn = wnn + (0.1*wnn+deltaW) wnn = wnn +deltaW iternorm2 = np.linalg.norm(wnn) if (it>0): err2=np.linalg.norm(woldnn-wnn) if (err2<=tol): # print('done') break elif (it>250): # print('break') break woldnn= copy.copy(wnn) deltaold = copy.copy(delta) GOld = copy.copy(GNew) it=it+1 print('Iteration', it) return wnn, loss def IPprop(SubsetData, W2,wnn,alphaN, RegW, sigma, K=None): import numpy as np from grad import gradcoswx1coswx2, Lossfunction2 from math import copysign import copy if K is None: from sklearn.metrics import pairwise K=pairwise.rbf_kernel(SubsetData,gamma=sigma**2/2) err2=2 tol =1e-5 WTMP = wnn woldnn = 0 gradnorm=[] loss =[] it=0 deltaold = 0.01 delta = deltaold deltaMin = np.tile(0.0, (np.size(wnn),1)) deltaMax = np.tile(1.0, (np.size(wnn),1)) nplus = 1.1 nminus = 0.25 GOld = 0.0 deltaW = 0.0 old_E = 500 while(err2>tol): #np.random.shuffle(SubsetData) if (it==51): index = np.argmin(loss) wnn = WTMP[:, index] wnn = wnn[:, np.newaxis] deltaold = 0.01 old_E = 10000 print('Index', index) r,s= np.shape(W2) if (s==1): loss.append(Lossfunction2(SubsetData,wnn,sigma,K)) else: loss.append(Lossfunction2(SubsetData,W2,sigma,K, wnn)) E = loss[it] GNew = gradcoswx1coswx2(SubsetData,W2,wnn,sigma, K) GNew = (RegW * wnn)+GNew if (it<51): gradnorm.append(np.linalg.norm(GNew)) alpha = alphaN /(1+(alphaN*RegW*it)) wnn = wnn - (alpha)*(GNew) # WTMP = np.concatenate((WTMP, wnn), axis =1) #### print('The error2 is', err2) 
else: sigGrad = GNew*GOld deltaPlus = np.minimum(deltaold*nplus, deltaMax)*(sigGrad>0) deltaminus = np.maximum(deltaold*nminus, deltaMin)*(sigGrad<0) deltaequal = deltaold*(sigGrad==0) delta = deltaPlus+deltaminus+deltaequal signG = np.sign(sigGrad)+ (it==0) deltaW = -(signG)*delta*(sigGrad>=0) - deltaW*((sigGrad<0)*(E>old_E)) #GNew = GNew*(sigGrad>0) #wnn = wnn + (0.1*wnn+deltaW) wnn = wnn +deltaW iternorm2 = np.linalg.norm(wnn) print('Loss is ', loss[it]) if (it>0): err2=np.linalg.norm(woldnn-wnn) if (err2<=tol): # print('done') break elif (it>250): # print('break') break woldnn= copy.copy(wnn) deltaold = copy.copy(delta) GOld = copy.copy(GNew) old_E = copy.copy(E) it=it+1 print('Iteration', it) return wnn, loss def AdaptiveRateBatchCheck(SubsetData, W2,wnn,alphaN, RegW, sigma, K=None): import numpy as np from grad import gradcoswx1coswx2, Lossfunction2 from math import copysign import copy if K is None: from sklearn.metrics import pairwise K=pairwise.rbf_kernel(SubsetData,gamma=sigma**2/2) err2=2 tol =1e-5 niter = 251 WTMP = wnn woldnn = 0 gradnorm=[] loss =np.zeros((niter,1)) it=0 deltaold = 0.01 delta = deltaold deltaMin = np.tile(0.0, (np.size(wnn),1)) deltaMax = np.tile(1.0, (np.size(wnn),1)) nplus = 1.1 nminus = 0.25 GOld = 0.0 deltaW = 0.0 E_old = 500 E = 0 alpha=np.zeros((niter,1)) Ncheck = 20 end_ncheck = 0 n_check = 0 while(err2>tol): #np.random.shuffle(SubsetData) r,s= np.shape(W2) if (s==1): loss[it] = (Lossfunction2(SubsetData,wnn,sigma,K)) else: loss[it] = (Lossfunction2(SubsetData,W2,sigma,K, wnn)) print('Loss is ', loss[it]) if ((np.mod(it, Ncheck)==0) & (it>0)): print ('Error check', it) E = loss[it] st_ncheck = end_ncheck n_check = n_check+1 end_ncheck = Ncheck*n_check if (E>E_old): print('Error greater', it) index = np.argmin(loss[st_ncheck:end_ncheck-1]) index = index+st_ncheck print('Index', index) E = loss[index] wnn = WTMP[:, index] wnn = wnn[:, np.newaxis] alphaN = alpha[index] print ('alphaN ' , alphaN) else: alphaN = alpha[it-1] GNew = 
gradcoswx1coswx2(SubsetData,W2,wnn,sigma, K) GNew = (RegW * wnn)+GNew alpha[it]= (alphaN /(1+(alphaN*RegW*it))) wnn = wnn - (alpha[it])*(GNew) # # if (it==0): # WTMP = wnn # else: WTMP = np.concatenate((WTMP, wnn), axis =1) iternorm2 = np.linalg.norm(wnn) if (it>0): err2=np.linalg.norm(woldnn-wnn) if (err2<=tol): # print('done') break elif (it>=niter-1): # print('break') break woldnn= copy.copy(wnn) E_old = copy.copy(E) it=it+1 print('Iteration', it) minindex = np.argmin(loss) wnn = WTMP[:, minindex] wnn = wnn[:, np.newaxis] return wnn, loss def AdaptiveLearningRateErrorCheck(SubsetData, W2,wnn,alphaN, RegW, gamma, K=None, philippe_update = False): import numpy as np from grad import Lossfunction2,gradcoswx1coswx2WithOutLoop from math import copysign import copy import time if K is None: from sklearn.metrics import pairwise K=pairwise.rbf_kernel(SubsetData,gamma=gamma) err2=2 tol =1e-5 niter = 1001 WTMP = wnn woldnn = 0 gradnorm=[] loss =np.zeros((niter,1)) it=0 SubsetDataN = SubsetData.copy() sigma_inv2 = 2*gamma E_old = 500 E = 0 alpha=np.zeros((niter,1)) Ncheck = 100 end_ncheck = 0 n_check = 0 while(err2>tol): #np.random.shuffle(SubsetData) if (it==Ncheck): # print ('Error check', it) index = np.argmin(loss[0:Ncheck]) # print('Index', index) wnn = WTMP[:, index] wnn = wnn[:, np.newaxis] alphaN = alpha[index] E = loss[index] # print('Error', E) newit = index r,s= np.shape(W2) if (s==1): loss[it] = (Lossfunction2(SubsetData,wnn,gamma,K)) else: loss[it] = (Lossfunction2(SubsetData,W2,gamma,K, wnn)) loss[it]= loss[it]+RegW*(np.sum(np.square(wnn[0:len(wnn)-1]))) # print('Loss is ', loss[it]) # time.sleep(0.5) # IRegW =1 # #nv = 0.0 # # Noise addition to the Gradient # RegW = IRegW/(1+it)*0.55 # randomNoise = np.random.normal(0, nv,1) randomNoise = 0.0 if (it<=Ncheck): #GNew = gradcoswx1coswx2(SubsetData,W2,wnn,sigma, K) GNew = gradcoswx1coswx2WithOutLoop(SubsetData,W2,wnn,gamma, K) GNew1 = GNew+randomNoise GNew = (RegW * wnn)+GNew1 GNew[-1] = GNew1[-1].copy() 
alpha[it]= (alphaN /(1+(alphaN*RegW*it))) wnew = wnn - (alpha[it]*GNew) if philippe_update: wnew = wnew*(sigma_inv2/np.std(wnew)) elif (it>(Ncheck)): E = loss[it] newit =newit+1 if (E>E_old): alphaN = alphaN/5.0 wnn = woldnn.copy() #GNew = gradcoswx1coswx2(SubsetData,W2,wnn,sigma, K) GNew = gradcoswx1coswx2WithOutLoop(SubsetData,W2,wnn,gamma, K) GNew1 = GNew+randomNoise GNew = (RegW * wnn)+GNew1 GNew[-1] = GNew1[-1].copy() beta = (alphaN)# /(1+(alphaN*RegW*newit))) wnew = wnn - (beta*GNew) if philippe_update: wnew = wnew*(sigma_inv2/np.std(wnew)) else: #GNew = gradcoswx1coswx2(SubsetData,W2,wnn,sigma, K) GNew = gradcoswx1coswx2WithOutLoop(SubsetData,W2,wnn,gamma, K) GNew1 = GNew+randomNoise GNew = (RegW * wnn)+GNew1 GNew[-1] = GNew1[-1].copy() alphaN = alphaN/2.0 #beta = (alphaN /(1+(alphaN*RegW*newit))) beta = alphaN wnew = wnn - (beta*GNew) if philippe_update: wnew = wnew*(sigma_inv2/np.std(wnew)) # SubsetData = SubsetDataN + np.random.normal(0,0.01,(1,1)) # if (it==0): # WTMP = wnew # else: WTMP = np.concatenate((WTMP, wnew), axis =1) iternorm2 = np.linalg.norm(wnew) if (it>0): err2=np.linalg.norm(woldnn-wnew) if (err2<=tol): # print('done') break elif (it>=niter-1): # print('break') break woldnn= copy.copy(wnn) wnn = copy.copy(wnew) E_old = copy.copy(E) it=it+1 # print('Iteration', it) # print('Iterations', it) if (it==1): minindex = np.argmin(loss[0]) else: minindex = np.argmin(loss[0:it-1]) wnn = WTMP[:, minindex] wnn = wnn[:, np.newaxis] return wnn, loss def philippe_AdaptiveLearningRateErrorCheck(SubsetData, W2,wnn,alphaN, RegW, sigma, K=None): ''' weight update as mentioned by philippe new_w = sigma/std(new_w) ''' import numpy as np from grad import gradcoswx1coswx2, Lossfunction2,gradcoswx1coswx2WithOutLoop from math import copysign import copy import time if K is None: from sklearn.metrics import pairwise K=pairwise.rbf_kernel(SubsetData,gamma=sigma**2/2) err2=2 tol =1e-10 niter = 201 WTMP = wnn woldnn = 0 gradnorm=[] loss =np.zeros((niter,1)) it=0 
SubsetDataN = SubsetData.copy() gamma=sigma**2/2 b_update = True E_old = 500 E = 0 alpha=np.zeros((niter,1)) Ncheck = 100 end_ncheck = 0 n_check = 0 while(it<niter): #np.random.shuffle(SubsetData) if (it==Ncheck): print ('Error check', it) index = np.argmin(loss[0:Ncheck]) print('Index', index) wnn = WTMP[:, index] wnn = wnn[:, np.newaxis] alphaN = alpha[index] # alphaN = 1.0 E = loss[index] old_loss =E print('Error', E) newit = index r,s= np.shape(W2) if (s==1): loss[it] = (Lossfunction2(SubsetData,wnn,sigma,K)) else: loss[it] = (Lossfunction2(SubsetData,W2,sigma,K, wnn)) loss[it] = loss[it]+RegW*np.sum(np.square(wnn)) #print('Loss is ', loss[it]) # IRegW =1 # #nv = 0.0 # # Noise addition to the Gradient # RegW = IRegW/(1+it)*0.55 # randomNoise = np.random.normal(0, nv,1) randomNoise = 0.0 if (it<=Ncheck): #GNew = gradcoswx1coswx2(SubsetData,W2,wnn,sigma, K) GNew = gradcoswx1coswx2WithOutLoop(SubsetData,W2,wnn,sigma, K) GNew1 = GNew+randomNoise GNew = (RegW * wnn)+GNew1 if b_update: GNew[-1] = GNew1[-1].copy() alpha[it]= (alphaN /(1+(alphaN*RegW*it))) wnew = wnn - (alpha[it]*GNew) # philippe update wnew = wnew*(np.sqrt(2*gamma)/np.std(wnew)) wnn = copy.copy(wnew) elif (it>(Ncheck)): GNew = gradcoswx1coswx2WithOutLoop(SubsetData,W2,wnn,sigma, K) GNew1 = GNew+randomNoise GNew = (RegW * wnn)+GNew1 if b_update: GNew[-1] = GNew1[-1].copy() beta = (alphaN) /(1+(alphaN*RegW*newit)) wnew = wnn - (beta*GNew) # philippe update wnew = wnew*(np.sqrt(2*gamma)/np.std(wnew)) print('Learning rate =', beta) if (s==1): nloss = (Lossfunction2(SubsetData,wnew,sigma,K)) else: nloss = (Lossfunction2(SubsetData,W2,sigma,K, wnew)) new_loss = nloss+RegW*np.sum(np.square(wnew)) print('New Loss =', new_loss) print('Old Loss =', old_loss) time.sleep(0.7) newit =newit+1 if (new_loss<old_loss): wnn = np.copy(wnew) old_loss = np.copy(new_loss) print('update') else: alphaN = alphaN*0.7 # SubsetData = SubsetDataN + np.random.normal(0,0.01,(1,1)) # if (it==0): # WTMP = wnew # else: WTMP = 
np.concatenate((WTMP, wnew), axis =1) iternorm2 = np.linalg.norm(wnew) if (it>0): err2=np.linalg.norm(woldnn-wnew) # if (err2<=tol): ## print('done') # break if (it>=niter-1): # print('break') break woldnn= copy.copy(wnn) it=it+1 # print('Iteration', it) # print('Iterations', it) if (it==1): minindex = np.argmin(loss[0]) else: minindex = np.argmin(loss[0:it-1]) wnn = WTMP[:, minindex] wnn = wnn[:, np.newaxis] return wnn, loss
30.331823
106
0.480454
3,075
26,783
4.156748
0.066341
0.038492
0.031685
0.036145
0.855343
0.821311
0.793616
0.767094
0.747536
0.743311
0
0.042555
0.391965
26,783
883
107
30.331823
0.74234
0.124295
0
0.805901
0
0
0.013192
0.001159
0
0
0
0
0
1
0.015528
false
0
0.06677
0
0.097826
0.052795
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
5eae55fb2cbba8af025fdebb44832ac555b80ae9
106
py
Python
tests/services/blockchain_follower/store_notification.py
dpays/dsocial-notifications
32b1cdcd58d622407fd50206053c5b9735a56ba9
[ "MIT" ]
10
2017-10-22T20:07:40.000Z
2018-08-01T21:48:49.000Z
tests/services/blockchain_follower/store_notification.py
dpays/dsocial-notifications
32b1cdcd58d622407fd50206053c5b9735a56ba9
[ "MIT" ]
81
2017-08-19T15:38:32.000Z
2020-05-12T09:56:14.000Z
tests/services/blockchain_follower/store_notification.py
dpays/dsocial-notifications
32b1cdcd58d622407fd50206053c5b9735a56ba9
[ "MIT" ]
9
2017-09-19T07:12:20.000Z
2021-05-25T17:09:27.000Z
# coding=utf-8 import yo.services.blockchain_follower.service def test_store_notifications(): pass
13.25
46
0.783019
14
106
5.714286
1
0
0
0
0
0
0
0
0
0
0
0.01087
0.132075
106
7
47
15.142857
0.858696
0.113208
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
1
0
1
0
0
7
5eaebc17ddabdc9f58a45b8cd042c376c1da5ecf
3,341
py
Python
src/save_var.py
bdrummond1/um_post_proc
2dc1dcaa164772e09e77cd3f3e7d927f2237228a
[ "MIT" ]
1
2020-04-23T17:06:40.000Z
2020-04-23T17:06:40.000Z
src/save_var.py
bdrummond1/um_post_proc
2dc1dcaa164772e09e77cd3f3e7d927f2237228a
[ "MIT" ]
null
null
null
src/save_var.py
bdrummond1/um_post_proc
2dc1dcaa164772e09e77cd3f3e7d927f2237228a
[ "MIT" ]
null
null
null
import netCDF4 as nc from constant_umpp import * import os import sys def save_var_3d(save_dir,fname,x,y,z,f): # Check if directory exists if os.path.exists(save_dir+'saved_vars') == False: # Create saved_vars directory os.makedirs(save_dir+'saved_vars') # Open file fout = nc.Dataset(save_dir+'saved_vars/'+fname,'w') # Create dimensions nx = x.size ny = y.size nz = z.size fout.createDimension('x',nx) fout.createDimension('y',ny) fout.createDimension('z',nz) # Create variable varout = fout.createVariable('variable','f8',('z','y','x')) xout = fout.createVariable('x','f8',('x')) yout = fout.createVariable('y','f8',('y')) zout = fout.createVariable('z','f8',('z')) # Copy variable varout[:,:,:] = f xout[:] = x yout[:] = y zout[:] = z fout.close() def save_var_2d(save_dir,fname,x,y,f): # Check if directory exists if os.path.exists(save_dir+'saved_vars') == False: # Create saved_vars directory os.makedirs(save_dir+'saved_vars') # Open file fout = nc.Dataset(save_dir+'saved_vars/'+fname,'w') # Create dimensions nx = x.size ny = y.size fout.createDimension('x',nx) fout.createDimension('y',ny) # Create variable varout = fout.createVariable('variable','f8',('y','x')) xout = fout.createVariable('x','f8',('x')) yout = fout.createVariable('y','f8',('y')) # Copy variable varout[:,:] = f xout[:] = x yout[:] = y fout.close() if verbose: print 'Saved variable at : ',fname # Save constructed variable to netcdf file def save_var_1d(save_dir,fname,x,f): # Check if directory exists if os.path.exists(save_dir+'saved_vars') == False: # Create saved_vars directory os.makedirs(save_dir+'saved_vars') # Open file fout = nc.Dataset(save_dir+'saved_vars/'+fname,'w') # Create Dimensions nx = x.size fout.createDimension('x',nx) # Create variable varout = fout.createVariable('variable','f8',('x')) xout = fout.createVariable('x','f8',('x')) # Copy variables varout[:] = f xout[:] = x fout.close() if verbose: print 'Saved variable at : ', fname+'_saved_var' def read_saved_var_3d(save_dir,fname): # 
Open file fin = nc.Dataset(save_dir+'saved_vars/'+fname,'r') # Read dimensions dims = fin.variables['variable'].dimensions x = fin.variables[dims[0]][:] y = fin.variables[dims[1]][:] z = fin.variables[dims[2]][:] # Read variable f = fin.variables['variable'][:,:] print 'Read saved variable from file: ',fname+'_saved_var' return x, y, z, f def read_saved_var_2d(save_dir,fname): # Open file fin = nc.Dataset(save_dir+'saved_vars/'+fname,'r') # Read dimensions dims = fin.variables['variable'].dimensions x = fin.variables[dims[0]][:] y = fin.variables[dims[1]][:] # Read variable f = fin.variables['variable'][:,:] print 'Read saved variable from file: ',fname+'_saved_var' return x, y, f # Read saved variable 1D def read_saved_var_1d(save_dir,fname): # Open file fin = nc.Dataset(save_dir+'saved_vars/'+fname,'r') # Read dimensions dims = fin.variables['variable'].dimensions x = fin.variables[dims[0]][:] # Read variable f = fin.variables['variable'][:] print 'Read saved variable from file: ',fname+'_saved_var' return x, f
22.422819
61
0.639629
476
3,341
4.369748
0.138655
0.060577
0.069231
0.092308
0.860577
0.816346
0.816346
0.734135
0.70625
0.620673
0
0.008519
0.191859
3,341
148
62
22.574324
0.761852
0.155642
0
0.649351
0
0
0.150179
0
0
0
0
0
0
0
null
null
0
0.051948
null
null
0.064935
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
7
5eb9af4a323bab115f39e84c403392d5963ef13d
12,621
py
Python
reduction/cifar10/factorized-layers/models.py
gishikawa3/my-nnabla-examples
44af0c16cf3e730011a54cd104483d5f956c8f15
[ "Apache-2.0" ]
1
2018-10-22T15:37:33.000Z
2018-10-22T15:37:33.000Z
reduction/cifar10/factorized-layers/models.py
gishikawa3/my-nnabla-examples
44af0c16cf3e730011a54cd104483d5f956c8f15
[ "Apache-2.0" ]
null
null
null
reduction/cifar10/factorized-layers/models.py
gishikawa3/my-nnabla-examples
44af0c16cf3e730011a54cd104483d5f956c8f15
[ "Apache-2.0" ]
null
null
null
# Copyright (c) 2017 Sony Corporation. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from six.moves import range import os import nnabla as nn import nnabla.logger as logger import nnabla.functions as F import nnabla.parametric_functions as PF import nnabla.solvers as S import nnabla.utils.save as save from nnabla.parameter import get_parameter, get_parameter_or_create, set_parameter import numpy as np def categorical_error(pred, label): """ Compute categorical error given score vectors and labels as numpy.ndarray. """ pred_label = pred.argmax(1) return (pred_label != label.flat).mean() def cifar10_resnet23_prediction(image, maps=64, test=False): """ Construct Resnet23 as reference. 
""" # Residual Unit def res_unit(x, scope_name, dn=False): C = x.shape[1] with nn.parameter_scope(scope_name): # Conv -> BN -> Relu with nn.parameter_scope("conv1"): h = PF.convolution(x, C // 2, kernel=(1, 1), pad=(0, 0), with_bias=False) h = PF.batch_normalization(h, batch_stat=not test) h = F.relu(h) # Conv -> BN -> Relu with nn.parameter_scope("conv2"): h = PF.convolution(h, C // 2, kernel=(3, 3), pad=(1, 1), with_bias=False) h = PF.batch_normalization(h, batch_stat=not test) h = F.relu(h) # Conv -> BN with nn.parameter_scope("conv3"): h = PF.convolution(h, C, kernel=(1, 1), pad=(0, 0), with_bias=False) h = PF.batch_normalization(h, batch_stat=not test) # Residual -> Relu h = F.relu(h + x) # Maxpooling if dn: h = F.max_pooling(h, kernel=(2, 2), stride=(2, 2)) return h ncls = 10 # Conv -> BN -> Relu with nn.parameter_scope("conv1"): # Preprocess image /= 255.0 if not test: image = F.image_augmentation(image, contrast=1.0, angle=0.25, flip_lr=True) image.need_grad = False h = PF.convolution(image, maps, kernel=(3, 3), pad=(1, 1), with_bias=False) h = PF.batch_normalization(h, batch_stat=not test) h = F.relu(h) h = res_unit(h, "conv2", False) # -> 32x32 h = res_unit(h, "conv3", True) # -> 16x16 h = res_unit(h, "conv4", False) # -> 16x16 h = res_unit(h, "conv5", True) # -> 8x8 h = res_unit(h, "conv6", False) # -> 8x8 h = res_unit(h, "conv7", True) # -> 4x4 h = res_unit(h, "conv8", False) # -> 4x4 h = F.average_pooling(h, kernel=(4, 4)) # -> 1x1 pred = PF.affine(h, ncls) return pred def cifar10_cpd3_factorized_resnet23_prediction(image, maps=64, test=False, compression_ratio=0.0): """ Construct Resnet23 with factorized affine and convolution """ # SVD affine def svd_affine(x, n_outputs, cr): W = get_parameter('affine/W') if W is None: UV = None else: UV = W.d b = get_parameter('affine/b') # compute rank (size of intermediate activations) # to obtained desired reduction inshape = np.prod(x.shape[1:]) outshape = np.prod(n_outputs) rank = 
int(np.floor((1-cr)*inshape*outshape/(inshape+outshape))) # Initialize bias to existing b in affine if exists if b is not None: b_new = get_parameter_or_create( 'svd_affine/b', b.d.shape, need_grad=b.need_grad) b_new.d = b.d.copy() logger.info("SVD affine created: input_shape = {}; output_shape = {}; compression = {}; rank = {};".format( inshape, outshape, cr, rank)) # create svd_affine initialized from W in current context if it exists return PF.svd_affine(x, n_outputs, rank, uv_init=UV) # CP convolution def cpd3_convolution(x, n_outputs, kernel, pad, with_bias, cr): W = get_parameter('conv/W') if W is None: OIK = None else: OIK = W.d b = get_parameter('conv/b') # compute rank (size of intermediate activations) # to obtained desired reduction inmaps = x.shape[1] outmaps = n_outputs Ksize = np.prod(kernel) rank = int(np.floor((1-cr)*inmaps*outmaps * Ksize/(inmaps+outmaps+Ksize))) # Initialize bias to existing b in affine if exists if b is not None: b_new = get_parameter_or_create( 'cpd3_conv/b', b.d.shape, need_grad=b.need_grad) b_new.d = b.d.copy() logger.info("CP convolution created: inmaps = {}; outmaps = {}; compression = {}; rank = {};".format( inmaps, outmaps, cr, rank)) # create cpd3_convolution initialized from W in current context if it exists return PF.cpd3_convolution(x, n_outputs, kernel=kernel, r=rank, pad=pad, with_bias=with_bias, oik_init=OIK) # Residual Unit def res_unit(x, scope_name, dn=False): C = x.shape[1] with nn.parameter_scope(scope_name): # Conv -> BN -> Relu with nn.parameter_scope("conv1"): h = PF.convolution(x, C // 2, kernel=(1, 1), pad=(0, 0), with_bias=False) h = PF.batch_normalization(h, batch_stat=not test) h = F.relu(h) # Conv -> BN -> Relu with nn.parameter_scope("conv2"): h = cpd3_convolution(h, C // 2, kernel=(3, 3), pad=(1, 1), with_bias=False, cr=compression_ratio) h = PF.batch_normalization(h, batch_stat=not test) h = F.relu(h) # Conv -> BN with nn.parameter_scope("conv3"): h = PF.convolution(h, C, kernel=(1, 1), pad=(0, 0), 
with_bias=False) h = PF.batch_normalization(h, batch_stat=not test) # Residual -> Relu h = F.relu(h + x) # Maxpooling if dn: h = F.max_pooling(h, kernel=(2, 2), stride=(2, 2)) return h ncls = 10 # Conv -> BN -> Relu with nn.parameter_scope("conv1"): # Preprocess image /= 255.0 if not test: image = F.image_augmentation(image, contrast=1.0, angle=0.25, flip_lr=True) image.need_grad = False h = cpd3_convolution(image, maps, kernel=(3, 3), pad=(1, 1), with_bias=False, cr=compression_ratio) h = PF.batch_normalization(h, batch_stat=not test) h = F.relu(h) h = res_unit(h, "conv2", False) # -> 32x32 h = res_unit(h, "conv3", True) # -> 16x16 h = res_unit(h, "conv4", False) # -> 16x16 h = res_unit(h, "conv5", True) # -> 8x8 h = res_unit(h, "conv6", False) # -> 8x8 h = res_unit(h, "conv7", True) # -> 4x4 h = res_unit(h, "conv8", False) # -> 4x4 h = F.average_pooling(h, kernel=(4, 4)) # -> 1x1 pred = svd_affine(h, ncls, compression_ratio) return pred def cifar10_svd_factorized_resnet23_prediction(image, maps=64, test=False, compression_ratio=0.0): """ Construct Resnet23 with factorized affine and convolution """ # SVD affine def svd_affine(x, n_outputs, cr): W = get_parameter('affine/W') if W is None: UV = None else: UV = W.d b = get_parameter('affine/b') # compute rank (size of intermediate activations) # to obtained desired reduction inshape = np.prod(x.shape[1:]) outshape = np.prod(n_outputs) rank = int(np.floor((1-cr)*inshape*outshape/(inshape+outshape))) # Initialize bias to existing b in affine if exists if b is not None: b_new = get_parameter_or_create( 'svd_affine/b', b.d.shape, need_grad=b.need_grad) b_new.d = b.d.copy() logger.info("SVD affine created: input_shape = {}; output_shape = {}; compression = {}; rank = {};".format( inshape, outshape, cr, rank)) # create svd_affine initialized from W in current context if it exists return PF.svd_affine(x, n_outputs, rank, uv_init=UV) # SVD convolution def svd_convolution(x, n_outputs, kernel, pad, with_bias, cr): W = 
get_parameter('conv/W') if W is None: UV = None else: UV = W.d b = get_parameter('conv/b') # compute rank (size of intermediate activations) # to obtained desired reduction inmaps = x.shape[1] outmaps = n_outputs Ksize = np.prod(kernel) rank = int(np.floor((1-cr)*inmaps*outmaps * Ksize/(inmaps*Ksize+inmaps*outmaps))) # Initialize bias to existing b in affine if exists if b is not None: b_new = get_parameter_or_create( 'svd_conv/b', b.d.shape, need_grad=b.need_grad) b_new.d = b.d.copy() logger.info("SVD convolution created: inmaps = {}; outmaps = {}; compression = {}; rank = {};".format( inmaps, outmaps, cr, rank)) # create svd_convolution initialized from W in current context if it exists return PF.svd_convolution(x, n_outputs, kernel=kernel, r=rank, pad=pad, with_bias=with_bias, uv_init=UV) # Residual Unit def res_unit(x, scope_name, dn=False): C = x.shape[1] with nn.parameter_scope(scope_name): # Conv -> BN -> Relu with nn.parameter_scope("conv1"): h = PF.convolution(x, C // 2, kernel=(1, 1), pad=(0, 0), with_bias=False) h = PF.batch_normalization(h, batch_stat=not test) h = F.relu(h) # Conv -> BN -> Relu with nn.parameter_scope("conv2"): h = svd_convolution(h, C // 2, kernel=(3, 3), pad=(1, 1), with_bias=False, cr=compression_ratio) h = PF.batch_normalization(h, batch_stat=not test) h = F.relu(h) # Conv -> BN with nn.parameter_scope("conv3"): h = PF.convolution(h, C, kernel=(1, 1), pad=(0, 0), with_bias=False) h = PF.batch_normalization(h, batch_stat=not test) # Residual -> Relu h = F.relu(h + x) # Maxpooling if dn: h = F.max_pooling(h, kernel=(2, 2), stride=(2, 2)) return h ncls = 10 # Conv -> BN -> Relu with nn.parameter_scope("conv1"): # Preprocess image /= 255.0 if not test: image = F.image_augmentation(image, contrast=1.0, angle=0.25, flip_lr=True) image.need_grad = False h = svd_convolution(image, maps, kernel=(3, 3), pad=(1, 1), with_bias=False, cr=compression_ratio) h = PF.batch_normalization(h, batch_stat=not test) h = F.relu(h) h = res_unit(h, "conv2", 
False) # -> 32x32 h = res_unit(h, "conv3", True) # -> 16x16 h = res_unit(h, "conv4", False) # -> 16x16 h = res_unit(h, "conv5", True) # -> 8x8 h = res_unit(h, "conv6", False) # -> 8x8 h = res_unit(h, "conv7", True) # -> 4x4 h = res_unit(h, "conv8", False) # -> 4x4 h = F.average_pooling(h, kernel=(4, 4)) # -> 1x1 pred = svd_affine(h, ncls, compression_ratio) return pred
37.230088
115
0.53514
1,635
12,621
4.004281
0.13211
0.025661
0.025661
0.028868
0.835344
0.835039
0.833359
0.827555
0.827555
0.827555
0
0.031486
0.348229
12,621
338
116
37.340237
0.764406
0.174788
0
0.836364
0
0
0.057936
0
0
0
0
0
0
1
0.05
false
0
0.045455
0
0.145455
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
0db39d51f35c527c2e819b21d9297351c97cd8d7
7,580
py
Python
rlpyt/ul/models/dmlab_conv2d.py
Slienteagle-wyb/rlpyt
fee14b1c28481f6751a7a1a6ce9b8cbac1e001f0
[ "MIT" ]
null
null
null
rlpyt/ul/models/dmlab_conv2d.py
Slienteagle-wyb/rlpyt
fee14b1c28481f6751a7a1a6ce9b8cbac1e001f0
[ "MIT" ]
null
null
null
rlpyt/ul/models/dmlab_conv2d.py
Slienteagle-wyb/rlpyt
fee14b1c28481f6751a7a1a6ce9b8cbac1e001f0
[ "MIT" ]
null
null
null
import torch import torch.nn.functional as F from rlpyt.models.utils import conv2d_output_shape from rlpyt.ul.models.utils import init_normalization class DmlabConv2dModelBn(torch.nn.Module): def __init__( self, in_channels, use_fourth_layer=True, skip_connections=True, use_maxpool=False, norm_type='bn', ): super().__init__() self.conv1 = torch.nn.Conv2d( in_channels=in_channels, out_channels=32, kernel_size=8, stride=1 if use_maxpool else 4, padding=2 if use_maxpool else 0, ) self.bn1 = init_normalization(channels=32, type_id=norm_type, one_d=False) self.maxpool1 = torch.nn.MaxPool2d(kernel_size=4, stride=4) if use_maxpool else None self.conv2 = torch.nn.Conv2d( in_channels=32, out_channels=64, kernel_size=4, stride=1 if use_maxpool else 2, padding=1 if use_maxpool else 0, ) self.bn2 = init_normalization(channels=64, type_id=norm_type, one_d=False) self.maxpool2 = torch.nn.MaxPool2d(kernel_size=2, stride=2) if use_maxpool else None self.conv3 = torch.nn.Conv2d( in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1, ) self.bn3 = init_normalization(channels=64, type_id=norm_type, one_d=False) if use_fourth_layer: self.conv4 = torch.nn.Conv2d( in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1, ) self.bn4 = init_normalization(channels=64, type_id=norm_type, one_d=False) else: self.conv4 = None # if skip_connections: # if self.conv4 is not None and channels4 != c3: # self.skip4 = torch.nn.Conv2d( # in_channels=c3, # out_channels=channels4, # kernel_size=1, # stride=1, # padding=0, # ) # else: # self.skip4 = None self.skip_connections = skip_connections def forward(self, x): h1 = self.bn1(self.conv1(x)) conv1 = F.relu(h1) if self.maxpool1 is not None: conv1 = self.maxpool1(conv1) h2 = self.bn2(self.conv2(conv1)) conv2 = F.relu(h2) if self.maxpool2 is not None: conv2 = self.maxpool2(conv2) conv3_pre = self.bn3(self.conv3(conv2)) if self.skip_connections: conv3_pre = conv3_pre + conv2 conv3 = F.relu(conv3_pre) if self.conv4 is None: return 
conv3 conv4_pre = self.bn4(self.conv4(conv3)) if self.skip_connections: # if self.skip4 is not None: # conv3_pre = self.skip4(conv3_pre) conv4_pre = conv4_pre + conv3_pre conv4 = F.relu(conv4_pre) return conv4 def output_shape(self, h, w, c=None): """Helper function ot return the output shape for a given input shape, without actually performing a forward pass through the model.""" for child in self.children(): try: h, w = conv2d_output_shape(h, w, child.kernel_size, child.stride, child.padding) except AttributeError: pass # Not a conv or maxpool layer. try: c = child.out_channels except AttributeError: pass # Not a conv layer. return c, h, w def output_size(self, h, w, c=None): """Helper function ot return the output size for a given input shape, without actually performing a forward pass through the model.""" c, h, w = self.output_shape(h=h, w=w, c=c) return c * h * w class DmlabConv2dModel(torch.nn.Module): # A more hard-coded version, easier to work with. def __init__( self, in_channels, use_fourth_layer=True, skip_connections=True, use_maxpool=False, ): super().__init__() self.conv1 = torch.nn.Conv2d( in_channels=in_channels, out_channels=32, kernel_size=8, stride=1 if use_maxpool else 4, padding=2 if use_maxpool else 0, ) self.maxpool1 = torch.nn.MaxPool2d(kernel_size=4, stride=4) if use_maxpool else None self.conv2 = torch.nn.Conv2d( in_channels=32, out_channels=64, kernel_size=4, stride=1 if use_maxpool else 2, padding=1 if use_maxpool else 0, ) self.maxpool2 = torch.nn.MaxPool2d(kernel_size=2, stride=2) if use_maxpool else None self.conv3 = torch.nn.Conv2d( in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1, ) if use_fourth_layer: self.conv4 = torch.nn.Conv2d( in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1, ) else: self.conv4 = None # if skip_connections: # if self.conv4 is not None and channels4 != c3: # self.skip4 = torch.nn.Conv2d( # in_channels=c3, # out_channels=channels4, # kernel_size=1, # stride=1, # padding=0, 
# ) # else: # self.skip4 = None self.skip_connections = skip_connections def forward(self, x): conv1 = F.relu(self.conv1(x)) if self.maxpool1 is not None: conv1 = self.maxpool1(conv1) conv2 = F.relu(self.conv2(conv1)) if self.maxpool2 is not None: conv2 = self.maxpool2(conv2) conv3_pre = self.conv3(conv2) if self.skip_connections: conv3_pre = conv3_pre + conv2 conv3 = F.relu(conv3_pre) if self.conv4 is None: return conv3 conv4_pre = self.conv4(conv3) if self.skip_connections: # if self.skip4 is not None: # conv3_pre = self.skip4(conv3_pre) conv4_pre = conv4_pre + conv3_pre conv4 = F.relu(conv4_pre) return conv4 def output_shape(self, h, w, c=None): """Helper function ot return the output shape for a given input shape, without actually performing a forward pass through the model.""" for child in self.children(): try: h, w = conv2d_output_shape(h, w, child.kernel_size, child.stride, child.padding) except AttributeError: pass # Not a conv or maxpool layer. try: c = child.out_channels except AttributeError: pass # Not a conv layer. return c, h, w def output_size(self, h, w, c=None): """Helper function ot return the output size for a given input shape, without actually performing a forward pass through the model.""" c, h, w = self.output_shape(h=h, w=w, c=c) return c * h * w if __name__ == '__main__': model = DmlabConv2dModelBn( in_channels=3 ) x = torch.rand(1, 3, 84, 84) conv4 = model(x) print(conv4.size())
34.770642
96
0.543536
935
7,580
4.242781
0.124064
0.029997
0.036299
0.048399
0.878498
0.878498
0.878498
0.878498
0.8727
0.8727
0
0.0468
0.371372
7,580
218
97
34.770642
0.785729
0.181266
0
0.785714
0
0
0.001633
0
0
0
0
0
0
1
0.047619
false
0.02381
0.02381
0
0.130952
0.005952
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
21a1da127de7450f570d9238684f7990e841c23c
92
py
Python
Chapter03/core/models/__init__.py
ariwells2001/Python-Programming-Blueprints
23981ab304e65bcc24560393c75fd5ee85c96ce5
[ "MIT" ]
72
2017-12-19T09:19:40.000Z
2021-11-08T13:13:34.000Z
Chapter03/core/models/__init__.py
ariwells2001/Python-Programming-Blueprints
23981ab304e65bcc24560393c75fd5ee85c96ce5
[ "MIT" ]
20
2018-03-21T01:15:27.000Z
2021-09-08T00:59:40.000Z
Chapter03/core/models/__init__.py
ariwells2001/Python-Programming-Blueprints
23981ab304e65bcc24560393c75fd5ee85c96ce5
[ "MIT" ]
53
2017-12-19T09:19:42.000Z
2022-03-06T02:21:10.000Z
from .models import Config from .models import RequestToken from .models import RequestAuth
23
32
0.836957
12
92
6.416667
0.5
0.38961
0.623377
0
0
0
0
0
0
0
0
0
0.130435
92
3
33
30.666667
0.9625
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
21a38ef9bac3fed80c67bfc66ee9bd461909cf31
242
py
Python
tests/test_statistics.py
soccermetrics/marcotti
eda2f19bd6cbc6f9c7482e8fe31b2233b33aacfd
[ "MIT" ]
30
2015-11-23T07:51:54.000Z
2020-06-29T16:11:55.000Z
tests/test_statistics.py
soccermetrics/marcotti
eda2f19bd6cbc6f9c7482e8fe31b2233b33aacfd
[ "MIT" ]
1
2016-06-26T18:44:47.000Z
2016-06-29T03:02:40.000Z
tests/test_statistics.py
soccermetrics/marcotti
eda2f19bd6cbc6f9c7482e8fe31b2233b33aacfd
[ "MIT" ]
8
2016-01-13T12:23:16.000Z
2021-10-11T07:39:33.000Z
# coding=utf-8 import pytest from sqlalchemy.exc import DataError import marcotti.models.club as mc import marcotti.models.common.personnel as mcp import marcotti.models.common.statistics as mcs import marcotti.models.common.enums as enums
24.2
47
0.830579
37
242
5.432432
0.540541
0.278607
0.39801
0.38806
0
0
0
0
0
0
0
0.00463
0.107438
242
9
48
26.888889
0.925926
0.049587
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
21a52d12088a78fbdde0193ae1f584707be0118b
124
py
Python
matilda/fundamental_analysis/fundamental_factor_scores/__init__.py
AlainDaccache/Quantropy
6cfa06ed2b764471382ebf94d40af867f10433bb
[ "MIT" ]
45
2021-01-28T04:12:21.000Z
2022-02-24T13:15:50.000Z
matilda/fundamental_analysis/fundamental_factor_scores/__init__.py
AlainDaccache/Quantropy
6cfa06ed2b764471382ebf94d40af867f10433bb
[ "MIT" ]
32
2021-03-02T18:45:16.000Z
2022-03-12T00:53:10.000Z
matilda/fundamental_analysis/fundamental_factor_scores/__init__.py
AlainDaccache/Quantropy
6cfa06ed2b764471382ebf94d40af867f10433bb
[ "MIT" ]
10
2020-12-25T15:02:40.000Z
2021-12-30T11:40:15.000Z
from matilda.fundamental_analysis.financial_statements import * from matilda.fundamental_analysis.accounting_ratios import *
62
63
0.895161
14
124
7.642857
0.642857
0.205607
0.411215
0.560748
0
0
0
0
0
0
0
0
0.056452
124
2
64
62
0.91453
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
21d39e33a98fb4d48063fb3e2a6b5a7c463c9737
81
py
Python
src/commands/scripts/creds/__init__.py
brandontkessler/workspace
ea2f30635563f2b9e9fc5e6fae490d77b6d84aea
[ "MIT" ]
null
null
null
src/commands/scripts/creds/__init__.py
brandontkessler/workspace
ea2f30635563f2b9e9fc5e6fae490d77b6d84aea
[ "MIT" ]
null
null
null
src/commands/scripts/creds/__init__.py
brandontkessler/workspace
ea2f30635563f2b9e9fc5e6fae490d77b6d84aea
[ "MIT" ]
null
null
null
from .code_creds import setup_bcreds from .aws_creds_setup import aws_creds_setup
40.5
44
0.888889
14
81
4.714286
0.5
0.242424
0.393939
0
0
0
0
0
0
0
0
0
0.08642
81
2
44
40.5
0.891892
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
21dba776509016c7f1a2f89cf8b92172d6ceeea6
153
py
Python
making_pizzas.py
AmitSuresh/learning-python
f1ea5b9f3659f21504b1b0e452c03239b03cde85
[ "MIT" ]
null
null
null
making_pizzas.py
AmitSuresh/learning-python
f1ea5b9f3659f21504b1b0e452c03239b03cde85
[ "MIT" ]
null
null
null
making_pizzas.py
AmitSuresh/learning-python
f1ea5b9f3659f21504b1b0e452c03239b03cde85
[ "MIT" ]
null
null
null
import pizza_fn2 #importing specific functions pizza_fn2.make_pizza(16, 'pepperoni') pizza_fn2.make_pizza(12, 'mushroom', 'peppers'), 'extra cheese'
51
63
0.771242
21
153
5.380952
0.666667
0.212389
0.212389
0.300885
0
0
0
0
0
0
0
0.051095
0.104575
153
3
63
51
0.773723
0.183007
0
0
0
0
0.292683
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
21ddad3521341aba96656875e109e2f892222293
4,513
py
Python
project/c++/mri/src/PI-slim-napa/db/mysql_migrate.py
jia57196/code41
df611f84592afd453ccb2d22a7ad999ddb68d028
[ "Apache-2.0" ]
null
null
null
project/c++/mri/src/PI-slim-napa/db/mysql_migrate.py
jia57196/code41
df611f84592afd453ccb2d22a7ad999ddb68d028
[ "Apache-2.0" ]
null
null
null
project/c++/mri/src/PI-slim-napa/db/mysql_migrate.py
jia57196/code41
df611f84592afd453ccb2d22a7ad999ddb68d028
[ "Apache-2.0" ]
null
null
null
from warnings import filterwarnings import MySQLdb import MySQLdb.cursors # Suppress Warnings. We have all the exceptions covered. filterwarnings('ignore', category = MySQLdb.Warning) #start sql helper functions def open_db(options, dictcursor=None): if options.unit_test == True: db_name=options.testdbname else: db_name=options.dbname if not dictcursor: return MySQLdb.connect(options.dbhostname, options.dbusername, options.dbpassword, db_name) return MySQLdb.connect(options.dbhostname, options.dbusername, options.dbpassword, db_name, cursorclass=dictcursor) def db_execute(options, sql, catch_error=False, print_error=False): db = open_db(options) cursor = db.cursor() try: # Execute the SQL command cursor.execute(sql) db.commit() return cursor.lastrowid # last affected row id except Exception as e: db.rollback() if catch_error: if print_error: print 'db_execute caught exception: %s'%str(e) else: raise # Raise the exception finally: # close the cursor and commit the transaction cursor.close() db.close() def db_fetch_next_row(options, sql): # Open database connection db = open_db(options) # prepare a cursor object using cursor() method cursor = db.cursor() row = None try: # Execute the SQL command cursor.execute(sql) # Fetch all the rows in a list of lists. row = cursor.fetchone() except Exception: # Try to shutdown gracefully pass #logger.error('fetch next row Caught Exception: %s.\t\t' % (str(e),)) # disconnect from server db.close() if row is not None: return row else: return (None,) def db_fetch_remaining_rows(options, sql): # Open database connection db = open_db(options) # prepare a cursor object using cursor() method cursor = db.cursor() rows = None try: # Execute the SQL command cursor.execute(sql) # Fetch all the rows in a list of lists. 
rows = cursor.fetchall() except Exception: pass #logger.error('fetch remaining rows Caught Exception: %s.\t\t' % (str(e),)) # disconnect from server db.close() if rows is not None: return rows else: return (None,) def db_fetch_next_row_dict(options, sql): # Open database connection db = open_db(options, MySQLdb.cursors.DictCursor) # prepare a cursor object using cursor() method cursor = db.cursor() row = None try: # Execute the SQL command cursor.execute(sql) # Fetch all the rows in a list of lists. row = cursor.fetchone() except Exception: pass #logger.error('fetch next row dict Caught Exception: %s.\t\t' % (str(e),)) # disconnect from server db.close() if row is not None: return row else: return (None,) def db_fetch_remaining_rows_dict(options, sql): # Open database connection db = open_db(options, MySQLdb.cursors.DictCursor) # prepare a cursor object using cursor() method cursor = db.cursor() rows = None try: # Execute the SQL command cursor.execute(sql) # Fetch all the rows in a list of lists. rows = cursor.fetchall() except Exception: pass #logger.error('fetch remaining rows dict Caught Exception: %s.\t\t' % (str(e),)) # disconnect from server db.close() if rows is not None: return rows else: return (None,) def db_fetch_col_by_string(options, sql, index): # Open database connection db = open_db(options, MySQLdb.cursors.DictCursor) # prepare a cursor object using cursor() method cursor = db.cursor() try: # Execute the SQL command cursor.execute(sql) # Fetch all the rows in a list of lists. row = cursor.fetchone() except Exception: pass #logger.error('fetch col by string Caught Exception: %s.\t\t' % (str(e),)) # disconnect from server db.close() #check for key membership try: if row.has_key(index): return row[index] except Exception: pass #logger.error('fetch col by string Caught Exception: %s.\t\t' % (str(e),)) return 0
30.086667
120
0.610459
564
4,513
4.819149
0.18617
0.015453
0.033481
0.033113
0.740618
0.731788
0.719279
0.719279
0.719279
0.719279
0
0.000317
0.301795
4,513
149
121
30.288591
0.862266
0.324618
0
0.707071
0
0
0.012955
0
0
0
0
0
0
0
null
null
0.080808
0.030303
null
null
0.020202
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
8
df4b67eaf2bd430f1903bb4c51e7fb6b8d3e1eb7
4,868
py
Python
fython/fml/add_interpolant.py
nicolasessisbreton/fython
988f5a94cee8b16b0000501a22239195c73424a1
[ "Apache-2.0" ]
41
2016-01-21T05:14:45.000Z
2021-11-24T20:37:21.000Z
fython/fml/add_interpolant.py
nicolasessisbreton/fython
988f5a94cee8b16b0000501a22239195c73424a1
[ "Apache-2.0" ]
5
2016-01-21T05:36:37.000Z
2016-08-22T19:26:51.000Z
fython/fml/add_interpolant.py
nicolasessisbreton/fython
988f5a94cee8b16b0000501a22239195c73424a1
[ "Apache-2.0" ]
3
2016-01-23T04:03:44.000Z
2016-08-21T15:58:38.000Z
from fython.config import * def add_interpolant(lexer, fmt, arg): if fmt == '': lexer += 'g0' lexer -= arg elif fmt == 'v': lexer += 'a' lexer *= 'trim({:s}(size({:s})))'.format(fytbk.int_to_char_tag, arg) lexer /= '(g0,:,' lexer *= 'achar(34)' lexer /= ', ' lexer *= 'achar(34)' lexer += ')' lexer += 'a' lexer -= "'['" lexer -= arg lexer -= "']'" elif fmt == 'vc': lexer *= 'trim({:s}(size({:s})))'.format(fytbk.int_to_char_tag, arg) lexer /= '(g0,:,' lexer *= 'achar(34)' lexer /= ', ' lexer *= 'achar(34)' lexer += ')' lexer -= arg elif fmt == 'va': lexer += 'a' lexer *= 'trim({:s}(size({:s})))'.format(fytbk.int_to_char_tag, arg) lexer /= '(g0,:,' lexer *= 'achar(34)' lexer /= ', ' lexer *= 'achar(34)' lexer += ')' lexer += 'a' lexer -= "'array(['" lexer -= arg lexer -= "'])'" elif fmt == 'jn': lexer += 'a' lexer += 'a' lexer += 'a' lexer += 'a' lexer /= '(' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer /= ',g0,' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer += ')' lexer -= 'achar(34)' lexer -= '"{:s}"'.format(arg) lexer -= 'achar(34)' lexer -= '":"' lexer -= arg elif fmt == 'j': lexer += 'a' lexer += 'a' lexer += 'a' lexer += 'a' lexer += 'a' lexer /= '(' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer /= ',g0,' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer += ')' lexer -= '","' lexer -= 'achar(34)' lexer -= '"{:s}"'.format(arg) lexer -= 'achar(34)' lexer -= '":"' lexer -= arg elif fmt == 'jv': lexer += 'a' lexer += 'a' lexer += 'a' lexer += 'a' lexer += 'a' lexer *= 'trim({:s}(size({:s})))'.format(fytbk.int_to_char_tag, arg) lexer /= '((' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer /= ',g0,' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer /= '),' lexer *= 'achar(34)' lexer /= ', ' lexer *= 'achar(34)' lexer += ')' lexer += 'tl2,a' lexer -= '","' lexer -= 'achar(34)' lexer -= '"{:s}"'.format(arg) lexer -= 'achar(34)' lexer -= '": 
["' lexer -= arg lexer -= '"]"' elif fmt == 'jvn': lexer += 'a' lexer += 'a' lexer += 'a' lexer += 'a' lexer *= 'trim({:s}(size({:s})))'.format(fytbk.int_to_char_tag, arg) lexer /= '((' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer /= ',g0,' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer /= '),' lexer *= 'achar(34)' lexer /= ', ' lexer *= 'achar(34)' lexer += ')' lexer += 'tl2,a' lexer -= 'achar(34)' lexer -= '"{:s}"'.format(arg) lexer -= 'achar(34)' lexer -= '": ["' lexer -= arg lexer -= '"]"' elif fmt.startswith('jn_'): lexer += 'a' lexer += 'a' lexer += 'a' lexer += 'a' lexer /= '(' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer /= ',g0,' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer += ')' lexer -= 'achar(34)' lexer -= '"{:s}"'.format(fmt[3:]) lexer -= 'achar(34)' lexer -= '":"' lexer -= arg elif fmt.startswith('j_'): lexer += 'a' lexer += 'a' lexer += 'a' lexer += 'a' lexer += 'a' lexer /= '(' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer /= ',g0,' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer += ')' lexer -= '","' lexer -= 'achar(34)' lexer -= '"{:s}"'.format(fmt[2:]) lexer -= 'achar(34)' lexer -= '":"' lexer -= arg elif fmt.startswith('jv_'): lexer += 'a' lexer += 'a' lexer += 'a' lexer += 'a' lexer += 'a' lexer *= 'trim({:s}(size({:s})))'.format(fytbk.int_to_char_tag, arg) lexer /= '((' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer /= ',g0,' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer /= '),' lexer *= 'achar(34)' lexer /= ', ' lexer *= 'achar(34)' lexer += ')' lexer += 'tl2,a' lexer -= '","' lexer -= 'achar(34)' lexer -= '"{:s}"'.format(fmt[3:]) lexer -= 'achar(34)' lexer -= '": ["' lexer -= arg lexer -= '"]"' elif fmt.startswith('jvn_'): lexer += 'a' lexer += 'a' lexer += 'a' lexer += 'a' lexer *= 'trim({:s}(size({:s})))'.format(fytbk.int_to_char_tag, arg) lexer /= '((' lexer 
*= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer /= ',g0,' lexer *= 'achar(39)' lexer *= 'achar(34)' lexer *= 'achar(39)' lexer /= '),' lexer *= 'achar(34)' lexer /= ', ' lexer *= 'achar(34)' lexer += ')' lexer += 'tl2,a' lexer -= 'achar(34)' lexer -= '"{:s}"'.format(fmt[4:]) lexer -= 'achar(34)' lexer -= '": ["' lexer -= arg lexer -= '"]"' else: lexer += fmt lexer -= arg
17.9631
70
0.471857
599
4,868
3.791319
0.066778
0.343461
0.243065
0.344342
0.952004
0.943197
0.93439
0.919859
0.914135
0.888155
0
0.047826
0.244043
4,868
271
71
17.9631
0.569293
0
0
0.899123
0
0
0.238447
0.031629
0
0
0
0
0
1
0.004386
false
0
0.004386
0
0.008772
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
10
df5967f43e728e1c19e8913baee5ba2f158f6ed4
40,430
py
Python
benchmarks/import_cost/functions_100_with_10_contracts.py
kklein/icontract
718ef1733cc2cce6d3c8f59a5a37de96f8be6664
[ "MIT" ]
244
2018-08-15T22:58:58.000Z
2022-03-12T16:10:39.000Z
benchmarks/import_cost/functions_100_with_10_contracts.py
kklein/icontract
718ef1733cc2cce6d3c8f59a5a37de96f8be6664
[ "MIT" ]
157
2018-08-29T21:36:47.000Z
2022-02-14T19:30:24.000Z
benchmarks/import_cost/functions_100_with_10_contracts.py
kklein/icontract
718ef1733cc2cce6d3c8f59a5a37de96f8be6664
[ "MIT" ]
23
2019-04-24T11:09:10.000Z
2022-02-14T15:56:26.000Z
#!/usr/bin/env python3 import icontract @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func0(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func1(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func2(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func3(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def 
some_func4(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func5(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func6(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func7(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func8(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func9(x: 
int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func10(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func11(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func12(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func13(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func14(x: int) -> 
None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func15(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func16(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func17(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func18(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func19(x: int) -> None: 
pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func20(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func21(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func22(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func23(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func24(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func25(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func26(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func27(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func28(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func29(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func30(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func31(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func32(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func33(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func34(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func35(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func36(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func37(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func38(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func39(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func40(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func41(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func42(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func43(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func44(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func45(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func46(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func47(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func48(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func49(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func50(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func51(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func52(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func53(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func54(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func55(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func56(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func57(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func58(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func59(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func60(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func61(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func62(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func63(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func64(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func65(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func66(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func67(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func68(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func69(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func70(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func71(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func72(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func73(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func74(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func75(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func76(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func77(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func78(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func79(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func80(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func81(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func82(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func83(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func84(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func85(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func86(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func87(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func88(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func89(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func90(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func91(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func92(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func93(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func94(x: int) -> None: pass 
@icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func95(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func96(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func97(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func98(x: int) -> None: pass @icontract.require(lambda x: x > 0) @icontract.require(lambda x: x > 1) @icontract.require(lambda x: x > 2) @icontract.require(lambda x: x > 3) @icontract.require(lambda x: x > 4) @icontract.require(lambda x: x > 5) @icontract.require(lambda x: x > 6) @icontract.require(lambda x: x > 7) @icontract.require(lambda x: x > 8) @icontract.require(lambda x: x > 9) def some_func99(x: int) -> None: pass
28.816821
35
0.683181
6,706
40,430
4.103937
0.018491
0.581374
0.79939
0.835725
0.976999
0.976999
0.976999
0.976999
0.976999
0.976999
0
0.034798
0.15345
40,430
1,402
36
28.837375
0.769298
0.000519
0
0.915903
0
0
0
0
0
0
0
0
0
1
0.083264
false
0.083264
0.000833
0
0.084097
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
11
df69e535f13ac311198bd4a7f6a41913c01cffe4
8,786
py
Python
code/Autoencoder.py
SSANGMAN/Anomaly_Detection
0607efbbb85e9b52f34e514c73395e5c446c771e
[ "MIT" ]
3
2020-01-08T08:20:02.000Z
2020-12-09T13:07:27.000Z
code/Autoencoder.py
SSANGMAN/Anomaly_Detection
0607efbbb85e9b52f34e514c73395e5c446c771e
[ "MIT" ]
null
null
null
code/Autoencoder.py
SSANGMAN/Anomaly_Detection
0607efbbb85e9b52f34e514c73395e5c446c771e
[ "MIT" ]
1
2020-12-09T13:07:29.000Z
2020-12-09T13:07:29.000Z
import gc
import warnings

import numpy as np
from sklearn import metrics
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, GaussianNoise
# BUG FIX: EarlyStopping was imported from the standalone `keras` package while
# every model here is built with `tensorflow.keras`. Mixing the two libraries
# hands an incompatible callback object to tf.keras' Model.fit; import the
# callback from tensorflow.keras instead.
from tensorflow.keras.callbacks import EarlyStopping

warnings.filterwarnings('ignore')

# data_type -> RMSE report template shared by every Prediction() method.
# The exact strings are preserved from the original per-branch print calls.
_SCORE_LABELS = {
    'Insample': "Insample Normal Score (RMSE) : {}",
    'OutOfSample': "Out of Sample Normal Score (RMSE) : {}",
    'Attack': "Attack Underway Score (RMSE) : {}",
}


def _score_and_report(model, test_data, data_type):
    """Predict `test_data`, print its RMSE under the `data_type` label, return predictions."""
    pred = model.predict(test_data)
    score = np.sqrt(metrics.mean_squared_error(pred, test_data))
    print(_SCORE_LABELS[data_type].format(score))
    return pred


def _compile_and_fit(model, train, batchsize, validation_size):
    """Compile with MSE/Adam, print the summary, and fit with early stopping.

    The autoencoder target equals its input (`train` is both x and y).
    Shared by all three model classes, which previously duplicated this code.
    """
    model.compile(loss='mean_squared_error', optimizer='adam')
    print(model.summary())
    model.fit(train, train,
              batch_size=batchsize,
              validation_split=validation_size,
              verbose=1,
              epochs=50,
              callbacks=[EarlyStopping(monitor='val_loss', patience=3)])
    gc.collect()


class SimpleUncompleteAutoencoder:
    """Undercomplete autoencoder: one hidden layer narrower than the input.

    Reconstruction RMSE on new data serves as the anomaly score.
    """

    def __init__(self, df):
        # df: 2-D data whose column count fixes the input dimension.
        self.df = df
        self.input_dim = self.df.shape[1]

    def Modeling(self, train, dense_dim, batchsize=None, validation_size=None):
        """Build and train the autoencoder.

        Args:
            train: training samples (also the reconstruction target).
            dense_dim: width of the single hidden (coding) layer.
            batchsize: mini-batch size; required.
            validation_size: fraction of `train` held out for validation.

        Raises:
            AssertionError: if `batchsize` is not supplied.
                (NOTE(review): ValueError would be more conventional, but the
                raise type is kept so existing callers' handlers still match.)
        """
        if batchsize is None:
            raise AssertionError("Batchsize must be defined.")
        self.train = train
        self.dense_dim = dense_dim
        model = Sequential()
        model.add(Dense(self.dense_dim, input_dim=self.input_dim, activation='relu'))
        model.add(Dense(self.input_dim))
        self.model = model
        _compile_and_fit(self.model, train, batchsize, validation_size)

    def Prediction(self, test_data, data_type=None):
        """Reconstruct `test_data`, print the RMSE for `data_type`, return predictions.

        `data_type` must be 'Insample', 'OutOfSample' or 'Attack'; any other
        non-None value silently returns None (original behavior preserved).
        """
        self.test_data = test_data
        if data_type is None:
            raise AssertionError('Data type must be defined.')
        if data_type in _SCORE_LABELS:
            return _score_and_report(self.model, self.test_data, data_type)
        return None

class SimpleStackedAutoencoder:
    """Stacked (symmetric, 3-hidden-layer) autoencoder for anomaly scoring."""

    def __init__(self, df):
        # df: 2-D data whose column count fixes the input dimension.
        self.df = df
        self.input_dim = self.df.shape[1]

    def Modeling(self, train, hidden_dim=None, coding_dim=None, batchsize=None, validation_size=None):
        """Build and train a hidden-coding-hidden stacked autoencoder.

        Args:
            train: training samples (also the reconstruction target).
            hidden_dim: width of the two outer hidden layers; required.
            coding_dim: width of the central coding layer; required.
            batchsize: mini-batch size; required.
            validation_size: fraction of `train` held out for validation.

        Raises:
            AssertionError: if any required dimension or the batch size is missing.
        """
        if hidden_dim is None:
            raise AssertionError("Hidden Layer Dimension must be defined.")
        if coding_dim is None:
            raise AssertionError("Coding Layer Dimension must be defined.")
        if batchsize is None:
            raise AssertionError("Batchsize must be defined.")
        self.train = train
        self.hidden_dim = hidden_dim
        self.coding_dim = coding_dim
        model = Sequential()
        model.add(Dense(self.hidden_dim, input_dim=self.input_dim, activation='relu'))
        model.add(Dense(self.coding_dim, activation='relu'))
        model.add(Dense(self.hidden_dim, activation='relu'))
        model.add(Dense(self.input_dim))
        self.model = model
        _compile_and_fit(self.model, train, batchsize, validation_size)

    def Prediction(self, test_data, data_type):
        """Reconstruct `test_data`, print the RMSE for `data_type`, return predictions."""
        self.test_data = test_data
        if data_type is None:
            raise AssertionError('Data Type must be defined.')
        if data_type in _SCORE_LABELS:
            return _score_and_report(self.model, self.test_data, data_type)
        return None

class SimpleDenosingAutoencoder:
    """Denoising stacked autoencoder: injects Dropout or Gaussian noise after the input layer.

    (Class name spelling kept as-is: it is part of the public interface.)
    """

    def __init__(self, df):
        # df: 2-D data whose column count fixes the input dimension.
        self.df = df
        self.input_dim = self.df.shape[1]

    def Modeling(self, train, hidden_dim=None, coding_dim=None, batchsize=None,
                 validation_size=None, denosing_type=None, std=None):
        """Build and train a denoising stacked autoencoder.

        Args:
            train: training samples (also the reconstruction target).
            hidden_dim: width of the two outer hidden layers; required.
            coding_dim: width of the central coding layer; required.
            batchsize: mini-batch size; required.
            validation_size: fraction of `train` held out for validation.
            denosing_type: 'Dropout' (rate 0.2) or 'Gaussian'; required.
            std: Gaussian noise stddev; required when denosing_type == 'Gaussian'.

        Raises:
            AssertionError: if a required argument is missing.
        """
        if hidden_dim is None:
            raise AssertionError("Hidden Layer Dimension must be defined.")
        if coding_dim is None:
            raise AssertionError("Coding Layer Dimension must be defined.")
        if batchsize is None:
            raise AssertionError("Batchsize must be defined.")
        if denosing_type is None:
            raise AssertionError("Denosing Type must be Defined. ('Gaussian' or 'Dropout')")

        # The two original branches were identical except for the noise layer;
        # build that layer here and share the rest of the construction.
        if denosing_type == "Dropout":
            noise_layer = Dropout(0.2)
        elif denosing_type == 'Gaussian':
            if std is None:
                raise AssertionError('Gaussian Noise std must be defined.')
            noise_layer = GaussianNoise(std)
        else:
            # Unknown denosing_type: original code silently did nothing.
            return

        self.train = train
        self.hidden_dim = hidden_dim
        self.coding_dim = coding_dim
        model = Sequential()
        model.add(Dense(self.hidden_dim, input_dim=self.input_dim, activation='relu'))
        model.add(noise_layer)
        model.add(Dense(self.coding_dim, activation='relu'))
        model.add(Dense(self.hidden_dim, activation='relu'))
        model.add(Dense(self.input_dim))
        self.model = model
        _compile_and_fit(self.model, train, batchsize, validation_size)

    def Prediction(self, test_data, data_type):
        """Reconstruct `test_data`, print the RMSE for `data_type`, return predictions."""
        self.test_data = test_data
        if data_type is None:
            raise AssertionError('Data Type must be defined.')
        if data_type in _SCORE_LABELS:
            return _score_and_report(self.model, self.test_data, data_type)
        return None
39.936364
144
0.559527
939
8,786
5.073482
0.106496
0.04534
0.060453
0.049958
0.881612
0.881612
0.875105
0.867758
0.867758
0.867758
0
0.003629
0.341452
8,786
220
145
39.936364
0.819737
0
0
0.814103
0
0
0.110504
0
0
0
0
0
0.076923
1
0.057692
false
0
0.044872
0
0.179487
0.083333
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
df75755e75f6a0f90615de8d6d0c99d3c805c2d8
49,756
py
Python
mff/models/combined.py
alvarovm/mff
cd1b22b606dfd64d91dc94fece72ad6a707212af
[ "Apache-2.0" ]
14
2019-03-22T18:57:34.000Z
2021-12-15T11:37:17.000Z
mff/models/combined.py
alvarovm/mff
cd1b22b606dfd64d91dc94fece72ad6a707212af
[ "Apache-2.0" ]
4
2019-06-18T14:55:46.000Z
2019-11-26T19:34:59.000Z
mff/models/combined.py
alvarovm/mff
cd1b22b606dfd64d91dc94fece72ad6a707212af
[ "Apache-2.0" ]
3
2019-08-05T14:42:20.000Z
2022-03-16T18:48:54.000Z
# -*- coding: utf-8 -*- import json import logging import warnings from itertools import combinations_with_replacement from pathlib import Path import numpy as np from mff import gp, interpolation, kernels, utility, models from .base import Model logger = logging.getLogger(__name__) class NpEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, np.integer): return int(obj) elif isinstance(obj, np.floating): return float(obj) elif isinstance(obj, np.ndarray): return obj.tolist() else: return super(NpEncoder, self).default(obj) class CombinedSingleSpeciesModel(Model): """ 2- and 3-body single species model class Class managing the Gaussian processes and their mapped counterparts Args: element (int): The atomic number of the element considered r_cut (foat): The cutoff radius used to carve the atomic environments sigma_2b (foat): Lengthscale parameter of the 2-body Gaussian process sigma_3b (foat): Lengthscale parameter of the 2-body Gaussian process theta_2b (float): decay ratio of the cutoff function in the 2-body Gaussian Process theta_3b (float): decay ratio of the cutoff function in the 3-body Gaussian Process noise (float): noise value associated with the training output data Attributes: gp_2b (method): The 2-body single species Gaussian Process gp_3b (method): The 3-body single species Gaussian Process grid_2b (method): The 2-body single species tabulated potential grid_3b (method): The 3-body single species tabulated potential grid_start (float): Minimum atomic distance for which the grids are defined (cannot be 0.0) grid_num (int): number of points per side used to create the 2- and 3-body grid. 
The 3-body grid is 3-dimensional, therefore its total number of grid points will be grid_num^3 """ def __init__(self, element, r_cut, sigma_2b, sigma_3b, theta_2b, theta_3b, noise, rep_sig=1, **kwargs): super().__init__() self.element = element self.r_cut = r_cut self.rep_sig = rep_sig kernel_2b = kernels.TwoBodySingleSpeciesKernel( theta=[sigma_2b, theta_2b, r_cut]) self.gp_2b = gp.GaussianProcess( kernel=kernel_2b, noise=noise, **kwargs) kernel_3b = kernels.ThreeBodySingleSpeciesKernel( theta=[sigma_3b, theta_3b, r_cut]) self.gp_3b = gp.GaussianProcess( kernel=kernel_3b, noise=noise, **kwargs) self.grid_2b, self.grid_3b, self.grid_start, self.grid_num = None, None, None, None def fit(self, confs, forces, ncores=1): """ Fit the GP to a set of training forces using a 2- and 3-body single species force-force kernel functions. The 2-body Gaussian process is first fitted, then the 3-body GP is fitted to the difference between the training forces and the 2-body predictions of force on the training configurations Args: confs (list): List of M x 5 arrays containing coordinates and atomic numbers of atoms within a cutoff from the central one forces (array) : Array containing the vector forces on the central atoms of the training configurations ncores (int): number of CPUs to use for the gram matrix evaluation """ hypotetical_model_name = ("models/MODEL_ker_TwoBodySingleSpecies_ntr_%i.json" %(len(forces))) try: model_2b = models.TwoBodySingleSpeciesModel.from_json(hypotetical_model_name) self.rep_sig = model_2b.rep_sig self.gp_2b = model_2b.gp if self.rep_sig: self.rep_forces = utility.get_repulsive_forces(confs, self.rep_sig) forces -= self.rep_forces print("Loaded 2-body model to bootstart training") except: if self.rep_sig: self.rep_sig = utility.find_repulstion_sigma(confs) self.rep_forces = utility.get_repulsive_forces(confs, self.rep_sig) forces -= self.rep_forces self.gp_2b.fit(confs, forces, ncores=ncores) two_body_forces = self.gp_2b.predict(confs, ncores=ncores) 
self.gp_3b.fit(confs, forces - two_body_forces, ncores=ncores) def fit_energy(self, glob_confs, energies, ncores=1): """ Fit the GP to a set of training energies using a 2- and 3-body single species energy-energy kernel functions. The 2-body Gaussian process is first fitted, then the 3-body GP is fitted to the difference between the training energies and the 2-body predictions of energies on the training configurations. Args: glob_confs (list of lists): List of configurations arranged so that grouped configurations belong to the same snapshot energies (array) : Array containing the total energy of each snapshot ncores (int): number of CPUs to use for the gram matrix evaluation """ hypotetical_model_name = "models/MODEL_ker_TwoBodySingleSpecies_ntr_%i.json" %(len(energies)) try: model_2b = models.TwoBodySingleSpeciesModel.from_json(hypotetical_model_name) self.rep_sig = model_2b.rep_sig self.gp_2b = model_2b.gp if self.rep_sig: self.rep_energies = utility.get_repulsive_energies( glob_confs, self.rep_sig) energies -= self.rep_energies print("Loaded 2-body model to bootstart training") except: if self.rep_sig: self.rep_sig = utility.find_repulstion_sigma(glob_confs) self.rep_energies = utility.get_repulsive_energies( glob_confs, self.rep_sig) energies -= self.rep_energies self.gp_2b.fit_energy(glob_confs, energies, ncores=ncores) two_body_energies = self.gp_2b.predict_energy( glob_confs, ncores=ncores) self.gp_3b.fit_energy(glob_confs, energies - two_body_energies, ncores=ncores) def fit_force_and_energy(self, confs, forces, glob_confs, energies, ncores=1): """ Fit the GP to a set of training energies using a 2- and 3-body single species force-force, energy-energy, and energy-forces kernel functions. The 2-body Gaussian process is first fitted, then the 3-body GP is fitted to the difference between the training energies (and forces) and the 2-body predictions of energies (and forces) on the training configurations. 
Args: confs (list): List of M x 5 arrays containing coordinates and atomic numbers of atoms within a cutoff from the central one forces (array) : Array containing the vector forces on the central atoms of the training configurations glob_confs (list of lists): List of configurations arranged so that grouped configurations belong to the same snapshot energies (array) : Array containing the total energy of each snapshot ncores (int): number of CPUs to use for the gram matrix evaluation """ hypotetical_model_name = "models/MODEL_ker_TwoBodySingleSpecies_ntr_%i.json" %(len(energies)+len(forces)) try: model_2b = models.TwoBodySingleSpeciesModel.from_json(hypotetical_model_name) self.rep_sig = model_2b.rep_sig self.gp_2b = model_2b.gp if self.rep_sig: self.rep_energies = utility.get_repulsive_energies( glob_confs, self.rep_sig) energies -= self.rep_energies self.rep_forces = utility.get_repulsive_forces(confs, self.rep_sig) forces -= self.rep_forces print("Loaded 2-body model to bootstart training") except: if self.rep_sig: self.rep_sig = utility.find_repulstion_sigma(confs) self.rep_energies = utility.get_repulsive_energies( glob_confs, self.rep_sig) energies -= self.rep_energies self.rep_forces = utility.get_repulsive_forces(confs, self.rep_sig) forces -= self.rep_forces self.gp_2b.fit_force_and_energy( confs, forces, glob_confs, energies, ncores=ncores) two_body_forces = self.gp_2b.predict(confs, ncores=ncores) two_body_energies = self.gp_2b.predict_energy( glob_confs, ncores=ncores) self.gp_3b.fit_force_and_energy( confs, forces - two_body_forces, glob_confs, energies - two_body_energies, ncores=ncores) def predict(self, confs, return_std=False, ncores=1): """ Predict the forces acting on the central atoms of confs using the 2- and 3-body GPs. The total force is the sum of the two predictions. 
Args: confs (list): List of M x 5 arrays containing coordinates and atomic numbers of atoms within a cutoff from the central one return_std (bool): if True, returns the standard deviation associated to predictions according to the GP framework Returns: forces (array): array of force vectors predicted by the GPs forces_errors (array): errors associated to the force predictions, returned only if return_std is True """ if return_std: if self.rep_sig: rep_forces = utility.get_repulsive_forces(confs, self.rep_sig) force_2b, std_2b = self.gp_2b.predict(confs, return_std) force_2b += rep_forces else: force_2b, std_2b = self.gp_2b.predict( confs, return_std, ncores=ncores) force_3b, std_3b = self.gp_2b.predict( confs, return_std, ncores=ncores) return force_2b + force_3b, std_2b + std_3b else: if self.rep_sig: rep_forces = utility.get_repulsive_forces(confs, self.rep_sig) return self.gp_2b.predict(confs, return_std, ncores=ncores) + rep_forces + \ self.gp_3b.predict(confs, return_std, ncores=ncores) else: return self.gp_2b.predict(confs, return_std, ncores=ncores) + \ self.gp_3b.predict(confs, return_std, ncores=ncores) def predict_energy(self, glob_confs, return_std=False, ncores=1): """ Predict the local energies of the central atoms of confs using the 2- and 3-body GPs. The total force is the sum of the two predictions. 
Args: glob_confs (list of lists): List of configurations arranged so that grouped configurations belong to the same snapshot return_std (bool): if True, returns the standard deviation associated to predictions according to the GP framework Returns: energies (array) : Array containing the total energy of each snapshot energies_errors (array): errors associated to the energies predictions, returned only if return_std is True """ if return_std: if self.rep_sig: rep_energies = utility.get_repulsive_energies( glob_confs, self.rep_sig) force_2b, std_2b = self.gp_2b.predict_energy( glob_confs, return_std, ncores=ncores) energy_2b += rep_energies else: energy_2b, std_2b = self.gp_2b.predict_energy( glob_confs, return_std, ncores=ncoress) energy_3b, std_3b = self.gp_2b.predict_energy( glob_confs, return_std, ncores=ncores) return energy_2b + energy_3b, std_2b + std_3b else: if self.rep_sig: rep_energies = utility.get_repulsive_energies( glob_confs, self.rep_sig) return self.gp_2b.predict_energy(glob_confs, return_std) + rep_energies +\ self.gp_3b.predict_energy( glob_confs, return_std, ncores=ncores) else: return self.gp_2b.predict_energy(glob_confs, return_std, ncores=ncores) + \ self.gp_3b.predict_energy( glob_confs, return_std, ncores=ncores) def build_grid(self, start, num_2b, num_3b, ncores=1): """ Build the mapped 2- and 3-body potentials. Calculates the energy predicted by the GP for two and three atoms at all possible combination of num distances ranging from start to r_cut. The energy for the 3-body mapped grid is calculated only for ``valid`` triplets of atoms, i.e. sets of three distances which form a triangle (this is checked via the triangle inequality). The grid building exploits all the permutation invariances to reduce the number of energy calculations needed to fill the grid. The computed 2-body energies are stored in an array of values, and a 1D spline interpolation is created. 
The computed 3-body energies are stored in a 3D cube of values, and a 3D spline interpolation is created. The total force or local energy can then be calculated for any atom by summing the pairwise and triplet contributions of every valid couple and triplet of atoms of which one is always the central one. The prediction is done by the ``calculator`` module, which is built to work within the ase python package. Args: start (float): smallest interatomic distance for which the energy is predicted by the GP and stored inn the 3-body mapped potential num_2b (int):number of points to use in the grid of the 2-body mapped potential num_3b (int): number of points to use to generate the list of distances used to generate the triplets of atoms for the 2-body mapped potential ncores (int): number of CPUs to use to calculate the energy predictions """ dists_2b = np.linspace(start, self.r_cut, num_2b) confs = np.zeros((num_2b, 1, 5)) confs[:, 0, 0] = dists_2b confs[:, 0, 3], confs[:, 0, 4] = self.element, self.element grid_data = self.gp_2b.predict_energy( confs, ncores=ncores, mapping=True) if self.rep_sig: grid_data += utility.get_repulsive_energies( confs, self.rep_sig, mapping=True) grid_2b = interpolation.Spline1D(dists_2b, grid_data) # Mapping 3 body part dists_3b = np.linspace(start, self.r_cut, num_3b) inds, r_ij_x, r_ki_x, r_ki_y = self.generate_triplets(dists_3b) confs = np.zeros((len(r_ij_x), 2, 5)) confs[:, 0, 0] = r_ij_x # Element on the x axis confs[:, 1, 0] = r_ki_x # Reshape into confs shape: this is x2 confs[:, 1, 1] = r_ki_y # Reshape into confs shape: this is y2 # Permutations of elements confs[:, :, 3] = self.element # Central element is always element 1 # Element on the x axis is always element 2 confs[:, 0, 4] = self.element # Element on the xy plane is always element 3 confs[:, 1, 4] = self.element grid_3b = np.zeros((num_3b, num_3b, num_3b)) grid_3b[inds] = self.gp_3b.predict_energy( confs, ncores=ncores, mapping=True).flatten() for ind_i in 
range(num_3b): for ind_j in range(ind_i + 1): for ind_k in range(ind_j + 1): grid_3b[ind_i, ind_k, ind_j] = grid_3b[ind_i, ind_j, ind_k] grid_3b[ind_j, ind_i, ind_k] = grid_3b[ind_i, ind_j, ind_k] grid_3b[ind_j, ind_k, ind_i] = grid_3b[ind_i, ind_j, ind_k] grid_3b[ind_k, ind_i, ind_j] = grid_3b[ind_i, ind_j, ind_k] grid_3b[ind_k, ind_j, ind_i] = grid_3b[ind_i, ind_j, ind_k] grid_3b = interpolation.Spline3D(dists_3b, dists_3b, dists_3b, grid_3b) self.grid_2b = grid_2b self.grid_3b = grid_3b self.grid_num_2b = num_2b self.grid_num_3b = num_3b self.grid_start = start def save(self, path): """ Save the model. This creates a .json file containing the parameters of the model and the paths to the GP objects and the mapped potentials, which are saved as separate .gpy and .gpz files, respectively. Args: path (str): path to the file """ if not isinstance(path, Path): path = Path(path) ### SAVE THE 2B MODEL ### params = { 'model': self.__class__.__name__, 'element': self.element, 'r_cut': self.r_cut, 'rep_sig': self.rep_sig, 'fitted': self.gp_2b.fitted, 'gp_2b': { 'kernel': self.gp_2b.kernel.kernel_name, 'n_train': self.gp_2b.n_train, 'sigma': self.gp_2b.kernel.theta[0], 'theta': self.gp_2b.kernel.theta[1], 'noise': self.gp_2b.noise }, 'gp_3b': { 'kernel': self.gp_3b.kernel.kernel_name, 'n_train': self.gp_3b.n_train, 'sigma': self.gp_3b.kernel.theta[0], 'theta': self.gp_3b.kernel.theta[1], 'noise': self.gp_3b.noise }, 'grid_2b': { 'r_min': self.grid_start, 'r_num': self.grid_num_2b, 'filename': {} } if self.grid_2b else {}, 'grid_3b': { 'r_min': self.grid_start, 'r_num': self.grid_num_3b, 'filename': {} } if self.grid_3b else {} } gp_filename_2b = "GP_ker_{p[gp_2b][kernel]}_ntr_{p[gp_2b][n_train]}.npy".format( p=params) params['gp_2b']['filename'] = gp_filename_2b self.gp_2b.save(path / gp_filename_2b) if self.grid_2b: grid_filename_2b = "GRID_ker_{p[gp_2b][kernel]}_ntr_{p[gp_2b][n_train]}.npz".format( p=params) print("Saved 2-body grid under name %s" % (grid_filename_2b)) 
params['grid_2b']['filename'] = grid_filename_2b self.grid_2b.save(path / grid_filename_2b) ### SAVE THE 3B MODEL ### gp_filename_3b = "GP_ker_{p[gp_3b][kernel]}_ntr_{p[gp_3b][n_train]}.npy".format( p=params) params['gp_3b']['filename'] = gp_filename_3b self.gp_3b.save(path / gp_filename_3b) if self.grid_3b: grid_filename_3b = "GRID_ker_{p[gp_3b][kernel]}_ntr_{p[gp_3b][n_train]}.npz".format( p=params) print("Saved 3-body grid under name %s" % (grid_filename_3b)) params['grid_3b']['filename'] = grid_filename_3b self.grid_3b.save(path / grid_filename_3b) with open(path / "MODEL_combined_ntr_{p[gp_2b][n_train]}.json".format(p=params), 'w') as fp: json.dump(params, fp, indent=4, cls=NpEncoder) print("Saved model with name: MODEL_combined_ntr_{p[gp_2b][n_train]}.json".format(p=params)) @classmethod def from_json(cls, path): """ Load the model. Loads the model, the associated GPs and the mapped potentials, if available. Args: path (str): path to the .json model file Return: model (obj): the model object """ if not isinstance(path, Path): path = Path(path) directory, prefix = path.parent, path.stem with open(path) as fp: params = json.load(fp) model = cls(params['element'], params['r_cut'], params['gp_2b']['sigma'], params['gp_3b']['sigma'], params['gp_2b']['theta'], params['gp_3b']['theta'], params['gp_2b']['noise'], params['rep_sig']) gp_filename_2b = params['gp_2b']['filename'] gp_filename_3b = params['gp_3b']['filename'] try: model.gp_2b.load(directory / gp_filename_2b) except: warnings.warn("The 2-body GP file is missing") pass try: model.gp_3b.load(directory / gp_filename_3b) except: warnings.warn("The 3-body GP file is missing") pass if params['grid_2b']: grid_filename_2b = params['grid_2b']['filename'] model.grid_2b = interpolation.Spline1D.load( directory / grid_filename_2b) grid_filename_3b = params['grid_3b']['filename'] model.grid_3b = interpolation.Spline3D.load( directory / grid_filename_3b) model.grid_start = params['grid_2b']['r_min'] model.grid_num_2b = 
params['grid_2b']['r_num'] model.grid_num_3b = params['grid_3b']['r_num'] return model def save_gp(self, filename_2b, filename_3b): """ Saves the GP objects, now obsolete """ warnings.warn('use save and load function', DeprecationWarning) self.gp_2b.save(filename_2b) self.gp_3b.save(filename_3b) def load_gp(self, filename_2b, filename_3b): """ Loads the GP objects, now obsolete """ warnings.warn('use save and load function', DeprecationWarning) self.gp_2b.load(filename_2b) self.gp_3b.load(filename_3b) @staticmethod def generate_triplets(dists): """ Generate a list of all valid triplets using perutational invariance. Calculates the energy predicted by the GP for three atoms at all possible combination of num distances ranging from start to r_cut. The energy is calculated only for ``valid`` triplets of atoms, i.e. sets of three distances which form a triangle (this is checked via the triangle inequality). The grid building exploits all the permutation invariances to reduce the number of energy calculations needed to fill the grid. The computed energies are stored in a 3D cube of values, and a 3D spline interpolation is created, which can be used to predict the energy and, through its analytic derivative, the force associated to any triplet of atoms. The total force or local energy can then be calculated for any atom by summing the triplet contributions of every valid triplet of atoms of which one is always the central one. The prediction is done by the ``calculator`` module which is built to work within the ase python package. Args: dists (array): array of floats containing all of the distances which can be used to build triplets of atoms. This array is created by calling np.linspace(start, r_cut, num) Returns: inds (array): array of booleans indicating which triplets (three distance values) need to be evaluated to fill the 3D grid of energy values. r_ij_x (array): array containing the x coordinate of the second atom j w.r.t. 
the central atom i r_ki_x (array): array containing the x coordinate of the third atom k w.r.t. the central atom i r_ki_y (array): array containing the y coordinate of the third atom k w.r.t. the central atom i """ d_ij, d_jk, d_ki = np.meshgrid( dists, dists, dists, indexing='ij', sparse=False, copy=True) # Valid triangles according to triangle inequality inds = np.logical_and( d_ij <= d_jk + d_ki, np.logical_and(d_jk <= d_ki + d_ij, d_ki <= d_ij + d_jk)) # Utilizing permutation invariance inds = np.logical_and(np.logical_and(d_ij >= d_jk, d_jk >= d_ki), inds) # Element on the x axis r_ij_x = d_ij[inds] # Element on the xy plane r_ki_x = (d_ij[inds] ** 2 - d_jk[inds] ** 2 + d_ki[inds] ** 2) / (2 * d_ij[inds]) # using abs to avoid numerical error near to 0 r_ki_y = np.sqrt(np.abs(d_ki[inds] ** 2 - r_ki_x ** 2)) return inds, r_ij_x, r_ki_x, r_ki_y class CombinedManySpeciesModel(Model): """ 2- and 3-body many species model class Class managing the Gaussian processes and their mapped counterparts Args: elements (list): List containing the atomic numbers in increasing order r_cut (foat): The cutoff radius used to carve the atomic environments sigma_2b (foat): Lengthscale parameter of the 2-body Gaussian process sigma_3b (foat): Lengthscale parameter of the 2-body Gaussian process theta_2b (float): decay ratio of the cutoff function in the 2-body Gaussian Process theta_3b (float): decay ratio of the cutoff function in the 3-body Gaussian Process noise (float): noise value associated with the training output data Attributes: gp_2b (method): The 2-body single species Gaussian Process gp_3b (method): The 3-body single species Gaussian Process grid_2b (list): Contains the three 2-body two species tabulated potentials, accounting for interactions between two atoms of types 0-0, 0-1, and 1-1. grid_2b (list): Contains the three 3-body two species tabulated potentials, accounting for interactions between three atoms of types 0-0-0, 0-0-1, 0-1-1, and 1-1-1. 
grid_start (float): Minimum atomic distance for which the grids are defined (cannot be 0.0) grid_num_2b (int):number of points to use in the grid of the 2-body mapped potential grid_num_3b (int): number of points to use to generate the list of distances used to generate the triplets of atoms for the 2-body mapped potential """ def __init__(self, elements, r_cut, sigma_2b, sigma_3b, theta_2b, theta_3b, noise, rep_sig=1, **kwargs): super().__init__() self.elements = list(np.sort(elements)) self.r_cut = r_cut self.rep_sig = rep_sig kernel_2b = kernels.TwoBodyManySpeciesKernel( theta=[sigma_2b, theta_2b, r_cut]) self.gp_2b = gp.GaussianProcess( kernel=kernel_2b, noise=noise, **kwargs) kernel_3b = kernels.ThreeBodyManySpeciesKernel( theta=[sigma_3b, theta_3b, r_cut]) self.gp_3b = gp.GaussianProcess( kernel=kernel_3b, noise=noise, **kwargs) self.grid_2b, self.grid_3b, self.grid_start, self.grid_num_2b, self.grid_num_3b = { }, {}, None, None, None def fit(self, confs, forces, ncores=1): """ Fit the GP to a set of training forces using a 2- and 3-body single species force-force kernel functions. 
The 2-body Gaussian process is first fitted, then the 3-body GP is fitted to the difference between the training forces and the 2-body predictions of force on the training configurations Args: confs (list): List of M x 5 arrays containing coordinates and atomic numbers of atoms within a cutoff from the central one forces (array) : Array containing the vector forces on the central atoms of the training configurations ncores (int): number of CPUs to use for the gram matrix evaluation """ hypotetical_model_name = "models/MODEL_ker_TwoBodyManySpecies_ntr_%i.json" %(len(forces)) try: model_2b = models.TwoBodyManySpeciesModel.from_json(hypotetical_model_name) self.rep_sig = model_2b.rep_sig self.gp_2b = model_2b.gp if self.rep_sig: self.rep_sig = utility.find_repulstion_sigma(confs) self.rep_forces = utility.get_repulsive_forces(confs, self.rep_sig) forces -= self.rep_forces print("Loaded 2-body model to bootstart training") except: if self.rep_sig: self.rep_sig = utility.find_repulstion_sigma(confs) self.rep_forces = utility.get_repulsive_forces(confs, self.rep_sig) forces -= self.rep_forces self.gp_2b.fit(confs, forces, ncores=ncores) ntr = len(confs) two_body_forces = self.gp_2b.predict(confs, ncores=ncores) self.gp_3b.fit(confs, forces - two_body_forces, ncores=ncores) def fit_energy(self, glob_confs, energies, ncores=1): """ Fit the GP to a set of training energies using a 2- and 3-body single species energy-energy kernel functions. The 2-body Gaussian process is first fitted, then the 3-body GP is fitted to the difference between the training energies and the 2-body predictions of energies on the training configurations. 
Args: glob_confs (list of lists): List of configurations arranged so that grouped configurations belong to the same snapshot energies (array) : Array containing the total energy of each snapshot ncores (int): number of CPUs to use for the gram matrix evaluation """ hypotetical_model_name = "models/MODEL_ker_TwoBodyManySpecies_ntr_%i.json" %(len(energies)) try: model_2b = models.TwoBodyManySpeciesModel.from_json(hypotetical_model_name) self.rep_sig = model_2b.rep_sig self.gp_2b = model_2b.gp if self.rep_sig: self.rep_energies = utility.get_repulsive_energies( glob_confs, self.rep_sig) energies -= self.rep_energies print("Loaded 2-body model to bootstart training") except: if self.rep_sig: self.rep_sig = utility.find_repulstion_sigma(glob_confs) self.rep_energies = utility.get_repulsive_energies( glob_confs, self.rep_sig) energies -= self.rep_energies self.gp_2b.fit_energy(glob_confs, energies, ncores=1) ntr = len(glob_confs) two_body_energies = self.gp_2b.predict_energy( glob_confs, ncores=ncores) self.gp_3b.fit_energy(glob_confs, energies - two_body_energies, ncores=ncores) def fit_force_and_energy(self, confs, forces, glob_confs, energies, ncores=1): """ Fit the GP to a set of training energies using a 2- and 3-body single species force-force, energy-energy, and energy-forces kernel functions. The 2-body Gaussian process is first fitted, then the 3-body GP is fitted to the difference between the training energies (and forces) and the 2-body predictions of energies (and forces) on the training configurations. 
Args: confs (list): List of M x 5 arrays containing coordinates and atomic numbers of atoms within a cutoff from the central one forces (array) : Array containing the vector forces on the central atoms of the training configurations glob_confs (list of lists): List of configurations arranged so that grouped configurations belong to the same snapshot energies (array) : Array containing the total energy of each snapshot ncores (int): number of CPUs to use for the gram matrix evaluation """ hypotetical_model_name = "models/MODEL_ker_TwoBodyManySpecies_ntr_%i.json" %(len(forces) + len(energies)) try: model_2b = models.TwoBodyManySpeciesModel.from_json(hypotetical_model_name) self.rep_sig = model_2b.rep_sig self.gp_2b = model_2b.gp if self.rep_sig: self.rep_energies = utility.get_repulsive_energies( glob_confs, self.rep_sig) energies -= self.rep_energies self.rep_forces = utility.get_repulsive_forces(confs, self.rep_sig) forces -= self.rep_forces print("Loaded 2-body model to bootstart training") except: if self.rep_sig: self.rep_sig = utility.find_repulstion_sigma(confs) self.rep_energies = utility.get_repulsive_energies( glob_confs, self.rep_sig) energies -= self.rep_energies self.rep_forces = utility.get_repulsive_forces(confs, self.rep_sig) forces -= self.rep_forces self.gp_2b.fit_force_and_energy( confs, forces, glob_confs, energies, ncores=ncores) two_body_forces = self.gp_2b.predict(confs, ncores=ncores) two_body_energies = self.gp_2b.predict_energy( glob_confs, ncores=ncores) self.gp_3b.fit_force_and_energy( confs, forces - two_body_forces, glob_confs, energies - two_body_energies, ncores=ncores) def predict(self, confs, return_std=False, ncores=1): """ Predict the forces acting on the central atoms of confs using the 2- and 3-body GPs. The total force is the sum of the two predictions. 
Args: confs (list): List of M x 5 arrays containing coordinates and atomic numbers of atoms within a cutoff from the central one return_std (bool): if True, returns the standard deviation associated to predictions according to the GP framework Returns: forces (array): array of force vectors predicted by the GPs forces_errors (array): errors associated to the force predictions, returned only if return_std is True """ if return_std: if self.rep_sig: rep_forces = utility.get_repulsive_forces(confs, self.rep_sig) force_2b, std_2b = self.gp_2b.predict( confs, return_std, ncores=ncores) force_2b += rep_forces else: force_2b, std_2b = self.gp_2b.predict( confs, return_std, ncores=ncores) force_3b, std_3b = self.gp_2b.predict( confs, return_std, ncores=ncores) return force_2b + force_3b, std_2b + std_3b else: if self.rep_sig: rep_forces = utility.get_repulsive_forces(confs, self.rep_sig) return self.gp_2b.predict(confs, return_std, ncores=ncores) + rep_forces + \ self.gp_3b.predict(confs, return_std, ncores=ncores) else: return self.gp_2b.predict(confs, return_std, ncores=ncores) + \ self.gp_3b.predict(confs, return_std, ncores=ncores) def predict_energy(self, glob_confs, return_std=False, ncores=1): """ Predict the local energies of the central atoms of confs using the 2- and 3-body GPs. The total force is the sum of the two predictions. 
Args: glob_confs (list of lists): List of configurations arranged so that grouped configurations belong to the same snapshot return_std (bool): if True, returns the standard deviation associated to predictions according to the GP framework Returns: energies (array) : Array containing the total energy of each snapshot energies_errors (array): errors associated to the energies predictions, returned only if return_std is True """ if return_std: if self.rep_sig: rep_energies = utility.get_repulsive_energies( glob_confs, self.rep_sig) force_2b, std_2b = self.gp_2b.predict_energy( glob_confs, return_std, ncores=ncores) energy_2b += rep_energies else: energy_2b, std_2b = self.gp_2b.predict_energy( glob_confs, return_std, ncores=ncores) energy_3b, std_3b = self.gp_2b.predict_energy( glob_confs, return_std, ncores=ncores) return energy_2b + energy_3b, std_2b + std_3b else: if self.rep_sig: rep_energies = utility.get_repulsive_energies( glob_confs, self.rep_sig) return self.gp_2b.predict_energy(glob_confs, return_std, ncores=ncores) + rep_energies +\ self.gp_3b.predict_energy( glob_confs, return_std, ncores=ncores) else: return self.gp_2b.predict_energy(glob_confs, return_std, ncores=ncores) + \ self.gp_3b.predict_energy( glob_confs, return_std, ncores=ncores) def build_grid(self, start, num_2b, num_3b, ncores=1): """Function used to create the three different 2-body energy grids for atoms of elements 0-0, 0-1, and 1-1, and the four different 3-body energy grids for atoms of elements 0-0-0, 0-0-1, 0-1-1, and 1-1-1. The function calls the ``build_grid_3b`` function for each of the 3-body grids to build. 
Args: start (float): smallest interatomic distance for which the energy is predicted by the GP and stored inn the 3-body mapped potential num (int): number of points to use in the grid of the 2-body mapped potentials num_3b (int): number of points to use to generate the list of distances used to generate the triplets of atoms for the 3-body mapped potentials ncores (int): number of CPUs to use to calculate the energy predictions """ self.grid_start = start self.grid_num_2b = num_2b self.grid_num_3b = num_2b perm_list_2b = list(combinations_with_replacement(self.elements, 2)) perm_list_3b = list(combinations_with_replacement(self.elements, 3)) dists_2b = np.linspace(start, self.r_cut, num_2b) confs_2b = np.zeros((num_2b, 1, 5)) confs_2b[:, 0, 0] = dists_2b for pair in perm_list_2b: # in this for loop, predicting then save for each individual one confs_2b[:, 0, 3], confs_2b[:, 0, 4] = pair[0], pair[1] mapped_energies = self.gp_2b.predict_energy( list(confs_2b), ncores=ncores, mapping=True) if self.rep_sig: mapped_energies += utility.get_repulsive_energies( confs_2b, self.rep_sig, mapping=True) self.grid_2b[pair] = interpolation.Spline1D(dists_2b, mapped_energies) dists_3b = np.linspace(start, self.r_cut, num_3b) for trip in perm_list_3b: self.grid_3b[trip] = self.build_grid_3b( dists_3b, trip[0], trip[1], trip[2], ncores = ncores) def build_grid_3b(self, dists, element_k, element_i, element_j, ncores=1): """ Build a mapped 3-body potential. Calculates the energy predicted by the GP for three atoms of elements element_i, element_j, element_k, at all possible combinations of num distances ranging from start to r_cut. The energy is calculated only for ``valid`` triplets of atoms, i.e. sets of three distances which form a triangle (this is checked via the triangle inequality), found by calling the ``generate_triplets_with_permutation_invariance`` function. 
The computed energies are stored in a 3D cube of values, and a 3D spline interpolation is created, which can be used to predict the energy and, through its analytic derivative, the force associated to any triplet of atoms. The total force or local energy can then be calculated for any atom by summing the triplet contributions of every valid triplet of atoms of which one is always the central one. The prediction is done by the ``calculator`` module which is built to work within the ase python package. Args: dists (array): array of floats containing all of the distances which can be used to build triplets of atoms. This array is created by calling np.linspace(start, r_cut, num) element_i (int): atomic number of the central atom i in a triplet element_j (int): atomic number of the second atom j in a triplet element_k (int): atomic number of the third atom k in a triplet ncores (int): number of CPUs to use when computing the triplet local energies Returns: spline3D (obj): a 3D spline object that can be used to predict the energy and the force associated to the central atom of a triplet. """ num = len(dists) inds, r_ij_x, r_ki_x, r_ki_y = self.generate_triplets_all(dists) confs = np.zeros((len(r_ij_x), 2, 5)) confs[:, 0, 0] = r_ij_x # Element on the x axis confs[:, 1, 0] = r_ki_x # Reshape into confs shape: this is x2 confs[:, 1, 1] = r_ki_y # Reshape into confs shape: this is y2 # Permutations of elements confs[:, :, 3] = element_i # Central element is always element 1 confs[:, 0, 4] = element_j # Element on the x axis is always element 2 # Element on the xy plane is always element 3 confs[:, 1, 4] = element_k grid_3b = np.zeros((num, num, num)) grid_3b[inds] = self.gp_3b.predict_energy( confs, ncores=ncores, mapping=True).flatten() return interpolation.Spline3D(dists, dists, dists, grid_3b) def save(self, path): """ Save the model. 
This creates a .json file containing the parameters of the model and the paths to the GP objects and the mapped potentials, which are saved as separate .gpy and .gpz files, respectively. Args: path (str): path to the file """ if not isinstance(path, Path): path = Path(path) ### SAVE THE MODEL ### params = { 'model': self.__class__.__name__, 'elements': self.elements, 'r_cut': self.r_cut, 'rep_sig': self.rep_sig, 'fitted': self.gp_2b.fitted, 'gp_2b': { 'kernel': self.gp_2b.kernel.kernel_name, 'n_train': self.gp_2b.n_train, 'sigma': self.gp_2b.kernel.theta[0], 'theta': self.gp_2b.kernel.theta[1], 'noise': self.gp_2b.noise }, 'gp_3b': { 'kernel': self.gp_3b.kernel.kernel_name, 'n_train': self.gp_3b.n_train, 'sigma': self.gp_3b.kernel.theta[0], 'theta': self.gp_3b.kernel.theta[1], 'noise': self.gp_3b.noise }, 'grid_2b': { 'r_min': self.grid_start, 'r_num': self.grid_num_2b, 'filename': {} } if self.grid_2b else {}, 'grid_3b': { 'r_min': self.grid_start, 'r_num': self.grid_num_3b, 'filename': {} } if self.grid_3b else {} } gp_filename_2b = "GP_ker_{p[gp_2b][kernel]}_ntr_{p[gp_2b][n_train]}.npy".format( p=params) params['gp_2b']['filename'] = gp_filename_2b self.gp_2b.save(path / gp_filename_2b) for k, grid in self.grid_2b.items(): key = '_'.join(str(element) for element in k) grid_filename_2b = "GRID_{}_ker_{p[gp_2b][kernel]}_ntr_{p[gp_2b][n_train]}.npz".format( key, p=params) print("Saved 2-body grid under name %s" % (grid_filename_2b)) params['grid_2b']['filename'][key] = grid_filename_2b grid.save(path / grid_filename_2b) ### SAVE THE 3B MODEL ### gp_filename_3b = "GP_ker_{p[gp_3b][kernel]}_ntr_{p[gp_3b][n_train]}.npy".format( p=params) params['gp_3b']['filename'] = gp_filename_3b self.gp_3b.save(path / gp_filename_3b) for k, grid in self.grid_3b.items(): key = '_'.join(str(element) for element in k) grid_filename_3b = "GRID_{}_ker_{p[gp_3b][kernel]}_ntr_{p[gp_3b][n_train]}.npz".format( key, p=params) print("Saved 3-body grid under name %s" % (grid_filename_3b)) 
params['grid_3b']['filename'][key] = grid_filename_3b grid.save(path / grid_filename_3b) with open(path / "MODEL_combined_ntr_{p[gp_2b][n_train]}.json".format(p=params), 'w') as fp: json.dump(params, fp, indent=4, cls=NpEncoder) print("Saved model with name: MODEL_combined_ntr_{p[gp_2b][n_train]}.json".format(p=params)) @classmethod def from_json(cls, path): """ Load the model. Loads the model, the associated GPs and the mapped potentials, if available. Args: path (str): path to the .json model file Return: model (obj): the model object """ if not isinstance(path, Path): path = Path(path) directory, prefix = path.parent, path.stem with open(path) as fp: params = json.load(fp) model = cls(params['elements'], params['r_cut'], params['gp_2b']['sigma'], params['gp_3b']['sigma'], params['gp_2b']['theta'], params['gp_3b']['theta'], params['gp_2b']['noise'], params['rep_sig']) gp_filename_2b = params['gp_2b']['filename'] gp_filename_3b = params['gp_3b']['filename'] try: model.gp_2b.load(directory / gp_filename_2b) except: warnings.warn("The 2-body GP file is missing") pass try: model.gp_3b.load(directory / gp_filename_3b) except: warnings.warn("The 3-body GP file is missing") pass if params['grid_2b']: for key, grid_filename_2b in params['grid_2b']['filename'].items(): k = tuple(int(ind) for ind in key.split('_')) model.grid_2b[k] = interpolation.Spline1D.load( directory / grid_filename_2b) for key, grid_filename_3b in params['grid_3b']['filename'].items(): k = tuple(int(ind) for ind in key.split('_')) model.grid_3b[k] = interpolation.Spline3D.load( directory / grid_filename_3b) model.grid_start = params['grid_2b']['r_min'] model.grid_num_2b = params['grid_2b']['r_num'] model.grid_num_3b = params['grid_3b']['r_num'] return model def save_gp(self, filename_2b, filename_3b): """ Saves the GP objects, now obsolete """ self.gp_2b.save(filename_2b) self.gp_3b.save(filename_3b) def load_gp(self, filename_2b, filename_3b): """ Loads the GP objects, now obsolete """ 
self.gp_2b.load(filename_2b) self.gp_3b.load(filename_3b) @staticmethod def generate_triplets_all(dists): """ Generate a list of all valid triplets. Calculates the energy predicted by the GP for three atoms at all possible combination of num distances ranging from start to r_cut. The energy is calculated only for ``valid`` triplets of atoms, i.e. sets of three distances which form a triangle (this is checked via the triangle inequality). The computed energies are stored in a 3D cube of values, and a 3D spline interpolation is created, which can be used to predict the energy and, through its analytic derivative, the force associated to any triplet of atoms. The total force or local energy can then be calculated for any atom by summing the triplet contributions of every valid triplet of atoms of which one is always the central one. The prediction is done by the ``calculator`` module which is built to work within the ase python package. Args: dists (array): array of floats containing all of the distances which can be used to build triplets of atoms. This array is created by calling np.linspace(start, r_cut, num) Returns: inds (array): array of booleans indicating which triplets (three distance values) need to be evaluated to fill the 3D grid of energy values. r_ij_x (array): array containing the x coordinate of the second atom j w.r.t. the central atom i r_ki_x (array): array containing the x coordinate of the third atom k w.r.t. the central atom i r_ki_y (array): array containing the y coordinate of the third atom k w.r.t. 
the central atom i """ d_ij, d_jk, d_ki = np.meshgrid( dists, dists, dists, indexing='ij', sparse=False, copy=True) # Valid triangles according to triangle inequality inds = np.logical_and( d_ij <= d_jk + d_ki, np.logical_and(d_jk <= d_ki + d_ij, d_ki <= d_ij + d_jk)) # Element on the x axis r_ij_x = d_ij[inds] # Element on the xy plane r_ki_x = (d_ij[inds] ** 2 - d_jk[inds] ** 2 + d_ki[inds] ** 2) / (2 * d_ij[inds]) # using abs to avoid numerical error near to 0 r_ki_y = np.sqrt(np.abs(d_ki[inds] ** 2 - r_ki_x ** 2)) return inds, r_ij_x, r_ki_x, r_ki_y
45.315118
113
0.615705
6,833
49,756
4.295624
0.060735
0.023133
0.022145
0.015331
0.915917
0.894931
0.880996
0.870196
0.862394
0.855512
0
0.020111
0.304446
49,756
1,097
114
45.356427
0.828017
0.369202
0
0.775685
0
0
0.076712
0.030836
0
0
0
0
0
1
0.044521
false
0.006849
0.013699
0
0.099315
0.020548
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
10ca0d1fb65f0cf9330392f01eb786d057450fd3
14,277
py
Python
redox_net_code/lib/.ipynb_checkpoints/Reaction_Library_Mother-checkpoint.py
beangoben/gp_redox_rxn
dfd4c358858192bcff4b1c93da1b9d3e0d9b0299
[ "MIT" ]
8
2019-05-26T14:30:12.000Z
2021-09-17T20:18:23.000Z
redox_net_code/lib/.ipynb_checkpoints/Reaction_Library_Mother-checkpoint.py
beangoben/gp_redox_rxn
dfd4c358858192bcff4b1c93da1b9d3e0d9b0299
[ "MIT" ]
1
2019-06-05T13:59:02.000Z
2019-06-05T13:59:02.000Z
redox_net_code/lib/.ipynb_checkpoints/Reaction_Library_Mother-checkpoint.py
beangoben/gp_redox_rxn
dfd4c358858192bcff4b1c93da1b9d3e0d9b0299
[ "MIT" ]
3
2020-07-01T22:01:20.000Z
2021-09-17T21:19:24.000Z
from rdkit import Chem
import json
from rdkit.Chem import AllChem, Draw, Descriptors
import copy
import pandas as pd
import re
import matplotlib.pyplot as plt
import lib.Reaction_Library as RL


def _run_reactions(metab_smiles, reactions):
    """Apply each reaction template to a molecule and collect product SMILES.

    Args:
        metab_smiles (str): SMILES of the substrate molecule.
        reactions: iterable of rdkit ChemicalReaction templates.

    Returns:
        list[str]: one SMILES per generated product set (may contain
        duplicates; callers dedupe with ``set``).
    """
    reactant = Chem.MolFromSmiles(metab_smiles)
    prod_list = []
    for rxn in reactions:
        # RunReactants yields a tuple of product molecules per match; the
        # original code only ever kept the first product of each match.
        for prods in rxn.RunReactants([reactant]):
            prod_list.append(Chem.MolToSmiles(prods[0]))
    return prod_list


### Reduction Functions:

def G1_function(metab_smiles):
    """Group-G1 reduction: carboxylic acid."""
    return _run_reactions(metab_smiles, [RL.G1_carb_acid])


def G2_function(metab_smiles):
    """Group-G2 reductions: aldehyde and ketone.

    NOTE(review): the original file also contained disabled 5/6-carbon-ring
    templates (G2_6c6r, G2_6c5r, G2_5c6r, G2_5c5r); they remain disabled here.
    """
    return _run_reactions(metab_smiles, [RL.G2_ald, RL.G2_ket])


def G3_function(metab_smiles):
    """Group-G3 reductions: aldehyde and ketone."""
    return _run_reactions(metab_smiles, [RL.G3_ald, RL.G3_ket])


def G4_function(metab_smiles):
    """Group-G4 reductions: terminal, internal and aromatic hydroxyls."""
    return _run_reactions(
        metab_smiles, [RL.G4_end_hydr, RL.G4_mid_hydr, RL.G4_arom_hydr])


### Oxidation Functions:

def G1_function_ox(metab_smiles):
    """Group-G1 oxidation: carboxylic acid."""
    return _run_reactions(metab_smiles, [RL.G1_carb_acid_ox])


def G2_function_ox(metab_smiles):
    """Group-G2 oxidations: aldehyde and ketone."""
    return _run_reactions(metab_smiles, [RL.G2_ald_ox, RL.G2_ket_ox])


def G3_function_ox(metab_smiles):
    """Group-G3 oxidations: aldehyde and ketone."""
    return _run_reactions(metab_smiles, [RL.G3_ald_ox, RL.G3_ket_ox])


def G4_function_ox(metab_smiles):
    """Group-G4 oxidations: terminal, internal and aromatic hydroxyls."""
    return _run_reactions(
        metab_smiles, [RL.G4_end_hydr_ox, RL.G4_mid_hydr_ox, RL.G4_arom_hydr_ox])


# Old Reduce metabolite:
def reduce_metabolite_old(metabolite_smiles):
    """Exhaustively reduce a metabolite with the G1/G2/G4 reductions.

    Repeatedly applies the reduction templates to every current substrate
    until no new products appear.

    Bug fix: the original loop terminated on ``len(GAll_prods) == 0`` —
    i.e. only the *last* substrate's products — and raised NameError when
    the substrate list was empty.  The loop now terminates when the whole
    substrate list is exhausted (matching ``reduce_metabolite``).

    Returns:
        list[str]: unique reaction SMILES strings ``sub>>prod``.
    """
    rxn_list = []
    substrate_list = [metabolite_smiles]  # single starting substrate
    while substrate_list:
        new_reactions = []  # reactions discovered in this iteration
        substrate_list_temp = []
        for sub in substrate_list:
            # G3 reductions were disabled in the original implementation.
            GAll_prods = list(set(
                G1_function(sub) + G2_function(sub) + G4_function(sub)))
            substrate_list_temp += GAll_prods
            for prod in GAll_prods:
                new_reactions.append(sub + '>>' + prod)
        rxn_list += list(set(new_reactions))
        # After reducing all current substrates, update the substrate list.
        substrate_list = list(set(substrate_list_temp))
    return list(set(rxn_list))


def reduce_metabolite(metabolite_smiles, Master_Sub_List=None, do_G3=False):
    """Exhaustively reduce a metabolite with the G2/G3 reductions.

    Args:
        metabolite_smiles (str): starting substrate.
        Master_Sub_List (list | None): substrates already explored; updated
            in place and returned.  Bug fix: the original default was a
            *mutable* ``[]`` shared between calls, so independent calls
            silently skipped each other's substrates.
        do_G3 (bool): kept for backward compatibility; currently unused —
            G3 reductions always run (as in the original active code).

    Returns:
        (list[str], list[str]): unique ``sub>>prod`` reactions and the
        updated master substrate list.
    """
    if Master_Sub_List is None:
        Master_Sub_List = []
    rxn_list = []
    substrate_list = [metabolite_smiles]
    while substrate_list:
        new_reactions = []
        substrate_list_temp = []
        # Skip substrates that were already explored in a previous round.
        substrate_list = list(set(substrate_list) - set(Master_Sub_List))
        Master_Sub_List += substrate_list
        for sub in substrate_list:
            # G1/G4 reductions were disabled in the original implementation.
            GAll_prods = list(set(G2_function(sub) + G3_function(sub)))
            substrate_list_temp += GAll_prods
            for prod in GAll_prods:
                new_reactions.append(sub + '>>' + prod)
        rxn_list += list(set(new_reactions))
        substrate_list = list(set(substrate_list_temp))
    return list(set(rxn_list)), Master_Sub_List


def oxidize_metabolite_old(metabolite_smiles):
    """Exhaustively oxidize a metabolite with the G1/G2/G4 oxidations.

    Bug fix: same termination fix as ``reduce_metabolite_old`` — the loop
    now ends when the substrate list is empty instead of testing the last
    substrate's products.

    Returns:
        list[str]: unique reaction SMILES strings ``sub>>prod``.
    """
    rxn_list = []
    substrate_list = [metabolite_smiles]
    while substrate_list:
        new_reactions = []
        substrate_list_temp = []
        for sub in substrate_list:
            # G3 oxidations were disabled in the original implementation.
            GAll_prods = list(set(
                G1_function_ox(sub) + G2_function_ox(sub) + G4_function_ox(sub)))
            substrate_list_temp += GAll_prods
            for prod in GAll_prods:
                new_reactions.append(sub + '>>' + prod)
        rxn_list += list(set(new_reactions))
        substrate_list = list(set(substrate_list_temp))
    return list(set(rxn_list))


def oxidize_metabolite(metabolite_smiles, Master_Sub_List=None,
                       rxn_network_file=None, rxn_limit=None):
    """Exhaustively oxidize a metabolite with the G2/G3 oxidations.

    Args:
        metabolite_smiles (str): starting substrate.
        Master_Sub_List (list | None): substrates already explored; updated
            in place and returned.  Bug fix: the original defaulted to
            ``None`` and then called ``set(Master_Sub_List)``, crashing with
            TypeError whenever the argument was omitted.
        rxn_network_file: kept for backward compatibility; unused.
        rxn_limit (int | None): if set, stop once more than this many
            reactions have been collected.

    Returns:
        (list[str], list[str]): unique ``sub>>prod`` reactions and the
        updated master substrate list.
    """
    if Master_Sub_List is None:
        Master_Sub_List = []
    rxn_list = []
    substrate_list = [metabolite_smiles]
    len_status = 0  # progress threshold (original used it for status prints)
    while substrate_list:
        new_reactions = []
        substrate_list_temp = []
        # Skip substrates that were already explored in a previous round.
        substrate_list = list(set(substrate_list) - set(Master_Sub_List))
        Master_Sub_List += substrate_list
        for sub in substrate_list:
            # G1/G4 oxidations were disabled in the original implementation.
            GAll_prods = list(set(G2_function_ox(sub) + G3_function_ox(sub)))
            substrate_list_temp += GAll_prods
            for prod in GAll_prods:
                new_reactions.append(sub + '>>' + prod)
        # Unlike the reduce variants, duplicates are only removed on return.
        rxn_list += new_reactions
        substrate_list = list(set(substrate_list_temp))
        # Optional early exit once the reaction count exceeds the limit.
        if rxn_limit and len(rxn_list) > rxn_limit:
            return list(set(rxn_list)), Master_Sub_List
        if len(rxn_list) > len_status:
            len_status = len(rxn_list) + 1000
    return list(set(rxn_list)), Master_Sub_List


def visualize_reaction_old(metabolite_smiles):
    """Print and display every reduction reaction found from a metabolite."""
    rxn_list = reduce_metabolite_old(metabolite_smiles)
    rxn_list.sort(key=len, reverse=True)
    print("Starting metabolite: " + metabolite_smiles)
    print("Number of reactions: " + str(len(rxn_list)))
    for rxn in rxn_list:
        print(rxn)
        # NOTE(review): display() is the IPython notebook builtin — this
        # module appears to be used from a notebook; confirm before running
        # as a plain script.
        display(AllChem.ReactionFromSmarts(rxn))


def visualize_reaction_new(metabolite_smiles):
    """Print and display every G2/G3 reduction reaction from a metabolite.

    Bug fix: ``reduce_metabolite`` returns a ``(rxn_list, master_list)``
    tuple; the original assigned the tuple directly and then crashed on
    ``rxn_list.sort``.
    """
    rxn_list, _ = reduce_metabolite(metabolite_smiles)
    rxn_list.sort(key=len, reverse=True)
    print("Starting metabolite: " + metabolite_smiles)
    print("Number of reactions: " + str(len(rxn_list)))
    for rxn in rxn_list:
        print(rxn)
        display(AllChem.ReactionFromSmarts(rxn))


def visualize_reaction_ox_old(metabolite_smiles):
    """Print and display every oxidation reaction found from a metabolite."""
    rxn_list = oxidize_metabolite_old(metabolite_smiles)
    rxn_list.sort(key=len)
    print("Starting metabolite: " + metabolite_smiles)
    print("Number of reactions: " + str(len(rxn_list)))
    for rxn in rxn_list:
        print(rxn)
        display(AllChem.ReactionFromSmarts(rxn))
34.992647
109
0.630595
1,895
14,277
4.525594
0.089182
0.072761
0.051306
0.058302
0.876283
0.849813
0.820079
0.817048
0.8132
0.79431
0
0.023856
0.283603
14,277
407
110
35.078624
0.814627
0.304406
0
0.759657
0
0
0.013687
0
0
0
0
0
0
1
0.064378
false
0
0.034335
0
0.154506
0.038627
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
10f12e1f8552b6679ab96e5fe7ff167e31068f3b
7,132
py
Python
tests/functional/basic_queries_tests.py
suhaibaffan/database
1ff9d8b770d893ffa53d8fdd0f004518e91763c1
[ "Apache-2.0" ]
1
2021-09-20T05:09:34.000Z
2021-09-20T05:09:34.000Z
tests/functional/basic_queries_tests.py
suhaibaffan/database
1ff9d8b770d893ffa53d8fdd0f004518e91763c1
[ "Apache-2.0" ]
null
null
null
tests/functional/basic_queries_tests.py
suhaibaffan/database
1ff9d8b770d893ffa53d8fdd0f004518e91763c1
[ "Apache-2.0" ]
null
null
null
# Copyright 2020 Alex Dukhno
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import psycopg2 as pg
import pytest


@pytest.fixture(scope="session", autouse=True)
def create_cursor(request):
    """Open one shared cursor against the local server for the whole session.

    The cursor and its connection are torn down by a session finalizer.
    """
    try:
        conn = pg.connect(host="localhost", password="check_this_out", database="postgres")
        cur = conn.cursor()

        def close_all():
            # Bug fix: close the cursor before the connection that owns it;
            # the original closed the connection first and then tried to
            # close a cursor on a dead connection.
            cur.close()
            conn.close()

        request.addfinalizer(close_all)
    except Exception as e:
        # Fail loudly if the server is unreachable — every test depends on it.
        assert False, str(e)
    return cur


def test_create_drop_schema(create_cursor):
    """A schema can be created and dropped."""
    cur = create_cursor
    try:
        cur.execute('create schema schema_name;')
    finally:
        cur.execute('drop schema schema_name;')


def test_create_drop_empty_table(create_cursor):
    """An empty (zero-column) table can be created and dropped."""
    cur = create_cursor
    try:
        cur.execute('create schema schema_name;')
        cur.execute('create table schema_name.empty_table();')
    finally:
        cur.execute('drop table schema_name.empty_table;')
        cur.execute('drop schema schema_name;')


def test_insert_select(create_cursor):
    """A single inserted row comes back from select."""
    cur = create_cursor
    try:
        cur.execute('create schema schema_name;')
        cur.execute('create table schema_name.table_name(si_column smallint);')
        cur.execute('insert into schema_name.table_name values (%d);' % 1)
        cur.execute('select * from schema_name.table_name;')
        r = cur.fetchone()
        assert r == (1,)
    finally:
        cur.execute('drop table schema_name.table_name;')
        cur.execute('drop schema schema_name;')


def test_insert_select_many(create_cursor):
    """A multi-row insert comes back in order from select."""
    cur = create_cursor
    try:
        cur.execute('create schema schema_name;')
        cur.execute('create table schema_name.table_name(si_column smallint);')
        cur.execute('insert into schema_name.table_name values (%d), (%d), (%d);' % (1, 2, 3))
        cur.execute('select * from schema_name.table_name;')
        r = cur.fetchmany(3)
        assert r == [(1,), (2,), (3,)]
    finally:
        cur.execute('drop table schema_name.table_name;')
        cur.execute('drop schema schema_name;')


def test_insert_select_update_all_select(create_cursor):
    """An unqualified update rewrites every row."""
    cur = create_cursor
    try:
        cur.execute('create schema schema_name;')
        cur.execute('create table schema_name.table_name(si_column smallint);')
        cur.execute('insert into schema_name.table_name values (%d), (%d), (%d);' % (1, 2, 3))
        cur.execute('select * from schema_name.table_name;')
        r = cur.fetchmany(3)
        assert r == [(1,), (2,), (3,)]
        cur.execute('update schema_name.table_name set si_column = %d;' % 4)
        cur.execute('select * from schema_name.table_name;')
        r = cur.fetchmany(3)
        assert r == [(4,), (4,), (4,)]
    finally:
        cur.execute('drop table schema_name.table_name;')
        cur.execute('drop schema schema_name;')


def test_insert_select_delete_all_select(create_cursor):
    """An unqualified delete removes every row."""
    cur = create_cursor
    try:
        cur.execute('create schema schema_name;')
        cur.execute('create table schema_name.table_name(si_column smallint);')
        cur.execute('insert into schema_name.table_name values (%d), (%d), (%d);' % (1, 2, 3))
        cur.execute('select * from schema_name.table_name;')
        r = cur.fetchmany(3)
        assert r == [(1,), (2,), (3,)]
        cur.execute('delete from schema_name.table_name;')
        cur.execute('select * from schema_name.table_name;')
        r = cur.fetchmany(3)
        assert r == []
    finally:
        cur.execute('drop table schema_name.table_name;')
        cur.execute('drop schema schema_name;')


def test_insert_select_many_columns(create_cursor):
    """Rows with several columns round-trip through insert/select."""
    cur = create_cursor
    try:
        cur.execute('create schema schema_name;')
        cur.execute('create table schema_name.table_name(si_column_1 smallint, si_column_2 smallint, si_column_3 smallint);')
        for t in [(1, 2, 3), (4, 5, 6), (7, 8, 9)]:
            cur.execute('insert into schema_name.table_name values (%s, %s, %s);' % t)
        cur.execute('select * from schema_name.table_name;')
        r = cur.fetchmany(3)
        assert r == [(1, 2, 3,), (4, 5, 6,), (7, 8, 9,)]
    finally:
        cur.execute('drop table schema_name.table_name;')
        cur.execute('drop schema schema_name;')


def test_insert_update_specified_column(create_cursor):
    """Updating one named column leaves the others untouched."""
    cur = create_cursor
    try:
        cur.execute('create schema schema_name;')
        cur.execute('create table schema_name.table_name(si_column_1 smallint, si_column_2 smallint, si_column_3 smallint);')
        for t in [(1, 2, 3), (4, 5, 6), (7, 8, 9)]:
            cur.execute('insert into schema_name.table_name values (%s, %s, %s);' % t)
        cur.execute('select * from schema_name.table_name;')
        r = cur.fetchmany(3)
        assert r == [(1, 2, 3,), (4, 5, 6,), (7, 8, 9,)]
        cur.execute('update schema_name.table_name set si_column_2 = %d;' % 10)
        cur.execute('select * from schema_name.table_name;')
        r = cur.fetchmany(3)
        assert r == [(1, 10, 3,), (4, 10, 6,), (7, 10, 9,)]
    finally:
        cur.execute('drop table schema_name.table_name;')
        cur.execute('drop schema schema_name;')


def test_insert_select_reordered(create_cursor):
    """Select honors an explicit column order different from the table's."""
    cur = create_cursor
    try:
        cur.execute('create schema schema_name;')
        cur.execute('create table schema_name.table_name(si_column_1 smallint, si_column_2 smallint, si_column_3 smallint);')
        for t in [(1, 2, 3), (4, 5, 6), (7, 8, 9)]:
            cur.execute('insert into schema_name.table_name values (%s, %s, %s);' % t)
        cur.execute('select si_column_3, si_column_1, si_column_2 from schema_name.table_name;')
        r = cur.fetchmany(3)
        assert r == [(3, 1, 2,), (6, 4, 5,), (9, 7, 8,)]
    finally:
        cur.execute('drop table schema_name.table_name;')
        cur.execute('drop schema schema_name;')


def test_insert_select_same_column_many_times(create_cursor):
    """The same column may be projected multiple times in one select."""
    cur = create_cursor
    try:
        cur.execute('create schema schema_name;')
        cur.execute('create table schema_name.table_name(si_column_1 smallint, si_column_2 smallint, si_column_3 smallint);')
        for t in [(1, 2, 3), (4, 5, 6), (7, 8, 9)]:
            cur.execute('insert into schema_name.table_name values (%s, %s, %s);' % t)
        cur.execute('select si_column_3, si_column_1, si_column_2, si_column_1, si_column_3 from schema_name.table_name;')
        r = cur.fetchmany(3)
        assert r == [(3, 1, 2, 1, 3), (6, 4, 5, 4, 6,), (9, 7, 8, 7, 9,)]
    finally:
        cur.execute('drop table schema_name.table_name;')
        cur.execute('drop schema schema_name;')
35.66
125
0.638671
1,025
7,132
4.238049
0.132683
0.138122
0.131215
0.166206
0.797422
0.787983
0.78361
0.775322
0.766805
0.766805
0
0.028251
0.225743
7,132
199
126
35.839196
0.758421
0.076837
0
0.728571
0
0
0.395738
0.151294
0
0
0
0
0.085714
1
0.085714
false
0.007143
0.014286
0
0.107143
0
0
0
0
null
0
0
1
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
804f1f6bf2b4d3e4254e40f96d8dc384dff4ffac
95
py
Python
tools/Vitis-AI-Quantizer/vai_q_pytorch/pytorch_binding/pytorch_nndct/pruning/__init__.py
Carles-Figuerola/Vitis-AI
fc043ea4aca1f9fe4e18962e6a6ae397812bb34b
[ "Apache-2.0" ]
1
2021-04-01T06:38:48.000Z
2021-04-01T06:38:48.000Z
tools/Vitis-AI-Quantizer/vai_q_pytorch/pytorch_binding/pytorch_nndct/pruning/__init__.py
cy333/Vitis-AI
611b82cfc32ea2fe04491432bf8feed1f378c9de
[ "Apache-2.0" ]
null
null
null
tools/Vitis-AI-Quantizer/vai_q_pytorch/pytorch_binding/pytorch_nndct/pruning/__init__.py
cy333/Vitis-AI
611b82cfc32ea2fe04491432bf8feed1f378c9de
[ "Apache-2.0" ]
null
null
null
from pytorch_nndct.pruning.core import InputSpec from pytorch_nndct.pruning.core import Pruner
31.666667
48
0.873684
14
95
5.785714
0.571429
0.271605
0.395062
0.567901
0.814815
0.814815
0
0
0
0
0
0
0.084211
95
2
49
47.5
0.931034
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
1
1
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
9
337f488f4f8222c18e39047a33b38cab4dd65d8d
12,000
py
Python
models/mix_2dmodel.py
bzhai/Ubi-SleepNet
27837827dec608d06659421d073872fb1f68453e
[ "MIT" ]
3
2022-01-22T15:55:31.000Z
2022-01-28T16:09:02.000Z
models/mix_2dmodel.py
bzhai/Ubi-SleepNet
27837827dec608d06659421d073872fb1f68453e
[ "MIT" ]
null
null
null
models/mix_2dmodel.py
bzhai/Ubi-SleepNet
27837827dec608d06659421d073872fb1f68453e
[ "MIT" ]
null
null
null
import torch.nn as nn
import torch


def num_flat_features(x):
    """Return the number of features when flattening all but the batch dim."""
    size = x.size()[1:]  # all dimensions except the batch dimension
    num_features = 1
    for s in size:
        num_features *= s
    return num_features


def _vgg_features():
    """Build the VGG-style (1, 3)-kernel feature extractor shared by the
    Vgg* models.

    Factored out because the identical stack was copy-pasted into three
    classes in the original file.  Module order (and hence state_dict keys)
    matches the original ``nn.Sequential`` exactly.
    """
    return nn.Sequential(
        # #### block 1
        nn.Conv2d(1, 512, kernel_size=(1, 3), padding=(0, 1)),    # con_1
        nn.ReLU(inplace=True),
        nn.Conv2d(512, 512, kernel_size=(1, 3), padding=(0, 1)),  # con_2
        nn.ReLU(inplace=True),
        nn.MaxPool2d(kernel_size=(1, 2), stride=(1, 2)),          # max_pool
        # #### block 2
        nn.Conv2d(512, 128, kernel_size=(1, 3), padding=(0, 1)),  # con_3
        nn.ReLU(inplace=True),
        nn.Conv2d(128, 128, kernel_size=(1, 3), padding=(0, 1)),  # con_4
        nn.ReLU(inplace=True),
        nn.MaxPool2d(kernel_size=(1, 2), stride=(1, 2)),          # max pool
        # #### block 3
        nn.Conv2d(128, 512, kernel_size=(1, 3), padding=(0, 1)),  # con_5
        nn.ReLU(inplace=True),
        nn.Conv2d(512, 512, kernel_size=(1, 3), padding=(0, 1)),  # con_6
        nn.ReLU(inplace=True),
        nn.Conv2d(512, 512, kernel_size=(1, 3), padding=(0, 1)),  # con_7
        nn.ReLU(inplace=True),
    )


def _classifier(in_features, num_classes):
    """Build the shared MLP head: in_features -> 512 -> 128 -> 32 -> classes."""
    return nn.Sequential(
        nn.Linear(in_features, 512),
        nn.ReLU(inplace=True),
        nn.Dropout(p=0.25),
        nn.Linear(512, 128),
        nn.ReLU(inplace=True),
        nn.Dropout(p=0.25),
        nn.Linear(128, 32),
        nn.ReLU(inplace=True),
        nn.Dropout(p=0.25),
        nn.Linear(32, num_classes),
    )


def _init_res_trunk(model):
    """Attach the residual-trunk conv layers to ``model``.

    Attribute names (conv_1 .. conv_7, relu, max_pool) and creation order
    match the original classes, so state_dict keys are unchanged.
    """
    model.conv_1 = nn.Conv2d(1, 512, kernel_size=(1, 3), padding=(0, 1))    # con_1
    model.conv_2 = nn.Conv2d(512, 512, kernel_size=(1, 3), padding=(0, 1))  # con_2
    model.conv_3 = nn.Conv2d(512, 128, kernel_size=(1, 3), padding=(0, 1))  # conv_3
    model.conv_4 = nn.Conv2d(128, 128, kernel_size=(1, 3), padding=(0, 1))  # conv_4
    model.conv_5 = nn.Conv2d(128, 512, kernel_size=(1, 3), padding=(0, 1))  # conv_5
    model.conv_6 = nn.Conv2d(512, 512, kernel_size=(1, 3), padding=(0, 1))  # conv_6
    model.conv_7 = nn.Conv2d(512, 512, kernel_size=(1, 3), padding=(0, 1))  # conv_7
    model.relu = nn.ReLU(inplace=True)
    model.max_pool = nn.MaxPool2d(kernel_size=(1, 2), stride=(1, 2))


def _res_forward(model, x):
    """Run the shared residual trunk; returns the pre-flatten feature map."""
    x = model.conv_1(x)
    x = model.relu(x)
    residual_1 = x
    output = model.conv_2(x)
    output += residual_1
    output = model.relu(output)
    output = model.max_pool(output)

    output_2 = model.conv_3(output)
    # NOTE: in block 2 the residual is taken *before* the ReLU (it is taken
    # after the ReLU in blocks 1 and 3) — this matches the original code.
    residual_2 = output_2
    output_2 = model.relu(output_2)
    output_2 = model.conv_4(output_2)
    output_2 += residual_2
    output_2 = model.relu(output_2)
    output_2 = model.max_pool(output_2)

    output_3 = model.conv_5(output_2)
    output_3 = model.relu(output_3)
    residual_3 = output_3
    output_3 = model.conv_6(output_3)
    output_3 = model.relu(output_3)
    output_3 = model.conv_7(output_3)
    output_3 += residual_3
    output_3 = model.relu(output_3)
    return output_3


class VggIMG(nn.Module):
    """VGG-style classifier over a (1, H, W) "image" input using (1, 3)
    kernels, e.g. MESA with H=9, W=101.  Classifier consumes the full
    flattened feature map."""

    def __init__(self, in_channels=9, num_classes=3, time_step_dim=25) -> None:
        super(VggIMG, self).__init__()
        self.channels = in_channels
        self.features = _vgg_features()
        self.classifier = _classifier(time_step_dim * 512 * self.channels, num_classes)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = self.features(x)
        x = torch.flatten(x, 1)
        x = self.classifier(x)
        return x


class VggIMGRes(nn.Module):
    """Residual variant of :class:`VggIMG`; same input convention
    (e.g. MESA H=9, W=101)."""

    def __init__(self, in_channels=9, num_classes=3, time_step_dim=25) -> None:
        super(VggIMGRes, self).__init__()
        _init_res_trunk(self)
        self.channels = in_channels
        self.classifier = _classifier(time_step_dim * 512 * self.channels, num_classes)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        output_3 = _res_forward(self, x)
        output_3 = torch.flatten(output_3, 1)
        output_3 = self.classifier(output_3)
        return output_3


class VggIMGResSum(nn.Module):
    """Residual variant that sums over the height axis before the classifier
    (so the head is channel-count independent); e.g. MESA H=9, W=101."""

    def __init__(self, in_channels=9, num_classes=3, time_step_dim=25) -> None:
        super(VggIMGResSum, self).__init__()
        _init_res_trunk(self)
        self.channels = in_channels
        self.classifier = _classifier(time_step_dim * 512, num_classes)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        output_3 = _res_forward(self, x)
        output_3 = output_3.sum(dim=2)  # collapse the height (modality) axis
        output_3 = torch.flatten(output_3, 1)
        output_3 = self.classifier(output_3)
        return output_3


class VggIMGSum(nn.Module):
    """Plain VGG variant that sums over the height axis before the classifier;
    e.g. MESA H=9, W=101."""

    def __init__(self, in_channels=9, num_classes=3, time_step_dim=25) -> None:
        super(VggIMGSum, self).__init__()
        self.channels = in_channels
        self.features = _vgg_features()
        self.classifier = _classifier(time_step_dim * 512, num_classes)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = self.features(x)
        x = x.sum(dim=2)  # collapse the height (modality) axis
        x = torch.flatten(x, 1)
        x = self.classifier(x)
        return x


class VggIMGNOFC(nn.Module):
    """Feature extractor only (no classifier head); e.g. MESA H=9, W=101."""

    def __init__(self) -> None:
        super(VggIMGNOFC, self).__init__()
        self.features = _vgg_features()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = self.features(x)
        return x


class VggIMGResNOFC(nn.Module):
    """Residual feature extractor only (no classifier head).

    The constructor arguments are unused but kept for interface
    compatibility with the other Vgg* classes (as in the original).
    """

    def __init__(self, in_channels=9, num_classes=3, time_step_dim=25) -> None:
        super(VggIMGResNOFC, self).__init__()
        _init_res_trunk(self)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return _res_forward(self, x)
39.087948
87
0.555167
1,793
12,000
3.523146
0.045176
0.041159
0.088808
0.079785
0.95615
0.952984
0.952984
0.952984
0.952984
0.952984
0
0.103277
0.295583
12,000
306
88
39.215686
0.644032
0.067
0
0.88
0
0
0
0
0
0
0
0
0
1
0.052
false
0
0.008
0
0.112
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
3393c05f17122c315f018eb952815b894d1f5ade
8,522
py
Python
tests/redeem_deposit_test.py
mixbytes/lido-dot-ksm
d9cfa4bd113a14d18cf2e4c8cf2c9a08dde8e5ff
[ "MIT" ]
null
null
null
tests/redeem_deposit_test.py
mixbytes/lido-dot-ksm
d9cfa4bd113a14d18cf2e4c8cf2c9a08dde8e5ff
[ "MIT" ]
5
2022-03-21T15:23:26.000Z
2022-03-28T07:59:27.000Z
tests/redeem_deposit_test.py
mixbytes/lido-dot-ksm
d9cfa4bd113a14d18cf2e4c8cf2c9a08dde8e5ff
[ "MIT" ]
null
null
null
from brownie import chain from helpers import RelayChain, distribute_initial_tokens def test_redeem_right_after_deposit_equal(lido, oracle_master, vKSM, accounts): distribute_initial_tokens(vKSM, lido, accounts) relay = RelayChain(lido, vKSM, oracle_master, accounts, chain) relay.new_ledger("0x10", "0x11") relay.new_ledger("0x20", "0x21") relay.new_ledger("0x30", "0x31") deposit = 20 * 10**18 lido.deposit(deposit, {'from': accounts[0]}) relay.new_era() assert relay.ledgers[0].free_balance > 0 assert relay.ledgers[1].free_balance > 0 assert relay.ledgers[2].free_balance > 0 assert relay.ledgers[0].active_balance == 0 assert relay.ledgers[1].active_balance == 0 assert relay.ledgers[2].active_balance == 0 for i in range(20): relay.new_era() # 0. Save ledger stakes before actions led1_stake = lido.ledgerStake(relay.ledgers[0].ledger_address) led2_stake = lido.ledgerStake(relay.ledgers[1].ledger_address) led3_stake = lido.ledgerStake(relay.ledgers[2].ledger_address) # 1. Deposit and redeem before new era deposit_2 = 5 * 10**18 lido.deposit(deposit_2, {'from': accounts[1]}) lido.redeem(deposit_2, {'from': accounts[1]}) relay.new_era() # 2. Check token distirbution after new era led1_stake_upd = lido.ledgerStake(relay.ledgers[0].ledger_address) led2_stake_upd = lido.ledgerStake(relay.ledgers[1].ledger_address) led3_stake_upd = lido.ledgerStake(relay.ledgers[2].ledger_address) assert led1_stake_upd == led1_stake assert led2_stake_upd == led2_stake assert led3_stake_upd == led3_stake # 3. check unbonding balance (waitingToUnbonding, readyToClaim) = lido.getUnbonded(accounts[1]) assert waitingToUnbonding == deposit_2 assert readyToClaim == 0 # 4. 
wait and check relay.new_era() (waitingToUnbonding, readyToClaim) = lido.getUnbonded(accounts[1]) assert waitingToUnbonding == 0 assert readyToClaim == deposit_2 balance_before_claim = vKSM.balanceOf(accounts[1]) lido.claimUnbonded({'from': accounts[1]}) assert vKSM.balanceOf(accounts[1]) == (deposit_2 + balance_before_claim) def test_redeem_right_after_deposit_less(lido, oracle_master, vKSM, accounts): distribute_initial_tokens(vKSM, lido, accounts) relay = RelayChain(lido, vKSM, oracle_master, accounts, chain) relay.new_ledger("0x10", "0x11") deposit = 20 * 10**18 lido.deposit(deposit, {'from': accounts[0]}) relay.new_era() relay.new_era() # 0. Save ledger stakes before actions led1_stake = lido.ledgerStake(relay.ledgers[0].ledger_address) # 1. Deposit and redeem before new era deposit_2 = 5 * 10**18 redeem = 4 * 10**18 lido.deposit(deposit_2, {'from': accounts[1]}) lido.redeem(redeem, {'from': accounts[1]}) relay.new_era() # 2. Check token distirbution after new era led1_stake_upd = lido.ledgerStake(relay.ledgers[0].ledger_address) assert led1_stake_upd == (led1_stake + (deposit_2 - redeem)) # 3. check unbonding balance (waitingToUnbonding, readyToClaim) = lido.getUnbonded(accounts[1]) assert waitingToUnbonding == redeem assert readyToClaim == 0 # 4. wait and check relay.new_era() (waitingToUnbonding, readyToClaim) = lido.getUnbonded(accounts[1]) assert waitingToUnbonding == 0 assert readyToClaim == redeem balance_before_claim = vKSM.balanceOf(accounts[1]) lido.claimUnbonded({'from': accounts[1]}) assert vKSM.balanceOf(accounts[1]) == (redeem + balance_before_claim) def test_redeem_right_after_deposit_greater(lido, oracle_master, vKSM, accounts): distribute_initial_tokens(vKSM, lido, accounts) relay = RelayChain(lido, vKSM, oracle_master, accounts, chain) relay.new_ledger("0x10", "0x11") deposit = 20 * 10**18 lido.deposit(deposit, {'from': accounts[0]}) relay.new_era() relay.new_era() # 0. 
Save ledger stakes before actions led1_stake = lido.ledgerStake(relay.ledgers[0].ledger_address) # 1. Deposit and redeem before new era deposit_2 = 5 * 10**18 redeem = 8 * 10**18 lido.deposit(deposit_2, {'from': accounts[0]}) lido.redeem(redeem, {'from': accounts[0]}) relay.new_era() # 2. Check token distirbution after new era led1_stake_upd = lido.ledgerStake(relay.ledgers[0].ledger_address) assert led1_stake_upd == (led1_stake - (redeem - deposit_2)) # 3. check unbonding balance (waitingToUnbonding, readyToClaim) = lido.getUnbonded(accounts[0]) assert waitingToUnbonding == redeem assert readyToClaim == 0 # 5. wait for unbonding for i in range(33): relay.new_era() balance_before_claim = vKSM.balanceOf(accounts[0]) lido.claimUnbonded({'from': accounts[0]}) assert vKSM.balanceOf(accounts[0]) == (redeem + balance_before_claim) def test_deposit_after_redeem_in_new_era(lido, oracle_master, vKSM, accounts): distribute_initial_tokens(vKSM, lido, accounts) relay = RelayChain(lido, vKSM, oracle_master, accounts, chain) relay.new_ledger("0x10", "0x11") relay.new_ledger("0x20", "0x21") deposit = 20 * 10**12 lido.deposit(deposit, {'from': accounts[0]}) relay.new_era() relay.new_era() redeem = 10 * 10**12 lido.redeem(redeem, {'from': accounts[0]}) relay.new_era() assert relay.ledgers[0].active_balance == (deposit - redeem) / 2 assert relay.ledgers[1].active_balance == (deposit - redeem) / 2 lido.deposit(redeem, {'from': accounts[1]}) relay.new_era() # transfer excess to withdrawal relay.new_era() # remove element from queue (waitingToUnbonding, readyToClaim) = lido.getUnbonded(accounts[0]) assert waitingToUnbonding == 0 assert readyToClaim == redeem balance_before_claim = vKSM.balanceOf(accounts[0]) lido.claimUnbonded({'from': accounts[0]}) assert vKSM.balanceOf(accounts[0]) == (redeem + balance_before_claim) def test_deposit_after_redeem_in_new_era_less(lido, oracle_master, vKSM, Ledger, accounts): distribute_initial_tokens(vKSM, lido, accounts) relay = RelayChain(lido, vKSM, 
oracle_master, accounts, chain) relay.new_ledger("0x10", "0x11") deposit = 20 * 10**12 lido.deposit(deposit, {'from': accounts[0]}) relay.new_era() relay.new_era() assert Ledger.at(lido.findLedger(relay.ledgers[0].stash_account)).ledgerStake() == deposit redeem = 5 * 10**12 lido.redeem(redeem, {'from': accounts[0]}) relay.new_era() assert Ledger.at(lido.findLedger(relay.ledgers[0].stash_account)).ledgerStake() == deposit - redeem deposit_2 = 15 * 10**12 lido.deposit(deposit_2, {'from': accounts[1]}) relay.new_era() # transfer excess to withdrawal relay.new_era() # remove element from queue assert Ledger.at(lido.findLedger(relay.ledgers[0].stash_account)).ledgerStake() == deposit + (deposit_2 - redeem) (waitingToUnbonding, readyToClaim) = lido.getUnbonded(accounts[0]) assert waitingToUnbonding == 0 assert readyToClaim == redeem balance_before_claim = vKSM.balanceOf(accounts[0]) lido.claimUnbonded({'from': accounts[0]}) assert vKSM.balanceOf(accounts[0]) == (redeem + balance_before_claim) def test_deposit_after_redeem_in_new_era_greater(lido, oracle_master, vKSM, Ledger, accounts): distribute_initial_tokens(vKSM, lido, accounts) relay = RelayChain(lido, vKSM, oracle_master, accounts, chain) relay.new_ledger("0x10", "0x11") deposit = 20 * 10**12 lido.deposit(deposit, {'from': accounts[0]}) relay.new_era() relay.new_era() assert Ledger.at(lido.findLedger(relay.ledgers[0].stash_account)).ledgerStake() == deposit redeem = 10 * 10**12 lido.redeem(redeem, {'from': accounts[0]}) relay.new_era() assert Ledger.at(lido.findLedger(relay.ledgers[0].stash_account)).ledgerStake() == deposit - redeem deposit_2 = 5 * 10**12 lido.deposit(deposit_2, {'from': accounts[1]}) relay.new_era() assert Ledger.at(lido.findLedger(relay.ledgers[0].stash_account)).ledgerStake() == deposit + (deposit_2 - redeem) (waitingToUnbonding, readyToClaim) = lido.getUnbonded(accounts[0]) assert waitingToUnbonding == redeem assert readyToClaim == 0 for i in range(32): relay.new_era() balance_before_claim = 
vKSM.balanceOf(accounts[0]) lido.claimUnbonded({'from': accounts[0]}) assert vKSM.balanceOf(accounts[0]) == (redeem + balance_before_claim)
30.765343
117
0.697489
1,094
8,522
5.256856
0.082267
0.050078
0.051643
0.031299
0.955834
0.940358
0.89654
0.884716
0.870283
0.824204
0
0.040729
0.176015
8,522
277
118
30.765343
0.778268
0.070171
0
0.743902
0
0
0.021252
0
0
0
0.009108
0
0.25
1
0.036585
false
0
0.012195
0
0.04878
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
33bfe3938c921779de89f8550adc636b7f2f43fb
117
py
Python
python/testData/refactoring/move/moveDoesntMergeFromImportsAccordingToCodeStyle/after/src/dst.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/refactoring/move/moveDoesntMergeFromImportsAccordingToCodeStyle/after/src/dst.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/refactoring/move/moveDoesntMergeFromImportsAccordingToCodeStyle/after/src/dst.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
from lib import bar from lib import baz, quux from lib import foo print(baz, quux) def func(): print(foo, bar)
13
25
0.700855
21
117
3.904762
0.47619
0.256098
0.47561
0
0
0
0
0
0
0
0
0
0.213675
117
9
26
13
0.891304
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
true
0
0.5
0
0.666667
0.333333
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
33d79871557dc79e9faae381c037ca5743ff7435
239
py
Python
examples/middleware.py
David-Wobrock/simpleflow
09f59105b48ae79aef37b506bbde0cd1f2c360d1
[ "MIT" ]
69
2015-02-24T00:49:40.000Z
2022-02-05T02:35:04.000Z
examples/middleware.py
David-Wobrock/simpleflow
09f59105b48ae79aef37b506bbde0cd1f2c360d1
[ "MIT" ]
295
2015-02-06T11:02:00.000Z
2022-03-21T11:01:34.000Z
examples/middleware.py
David-Wobrock/simpleflow
09f59105b48ae79aef37b506bbde0cd1f2c360d1
[ "MIT" ]
27
2015-08-31T22:14:42.000Z
2022-02-08T07:25:01.000Z
def my_pre_execution_middleware(context, **kwargs): print("AAAH PRE EXECUTION MIDDLEWARE", context) def my_post_execution_middleware(context, result, **kwargs): print("AAAH POST EXECUTION MIDDLEWARE", "activity result:", result)
34.142857
71
0.76569
29
239
6.103448
0.413793
0.429379
0.440678
0.327684
0
0
0
0
0
0
0
0
0.121339
239
6
72
39.833333
0.842857
0
0
0
0
0
0.313808
0
0
0
0
0
0
1
0.5
false
0
0
0
0.5
0.5
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
1
0
7
1d313a592762c41ab237bf1484187ca6a0afff37
90
py
Python
scripts/graph/rrn/__init__.py
dair-iitd/1oML_workdir
37117de4abf1774548786e9534c90977d67091d8
[ "Apache-2.0" ]
3
2021-05-07T02:41:54.000Z
2022-02-10T05:18:55.000Z
scripts/graph/rrn/__init__.py
dair-iitd/1oML_workdir
37117de4abf1774548786e9534c90977d67091d8
[ "Apache-2.0" ]
null
null
null
scripts/graph/rrn/__init__.py
dair-iitd/1oML_workdir
37117de4abf1774548786e9534c90977d67091d8
[ "Apache-2.0" ]
null
null
null
from .rrn import * from .sudoku import * from .sudoku_data import * #import .sudoku_data
15
26
0.744444
13
90
5
0.384615
0.307692
0.492308
0
0
0
0
0
0
0
0
0
0.166667
90
5
27
18
0.866667
0.211111
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
1d410ce5953d7a05e5a377ee96b89775dc5c9f7e
3,621
py
Python
code_tools/src/bsd_3_clause_license.py
xykivo/percipio
a583b7887d5515d928ad4c47c425c4363462838c
[ "BSD-3-Clause" ]
2
2016-04-14T09:11:01.000Z
2016-05-05T17:52:33.000Z
code_tools/src/bsd_3_clause_license.py
xykivo/percipio
a583b7887d5515d928ad4c47c425c4363462838c
[ "BSD-3-Clause" ]
4
2018-04-03T02:53:08.000Z
2018-12-08T00:18:41.000Z
code_tools/src/bsd_3_clause_license.py
xykivo/percipio
a583b7887d5515d928ad4c47c425c4363462838c
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/python3 # -*- coding: utf-8 -*- # BSD 3-Clause License # # Copyright (c) 2021, Dror Smolarsky # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. '''This file contains constants for the BSD 3 clause license. The constants contain regular expression captures and escapes so they can be used to compare to licenses in files using regular expression matching. The constants are stored as a sequence of lines, where each line contains one line form the license. 
''' BSD_LICENSE_3_CLAUSE = r'''BSD 3-Clause License Copyright \(c\) (?P<year>[\d\-]+), (?P<copyright_owner>[\w\s]+) All rights reserved\. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1\. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer\. 2\. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution\. 3\. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission\. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION\) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT \(INCLUDING NEGLIGENCE OR OTHERWISE\) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\. '''.split('\n')
48.28
79
0.765811
527
3,621
5.254269
0.316888
0.026002
0.024558
0.033225
0.876851
0.876851
0.857349
0.857349
0.857349
0.857349
0
0.005402
0.181994
3,621
74
80
48.932432
0.92944
0.515051
0
0
0
0
0.952555
0.017032
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
1d4bedc87cfc353189f577742fc5ac0a24898e5d
35
py
Python
Problems/Calculate it/main.py
TataSatyaPratheek/Tic-Tac-Toe
fa3da80f9ec9ffa3c8c9aaa34a5bb1e88553fecd
[ "MIT" ]
null
null
null
Problems/Calculate it/main.py
TataSatyaPratheek/Tic-Tac-Toe
fa3da80f9ec9ffa3c8c9aaa34a5bb1e88553fecd
[ "MIT" ]
null
null
null
Problems/Calculate it/main.py
TataSatyaPratheek/Tic-Tac-Toe
fa3da80f9ec9ffa3c8c9aaa34a5bb1e88553fecd
[ "MIT" ]
null
null
null
print((1234567890*987654321)+67890)
35
35
0.828571
4
35
7.25
1
0
0
0
0
0
0
0
0
0
0
0.685714
0
35
1
35
35
0.142857
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
7
1d98de36b51912212cbeec045cba61df6188c6ac
1,945
py
Python
test_1D.py
jl2264/Flood44
e2b83a6e7da86d200d9e63394e346211e27536c7
[ "MIT" ]
null
null
null
test_1D.py
jl2264/Flood44
e2b83a6e7da86d200d9e63394e346211e27536c7
[ "MIT" ]
null
null
null
test_1D.py
jl2264/Flood44
e2b83a6e7da86d200d9e63394e346211e27536c7
[ "MIT" ]
1
2022-02-06T23:00:09.000Z
2022-02-06T23:00:09.000Z
from floodsystem.geo import rivers_with_station from floodsystem.station import MonitoringStation def test_rivers_with_station_no_duplicate(): stations = [] s1 = MonitoringStation( station_id=1, measure_id=1, label='label1', coord=(float(52.2053), float(0.1218)), typical_range='typical_range', river='river1', town='town') s2 = MonitoringStation( station_id=2, measure_id=1, label='label2', coord=(float(2.2053), float(10.1218)), typical_range='typical_range', river='river2', town='town') s3 = MonitoringStation( station_id=3, measure_id=1, label='label3', coord=(float(52.2153), float(0.1318)), typical_range='typical_range', river='river3', town='town') stations.append(s1) stations.append(s2) stations.append(s3) list_test_1D = rivers_with_station(stations) assert len(list_test_1D) == 3 # all of the station rivers def test_rivers_with_station_with_duplicate(): stations = [] s1 = MonitoringStation( station_id=1, measure_id=1, label='label1', coord=(float(52.2053), float(0.1218)), typical_range='typical_range', river='river1', town='town') s2 = MonitoringStation( station_id=2, measure_id=1, label='label2', coord=(float(2.2053), float(10.1218)), typical_range='typical_range', river='river2', town='town') s3 = MonitoringStation( station_id=3, measure_id=1, label='label3', coord=(float(52.2153), float(0.1318)), typical_range='typical_range', river='river1', town='town') stations.append(s1) stations.append(s2) stations.append(s3) list_test_1D = rivers_with_station(stations) assert len(list_test_1D) == 2 # all of the station rivers with one duplicate assert list_test_1D[0] == s1.river assert list_test_1D[1] == s2.river assert s1.river == s3.river
22.616279
82
0.653985
254
1,945
4.80315
0.19685
0.118033
0.127869
0.07377
0.84918
0.77541
0.77541
0.77541
0.770492
0.770492
0
0.075718
0.212339
1,945
85
83
22.882353
0.720627
0.03599
0
0.850746
0
0
0.092998
0
0
0
0
0
0.074627
1
0.029851
false
0
0.029851
0
0.059701
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
1daa3aa15bbf2d249439189e5cd6e4758de130df
45,169
py
Python
SBaaS_thermodynamics/stage03_quantification_dG_r_query.py
dmccloskey/SBaaS_thermodynamics
0eeed0191f952ea0226ab8bbc234a30638fb2f9f
[ "MIT" ]
null
null
null
SBaaS_thermodynamics/stage03_quantification_dG_r_query.py
dmccloskey/SBaaS_thermodynamics
0eeed0191f952ea0226ab8bbc234a30638fb2f9f
[ "MIT" ]
null
null
null
SBaaS_thermodynamics/stage03_quantification_dG_r_query.py
dmccloskey/SBaaS_thermodynamics
0eeed0191f952ea0226ab8bbc234a30638fb2f9f
[ "MIT" ]
null
null
null
#SBaaS models from .stage03_quantification_dG_r_postgresql_models import * #SBaaS base from SBaaS_base.sbaas_base import sbaas_base from SBaaS_base.sbaas_base_query_update import sbaas_base_query_update from SBaaS_base.sbaas_base_query_drop import sbaas_base_query_drop from SBaaS_base.sbaas_base_query_initialize import sbaas_base_query_initialize from SBaaS_base.sbaas_base_query_insert import sbaas_base_query_insert from SBaaS_base.sbaas_base_query_select import sbaas_base_query_select from SBaaS_base.sbaas_base_query_delete import sbaas_base_query_delete from SBaaS_base.sbaas_template_query import sbaas_template_query #Resources #from math import copysign class stage03_quantification_dG_r_query(sbaas_template_query): def initialize_supportedTables(self): '''Set the supported tables dict for data_stage03_quantification_dG_r ''' tables_supported = { 'data_stage03_quantification_dG0_r':data_stage03_quantification_dG0_r, 'data_stage03_quantification_dG_r':data_stage03_quantification_dG_r, 'data_stage03_quantification_tcc':data_stage03_quantification_tcc, 'data_stage03_quantification_dG_r_comparison':data_stage03_quantification_dG_r_comparison, }; self.set_supportedTables(tables_supported); ## Query from data_stage03_quantification_dG0_r # query rows from data_stage03_quantificaton_dG0_r def get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDG0r(self,experiment_id_I, model_id_I, time_point_I, sample_name_abbreviation_I, measured_dG_f_coverage_criteria_I=0.0): '''Query rows that are used from the dG_r''' try: #data = self.session.query(data_stage03_quantification_dG0_r).filter( # data_stage03_quantification_dG0_r.model_id.like(model_id_I), # data_stage03_quantification_dG0_r.time_point.like(time_point_I), # data_stage03_quantification_dG0_r.sample_name_abbreviation.like(sample_name_abbreviation_I), # data_stage03_quantification_dG0_r.experiment_id.like(experiment_id_I), # 
data_stage03_quantification_dG0_r.model_id.like(data_stage03_quantification_tcc.model_id), # data_stage03_quantification_dG0_r.time_point.like(data_stage03_quantification_tcc.time_point), # data_stage03_quantification_dG0_r.sample_name_abbreviation.like(data_stage03_quantification_tcc.sample_name_abbreviation), # data_stage03_quantification_dG0_r.experiment_id.like(data_stage03_quantification_tcc.experiment_id), # data_stage03_quantification_dG0_r.rxn_id.like(data_stage03_quantification_tcc.rxn_id), # data_stage03_quantification_tcc.measured_dG_f_coverage>measured_dG_f_coverage_criteria_I, # data_stage03_quantification_dG0_r.used_.is_(True)).all(); data = self.session.query(data_stage03_quantification_dG0_r).filter( data_stage03_quantification_dG0_r.model_id.like(model_id_I), data_stage03_quantification_dG0_r.time_point.like(time_point_I), data_stage03_quantification_dG0_r.sample_name_abbreviation.like(sample_name_abbreviation_I), data_stage03_quantification_dG0_r.experiment_id.like(experiment_id_I), data_stage03_quantification_dG0_r.used_.is_(True)).all(); rows_O = {}; if data: for d in data: if d.rxn_id in rows_O: print('duplicate rxn_id found!'); else: rows_O[d.rxn_id]={ 'Keq_lb':d.Keq_lb, 'Keq_ub':d.Keq_ub, 'dG_r':d.dG0_r, 'dG_r_var':d.dG0_r_var, 'dG_r_units':d.dG0_r_units, 'dG_r_lb':d.dG0_r_lb, 'dG_r_ub':d.dG0_r_ub}; return rows_O; except SQLAlchemyError as e: print(e); # update rows of data_stage03_quantification_dG0_r def update_dataStage03DG0r(self,data_I): '''update rows of data_stage03_quantification_dG0_r''' if data_I: for d in data_I: try: data_update = self.session.query(data_stage03_quantification_dG0_r).filter( data_stage03_quantification_dG0_r.experiment_id.like(d['experiment_id']), data_stage03_quantification_dG0_r.model_id.like(d['model_id']), data_stage03_quantification_dG0_r.rxn_id.like(d['rxn_id']), data_stage03_quantification_dG_r.sample_name_abbreviation.like(d['sample_name_abbreviation']), 
data_stage03_quantification_dG_r.time_point.like(d['time_point'])).update( { 'Keq_lb':d['Keq_lb'], 'Keq_ub':d['Keq_ub'], 'dG0_r':d['dG0_r'], 'dG0_r_var':d['dG0_r_var'], 'dG0_r_units':d['dG0_r_units'], 'dG0_r_lb':d['dG0_r_lb'], 'dG0_r_ub':d['dG0_r_ub'], 'used_':d['used_'], 'comment_':d['comment_']}, synchronize_session=False); except SQLAlchemyError as e: print(e); self.session.commit(); ## Query from data_stage03_quantification_dG_r # query rows from data_stage03_quantification_dG_r def get_rows_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDGr(self,experiment_id_I, model_id_I, time_point_I, sample_name_abbreviation_I, measured_concentration_coverage_criteria_I=0.5, measured_dG_f_coverage_criteria_I=0.99): '''Query rows that are used from the dG_r''' try: data = self.session.query(data_stage03_quantification_dG_r).filter( data_stage03_quantification_dG_r.model_id.like(model_id_I), data_stage03_quantification_dG_r.time_point.like(time_point_I), data_stage03_quantification_dG_r.sample_name_abbreviation.like(sample_name_abbreviation_I), data_stage03_quantification_dG_r.experiment_id.like(experiment_id_I), data_stage03_quantification_dG_r.model_id.like(data_stage03_quantification_tcc.model_id), data_stage03_quantification_dG_r.time_point.like(data_stage03_quantification_tcc.time_point), data_stage03_quantification_dG_r.sample_name_abbreviation.like(data_stage03_quantification_tcc.sample_name_abbreviation), data_stage03_quantification_dG_r.experiment_id.like(data_stage03_quantification_tcc.experiment_id), data_stage03_quantification_dG_r.rxn_id.like(data_stage03_quantification_tcc.rxn_id), data_stage03_quantification_tcc.measured_concentration_coverage>measured_concentration_coverage_criteria_I, data_stage03_quantification_tcc.measured_dG_f_coverage>measured_dG_f_coverage_criteria_I, data_stage03_quantification_tcc.used_.is_(True), data_stage03_quantification_dG_r.used_.is_(True)).all(); rows_O = []; if data: for d in data: 
data_tmp = {'experiment_id':d.experiment_id, 'model_id':d.model_id, 'sample_name_abbreviation':d.sample_name_abbreviation, 'time_point':d.time_point, 'rxn_id':d.rxn_id, 'Keq_lb':d.Keq_lb, 'Keq_ub':d.Keq_ub, 'dG_r':d.dG_r, 'dG_r_var':d.dG_r_var, 'dG_r_units':d.dG_r_units, 'dG_r_lb':d.dG_r_lb, 'dG_r_ub':d.dG_r_ub, 'displacement_lb':d.displacement_lb, 'displacement_ub':d.displacement_ub, 'Q_lb':d.Q_lb, 'Q_ub':d.Q_ub, 'used_':d.used_, 'comment_':d.comment_}; rows_O.append(data_tmp); return rows_O; except SQLAlchemyError as e: print(e); def get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDGr(self,experiment_id_I, model_id_I, time_point_I, sample_name_abbreviation_I, measured_concentration_coverage_criteria_I=0.0, measured_dG_f_coverage_criteria_I=0.0): '''Query rows that are used from the dG_r''' try: #data = self.session.query(data_stage03_quantification_dG_r).filter( # data_stage03_quantification_dG_r.model_id.like(model_id_I), # data_stage03_quantification_dG_r.time_point.like(time_point_I), # data_stage03_quantification_dG_r.sample_name_abbreviation.like(sample_name_abbreviation_I), # data_stage03_quantification_dG_r.experiment_id.like(experiment_id_I), # data_stage03_quantification_dG_r.model_id.like(data_stage03_quantification_tcc.model_id), # data_stage03_quantification_dG_r.time_point.like(data_stage03_quantification_tcc.time_point), # data_stage03_quantification_dG_r.sample_name_abbreviation.like(data_stage03_quantification_tcc.sample_name_abbreviation), # data_stage03_quantification_dG_r.experiment_id.like(data_stage03_quantification_tcc.experiment_id), # data_stage03_quantification_dG_r.rxn_id.like(data_stage03_quantification_tcc.rxn_id), # data_stage03_quantification_tcc.measured_concentration_coverage>measured_concentration_coverage_criteria_I, # data_stage03_quantification_tcc.measured_dG_f_coverage>measured_dG_f_coverage_criteria_I, # data_stage03_quantification_dG_r.used_.is_(True)).all(); data = 
self.session.query(data_stage03_quantification_dG_r).filter( data_stage03_quantification_dG_r.model_id.like(model_id_I), data_stage03_quantification_dG_r.time_point.like(time_point_I), data_stage03_quantification_dG_r.sample_name_abbreviation.like(sample_name_abbreviation_I), data_stage03_quantification_dG_r.experiment_id.like(experiment_id_I), data_stage03_quantification_dG_r.used_.is_(True)).all(); rows_O = {}; if data: for d in data: if d.rxn_id in rows_O: print('duplicate rxn_id found!'); else: rows_O[d.rxn_id]={ 'Keq_lb':d.Keq_lb, 'Keq_ub':d.Keq_ub, 'dG_r':d.dG_r, 'dG_r_var':d.dG_r_var, 'dG_r_units':d.dG_r_units, 'dG_r_lb':d.dG_r_lb, 'dG_r_ub':d.dG_r_ub}; return rows_O; except SQLAlchemyError as e: print(e); def get_rowsEscherDGrLbUb_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDGr(self,experiment_id_I, model_id_I, time_point_I, sample_name_abbreviation_I, measured_concentration_coverage_criteria_I=0.5, measured_dG_f_coverage_criteria_I=0.99): '''Query rows that are used from the dG_r''' try: data = self.session.query(data_stage03_quantification_dG_r).filter( data_stage03_quantification_dG_r.model_id.like(model_id_I), data_stage03_quantification_dG_r.time_point.like(time_point_I), data_stage03_quantification_dG_r.sample_name_abbreviation.like(sample_name_abbreviation_I), data_stage03_quantification_dG_r.experiment_id.like(experiment_id_I), data_stage03_quantification_dG_r.model_id.like(data_stage03_quantification_tcc.model_id), data_stage03_quantification_dG_r.time_point.like(data_stage03_quantification_tcc.time_point), data_stage03_quantification_dG_r.sample_name_abbreviation.like(data_stage03_quantification_tcc.sample_name_abbreviation), data_stage03_quantification_dG_r.experiment_id.like(data_stage03_quantification_tcc.experiment_id), data_stage03_quantification_dG_r.rxn_id.like(data_stage03_quantification_tcc.rxn_id), 
data_stage03_quantification_tcc.measured_concentration_coverage>measured_concentration_coverage_criteria_I, data_stage03_quantification_tcc.measured_dG_f_coverage>measured_dG_f_coverage_criteria_I, data_stage03_quantification_tcc.used_.is_(True), data_stage03_quantification_dG_r.used_.is_(True)).all(); rows_O = [None,None]; rows_O[0] = {}; rows_O[1] = {} if data: for d in data: rows_O[0][d.rxn_id]=d.dG_r_lb; rows_O[1][d.rxn_id]=d.dG_r_ub; return rows_O; except SQLAlchemyError as e: print(e); def get_rowsEscherDGr_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDGr(self,experiment_id_I, model_id_I, time_point_I, sample_name_abbreviation_I, measured_concentration_coverage_criteria_I=0.5, measured_dG_f_coverage_criteria_I=0.99): '''Query rows that are used from the dG_r''' try: data = self.session.query(data_stage03_quantification_dG_r).filter( data_stage03_quantification_dG_r.model_id.like(model_id_I), data_stage03_quantification_dG_r.time_point.like(time_point_I), data_stage03_quantification_dG_r.sample_name_abbreviation.like(sample_name_abbreviation_I), data_stage03_quantification_dG_r.experiment_id.like(experiment_id_I), data_stage03_quantification_dG_r.model_id.like(data_stage03_quantification_tcc.model_id), data_stage03_quantification_dG_r.time_point.like(data_stage03_quantification_tcc.time_point), data_stage03_quantification_dG_r.sample_name_abbreviation.like(data_stage03_quantification_tcc.sample_name_abbreviation), data_stage03_quantification_dG_r.experiment_id.like(data_stage03_quantification_tcc.experiment_id), data_stage03_quantification_dG_r.rxn_id.like(data_stage03_quantification_tcc.rxn_id), data_stage03_quantification_tcc.measured_concentration_coverage>measured_concentration_coverage_criteria_I, data_stage03_quantification_tcc.measured_dG_f_coverage>measured_dG_f_coverage_criteria_I, data_stage03_quantification_tcc.used_.is_(True), data_stage03_quantification_dG_r.used_.is_(True)).all(); rows_O = {} if data: for 
d in data: rows_O[d.rxn_id]=(d.dG_r_lb+d.dG_r_ub)/2; return rows_O; except SQLAlchemyError as e: print(e); # update rows of data_stage03_quantification_dG_r def update_dataStage03DGr(self,data_I): '''update rows of data_stage03_quantification_dG_r''' if data_I: for d in data_I: try: data_update = self.session.query(data_stage03_quantification_dG_r).filter( data_stage03_quantification_dG_r.experiment_id.like(d['experiment_id']), data_stage03_quantification_dG_r.model_id.like(d['model_id']), data_stage03_quantification_dG_r.rxn_id.like(d['rxn_id']), data_stage03_quantification_dG_r.sample_name_abbreviation.like(d['sample_name_abbreviation']), data_stage03_quantification_dG_r.time_point.like(d['time_point'])).update( { 'Keq_lb':d['Keq_lb'], 'Keq_ub':d['Keq_ub'], 'dG_r':d['dG_r'], 'dG_r_var':d['dG_r_var'], 'dG_r_units':d['dG_r_units'], 'dG_r_lb':d['dG_r_lb'], 'dG_r_ub':d['dG_r_ub'], 'displacement_lb':d['displacement_lb'], 'displacement_ub':d['displacement_ub'], 'Q_lb':d['Q_lb'], 'Q_ub':d['Q_ub'], 'used_':d['used_'], 'comment_':d['comment_']}, synchronize_session=False); except SQLAlchemyError as e: print(e); self.session.commit(); ## Query from data_stage03_quantification_tcc # query rows from data_stage03_quantification_tcc def get_infeasibleReactions_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationTCC(self,experiment_id_I, model_id_I, time_point_I, sample_name_abbreviation_I, feasible_I=False): '''Query rows that are used from the dG_r''' try: data = self.session.query( data_stage03_quantification_tcc.rxn_id).filter( data_stage03_quantification_dG_r.model_id.like(model_id_I), data_stage03_quantification_dG_r.time_point.like(time_point_I), data_stage03_quantification_dG_r.sample_name_abbreviation.like(sample_name_abbreviation_I), data_stage03_quantification_dG_r.experiment_id.like(experiment_id_I), data_stage03_quantification_tcc.feasible.is_(feasible_I), data_stage03_quantification_tcc.used_.is_(True), 
data_stage03_quantification_dG_r.used_.is_(True)).group_by( data_stage03_quantification_tcc.rxn_id).order_by( data_stage03_quantification_tcc.rxn_id.asc()).all(); rows_O = []; if data: for d in data: data_tmp = {'experiment_id':d.data_stage03_quantification_dG_r.experiment_id, 'model_id':d.data_stage03_quantification_dG_r.model_id, 'sample_name_abbreviation':d.data_stage03_quantification_dG_r.sample_name_abbreviation, 'time_point':d.data_stage03_quantification_dG_r.time_point, 'rxn_id':d.data_stage03_quantification_dG_r.rxn_id, 'dG_r_units':d.data_stage03_quantification_dG_r.dG_r_units, 'dG_r_lb':d.data_stage03_quantification_dG_r.dG_r_lb, 'dG_r_ub':d.data_stage03_quantification_dG_r.dG_r_ub, 'displacement_lb':d.data_stage03_quantification_dG_r.displacement_lb, 'displacement_ub':d.data_stage03_quantification_dG_r.displacement_ub, 'feasible':d.feasible, 'used_':d.data_stage03_quantification_dG_r.used_, 'comment_':d.data_stage03_quantification_dG_r.comment_}; rows_O.append(data_tmp); return rows_O; except SQLAlchemyError as e: print(e); # query rows from data_stage03_quantification_tcc def get_rows_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationTCC(self,experiment_id_I, model_id_I, time_point_I, sample_name_abbreviation_I, measured_concentration_coverage_criteria_I=0.5, measured_dG_f_coverage_criteria_I=0.99): '''Query rows that are used from the dG_r''' try: data = self.session.query(data_stage03_quantification_dG_r, data_stage03_quantification_tcc.measured_concentration_coverage, data_stage03_quantification_tcc.measured_dG_f_coverage, data_stage03_quantification_tcc.feasible).filter( data_stage03_quantification_dG_r.model_id.like(model_id_I), data_stage03_quantification_dG_r.time_point.like(time_point_I), data_stage03_quantification_dG_r.sample_name_abbreviation.like(sample_name_abbreviation_I), data_stage03_quantification_dG_r.experiment_id.like(experiment_id_I), 
data_stage03_quantification_dG_r.model_id.like(data_stage03_quantification_tcc.model_id), data_stage03_quantification_dG_r.time_point.like(data_stage03_quantification_tcc.time_point), data_stage03_quantification_dG_r.sample_name_abbreviation.like(data_stage03_quantification_tcc.sample_name_abbreviation), data_stage03_quantification_dG_r.experiment_id.like(data_stage03_quantification_tcc.experiment_id), data_stage03_quantification_dG_r.rxn_id.like(data_stage03_quantification_tcc.rxn_id), data_stage03_quantification_tcc.measured_concentration_coverage>measured_concentration_coverage_criteria_I, data_stage03_quantification_tcc.measured_dG_f_coverage>measured_dG_f_coverage_criteria_I, data_stage03_quantification_tcc.used_.is_(True), data_stage03_quantification_dG_r.used_.is_(True)).all(); rows_O = []; if data: for d in data: data_tmp = {'experiment_id':d.data_stage03_quantification_dG_r.experiment_id, 'model_id':d.data_stage03_quantification_dG_r.model_id, 'sample_name_abbreviation':d.data_stage03_quantification_dG_r.sample_name_abbreviation, 'time_point':d.data_stage03_quantification_dG_r.time_point, 'rxn_id':d.data_stage03_quantification_dG_r.rxn_id, 'dG_r':d.data_stage03_quantification_dG_r.dG_r, 'dG_r_units':d.data_stage03_quantification_dG_r.dG_r_units, 'dG_r_lb':d.data_stage03_quantification_dG_r.dG_r_lb, 'dG_r_ub':d.data_stage03_quantification_dG_r.dG_r_ub, 'displacement_lb':d.data_stage03_quantification_dG_r.displacement_lb, 'displacement_ub':d.data_stage03_quantification_dG_r.displacement_ub, 'feasible':d.feasible, 'used_':d.data_stage03_quantification_dG_r.used_, 'comment_':d.data_stage03_quantification_dG_r.comment_}; rows_O.append(data_tmp); return rows_O; except SQLAlchemyError as e: print(e); def get_row_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationTCC(self,experiment_id_I, model_id_I, time_point_I, sample_name_abbreviation_I, rxn_id_I, dG_r_lb_I, dG_r_ub_I, measured_concentration_coverage_criteria_I=0.5, 
measured_dG_f_coverage_criteria_I=0.99): '''Query rows that are used from the dG_r Assumption: dG_r_lb < dG_r_ub''' try: data = self.session.query(data_stage03_quantification_dG_r, data_stage03_quantification_tcc.measured_concentration_coverage, data_stage03_quantification_tcc.measured_dG_f_coverage, data_stage03_quantification_tcc.feasible).filter( data_stage03_quantification_dG_r.rxn_id.like(rxn_id_I), data_stage03_quantification_dG_r.model_id.like(model_id_I), data_stage03_quantification_dG_r.time_point.like(time_point_I), data_stage03_quantification_dG_r.sample_name_abbreviation.like(sample_name_abbreviation_I), data_stage03_quantification_dG_r.experiment_id.like(experiment_id_I), data_stage03_quantification_dG_r.model_id.like(data_stage03_quantification_tcc.model_id), data_stage03_quantification_dG_r.time_point.like(data_stage03_quantification_tcc.time_point), data_stage03_quantification_dG_r.sample_name_abbreviation.like(data_stage03_quantification_tcc.sample_name_abbreviation), data_stage03_quantification_dG_r.experiment_id.like(data_stage03_quantification_tcc.experiment_id), data_stage03_quantification_dG_r.rxn_id.like(data_stage03_quantification_tcc.rxn_id), data_stage03_quantification_dG_r.used_.is_(True), ## constraint for statistical significance #or_(data_stage03_quantification_dG_r.dG_r_ub < dG_r_lb_I, # data_stage03_quantification_dG_r.dG_r_lb > dG_r_ub_I), ## constraint for biological significance #or_(copysign(1.0,data_stage03_quantification_dG_r.dG_r_lb) != copysign(1.0,dG_r_lb_I), # copysign(1.0,data_stage03_quantification_dG_r.dG_r_ub) != copysign(1.0,dG_r_ub_I)), data_stage03_quantification_tcc.measured_concentration_coverage>measured_concentration_coverage_criteria_I, data_stage03_quantification_tcc.measured_dG_f_coverage>measured_dG_f_coverage_criteria_I, data_stage03_quantification_tcc.used_.is_(True)).all(); rows_O = {}; if data: for d in data: data_tmp = {'experiment_id':d.data_stage03_quantification_dG_r.experiment_id, 
'model_id':d.data_stage03_quantification_dG_r.model_id, 'sample_name_abbreviation':d.data_stage03_quantification_dG_r.sample_name_abbreviation, 'time_point':d.data_stage03_quantification_dG_r.time_point, 'rxn_id':d.data_stage03_quantification_dG_r.rxn_id, 'dG_r_units':d.data_stage03_quantification_dG_r.dG_r_units, 'dG_r_lb':d.data_stage03_quantification_dG_r.dG_r_lb, 'dG_r_ub':d.data_stage03_quantification_dG_r.dG_r_ub, 'displacement_lb':d.data_stage03_quantification_dG_r.displacement_lb, 'displacement_ub':d.data_stage03_quantification_dG_r.displacement_ub, 'feasible':d.feasible, 'used_':d.data_stage03_quantification_dG_r.used_, 'comment_':d.data_stage03_quantification_dG_r.comment_}; rows_O.update(data_tmp); return rows_O; except SQLAlchemyError as e: print(e); except TypeError as e: print(e); def get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationTCC(self,experiment_id_I, model_id_I, time_point_I, sample_name_abbreviation_I, measured_concentration_coverage_criteria_I=0.5, measured_dG_f_coverage_criteria_I=0.99): '''Query rows that are used from the tcc''' try: data = self.session.query(data_stage03_quantification_tcc.rxn_id, data_stage03_quantification_tcc.measured_concentration_coverage, data_stage03_quantification_tcc.measured_dG_f_coverage, data_stage03_quantification_tcc.feasible).filter( data_stage03_quantification_tcc.model_id.like(model_id_I), data_stage03_quantification_tcc.time_point.like(time_point_I), data_stage03_quantification_tcc.sample_name_abbreviation.like(sample_name_abbreviation_I), data_stage03_quantification_tcc.experiment_id.like(experiment_id_I), data_stage03_quantification_tcc.measured_concentration_coverage>measured_concentration_coverage_criteria_I, data_stage03_quantification_tcc.measured_dG_f_coverage>measured_dG_f_coverage_criteria_I, data_stage03_quantification_tcc.used_.is_(True)).all(); measured_concentration_coverage_O = {}; measured_dG_f_coverage_O = {}; feasible_O = {}; if data: 
for d in data: if d.rxn_id in measured_concentration_coverage_O: print('duplicate rxn_id found!'); else: measured_concentration_coverage_O[d.rxn_id]=d.measured_concentration_coverage measured_dG_f_coverage_O[d.rxn_id]=d.measured_dG_f_coverage feasible_O[d.rxn_id]=d.feasible return measured_concentration_coverage_O,measured_dG_f_coverage_O,feasible_O; except SQLAlchemyError as e: print(e); # update rows of data_stage03_quantification_tcc def update_dataStage03Tcc(self,data_I): '''update rows of data_stage03_quantification_tcc''' if data_I: for d in data_I: try: data_update = self.session.query(data_stage03_quantification_tcc).filter( data_stage03_quantification_tcc.experiment_id.like(d['experiment_id']), data_stage03_quantification_tcc.model_id.like(d['model_id']), data_stage03_quantification_tcc.rxn_id.like(d['rxn_id']), data_stage03_quantification_dG_r.sample_name_abbreviation.like(d['sample_name_abbreviation']), data_stage03_quantification_dG_r.time_point.like(d['time_point'])).update( { 'feasible':d['feasible'], 'measured_concentration_coverage_criteria':d['measured_concentration_coverage_criteria'], 'measured_dG_f_coverage_criteria':d['measured_dG_f_coverage_criteria'], 'measured_concentration_coverage':d['measured_concentration_coverage'], 'measured_dG_f_coverage':d['measured_dG_f_coverage'], 'used_':d['used_'], 'comment_':d['comment_']}, synchronize_session=False); except SQLAlchemyError as e: print(e); self.session.commit(); def add_dataStage03QuantificationDG0r(self, data_I): '''add rows of data_stage03_quantification_dG0_r''' if data_I: for d in data_I: try: data_add = data_stage03_quantification_dG0_r(d #d['experiment_id'], #d['model_id'], #d['sample_name_abbreviation'], #d['time_point'], #d['rxn_id'], #d['Keq_lb'], #d['Keq_ub'], #d['dG0_r'], #d['dG0_r_var'], #d['dG0_r_units'], #d['dG0_r_lb'], #d['dG0_r_ub'], #d['used_'], #d['comment_'] ); self.session.add(data_add); except SQLAlchemyError as e: print(e); self.session.commit(); def 
update_dataStage03QuantificationDG0r(self,data_I): #Not yet tested '''update rows of data_stage03_quantification_dG0_r''' if data_I: for d in data_I: try: data_update = self.session.query(data_stage03_quantification_dG0_r).filter( standards.id.like(d['id'])).update( {'experiment_id':d['experiment_id'], 'model_id':d['model_id'], 'sample_name_abbreviation':d['sample_name_abbreviation'], 'time_point':d['time_point'], 'rxn_id':d['rxn_id'], 'Keq_lb':d['Keq_lb'], 'Keq_ub':d['Keq_ub'], 'dG0_r':d['dG0_r'], 'dG0_r_var':d['dG0_r_var'], 'dG0_r_units':d['dG0_r_units'], 'dG0_r_lb':d['dG0_r_lb'], 'dG0_r_ub':d['dG0_r_ub'], 'used_':d['used_'], 'comment_':d['comment_']}, synchronize_session=False); except SQLAlchemyError as e: print(e); self.session.commit(); def add_dataStage03QuantificationDGr(self, data_I): '''add rows of data_stage03_quantification_dG_r''' if data_I: for d in data_I: try: data_add = data_stage03_quantification_dG_r(d #d['experiment_id'], #d['model_id'], #d['sample_name_abbreviation'], #d['time_point'], #d['rxn_id'], #d['Keq_lb'], #d['Keq_ub'], #d['dG_r'], #d['dG_r_var'], #d['dG_r_units'], #d['dG_r_lb'], #d['dG_r_ub'], #d['displacement_lb'], #d['displacement_ub'], #d['Q_lb'], #d['Q_ub'], #d['used_'], #d['comment_'] ); self.session.add(data_add); except SQLAlchemyError as e: print(e); self.session.commit(); def update_dataStage03QuantificationDGr(self,data_I): #Not yet tested '''update rows of data_stage03_quantification_dG_r''' if data_I: for d in data_I: try: data_update = self.session.query(data_stage03_quantification_dG_r).filter( standards.id.like(d['id'])).update( {'experiment_id':d['experiment_id'], 'model_id':d['model_id'], 'sample_name_abbreviation':d['sample_name_abbreviation'], 'time_point':d['time_point'], 'rxn_id':d['rxn_id'], 'Keq_lb':d['Keq_lb'], 'Keq_ub':d['Keq_ub'], 'dG_r':d['dG_r'], 'dG_r_var':d['dG_r_var'], 'dG_r_units':d['dG_r_units'], 'dG_r_lb':d['dG_r_lb'], 'dG_r_ub':d['dG_r_ub'], 'displacement_lb':d['displacement_lb'], 
'displacement_ub':d['displacement_ub'], 'Q_lb':d['Q_lb'], 'Q_ub':d['Q_ub'], 'used_':d['used_'], 'comment_':d['comment_']}, synchronize_session=False); except SQLAlchemyError as e: print(e); self.session.commit(); def add_dataStage03QuantificationTcc(self, data_I): '''add rows of data_stage03_quantification_tcc''' if data_I: for d in data_I: try: data_add = data_stage03_quantification_tcc(d #d['experiment_id'], #d['model_id'], #d['sample_name_abbreviation'], #d['time_point'], #d['rxn_id'], #d['feasible'], #d['measured_concentration_coverage_criteria'], #d['measured_dG_f_coverage_criteria'], #d['measured_concentration_coverage'], #d['measured_dG_f_coverage'], #d['used_'], #d['comment_'] ); self.session.add(data_add); except SQLAlchemyError as e: print(e); self.session.commit(); def update_dataStage03QuantificationTcc(self,data_I): #Not yet tested '''update rows of data_stage03_quantification_tcc''' if data_I: for d in data_I: try: data_update = self.session.query(data_stage03_quantification_tcc).filter( standards.id.like(d['id'])).update( {'experiment_id':d['experiment_id'], 'model_id':d['model_id'], 'sample_name_abbreviation':d['sample_name_abbreviation'], 'time_point':d['time_point'], 'rxn_id':d['rxn_id'], 'feasible':d['feasible'], 'measured_concentration_coverage_criteria':d['measured_concentration_coverage_criteria'], 'measured_dG_f_coverage_criteria':d['measured_dG_f_coverage_criteria'], 'measured_concentration_coverage':d['measured_concentration_coverage'], 'measured_dG_f_coverage':d['measured_dG_f_coverage'], 'used_':d['used_'], 'comment_':d['comment_']}, synchronize_session=False); except SQLAlchemyError as e: print(e); self.session.commit(); def reset_dataStage03_quantification_dG_r_all(self,experiment_id_I = None,simulation_id_I=None): try: if experiment_id_I: reset = self.session.query(data_stage03_quantification_dG0_r).filter(data_stage03_quantification_dG0_r.experiment_id.like(experiment_id_I)).delete(synchronize_session=False); reset = 
self.session.query(data_stage03_quantification_dG_r).filter(data_stage03_quantification_dG_r.experiment_id.like(experiment_id_I)).delete(synchronize_session=False); reset = self.session.query(data_stage03_quantification_tcc).filter(data_stage03_quantification_tcc.experiment_id.like(experiment_id_I)).delete(synchronize_session=False); self.session.commit(); except SQLAlchemyError as e: print(e); def reset_dataStage03_quantification_dG0_r(self,experiment_id_I = None,simulation_id_I=None): try: if experiment_id_I: reset = self.session.query(data_stage03_quantification_dG0_r).filter(data_stage03_quantification_dG0_r.experiment_id.like(experiment_id_I)).delete(synchronize_session=False); self.session.commit(); except SQLAlchemyError as e: print(e); def reset_dataStage03_quantification_dG_r(self,experiment_id_I = None,simulation_id_I=None): try: if experiment_id_I: reset = self.session.query(data_stage03_quantification_dG_r).filter(data_stage03_quantification_dG_r.experiment_id.like(experiment_id_I)).delete(synchronize_session=False); self.session.commit(); except SQLAlchemyError as e: print(e); def reset_dataStage03_quantification_tcc(self,experiment_id_I = None,simulation_id_I=None): try: if experiment_id_I: reset = self.session.query(data_stage03_quantification_tcc).filter(data_stage03_quantification_tcc.experiment_id.like(experiment_id_I)).delete(synchronize_session=False); self.session.commit(); except SQLAlchemyError as e: print(e); def reset_dataStage03_quantification_dG_r_comparison(self,analysis_id_I = None): try: if analysis_id_I: reset = self.session.query(data_stage03_quantification_dG_r_comparison).filter( data_stage03_quantification_dG_r_comparison.analysis_id.like(analysis_id_I)).delete(synchronize_session=False); self.session.commit(); except SQLAlchemyError as e: print(e);
65.844023
190
0.554119
4,636
45,169
4.875539
0.027826
0.258284
0.305269
0.159271
0.945007
0.935363
0.911782
0.90196
0.883644
0.864752
0
0.025625
0.375346
45,169
685
191
65.940146
0.775474
0.107419
0
0.788256
0
0
0.06658
0.023606
0
0
0
0
0
1
0.042705
false
0
0.016014
0
0.076512
0.048043
0
0
0
null
1
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
d52b0d5eb3843c29566679026847ae177dc5fb82
307
py
Python
platform/hwconf_data/efr32mg1p/modules/WDOG/__init__.py
lenloe1/v2.7
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
[ "Zlib" ]
null
null
null
platform/hwconf_data/efr32mg1p/modules/WDOG/__init__.py
lenloe1/v2.7
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
[ "Zlib" ]
1
2020-08-25T02:36:22.000Z
2020-08-25T02:36:22.000Z
platform/hwconf_data/efr32mg1p/modules/WDOG/__init__.py
lenloe1/v2.7
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
[ "Zlib" ]
1
2020-08-25T01:56:04.000Z
2020-08-25T01:56:04.000Z
import efr32mg1p.halconfig.halconfig_types as halconfig_types import efr32mg1p.halconfig.halconfig_dependency as halconfig_dependency import efr32mg1p.PythonSnippet.ExporterModel as ExporterModel import efr32mg1p.PythonSnippet.RuntimeModel as RuntimeModel import efr32mg1p.PythonSnippet.Metadata as Metadata
61.4
71
0.905537
34
307
8.058824
0.294118
0.273723
0.306569
0.240876
0
0
0
0
0
0
0
0.052083
0.061889
307
5
72
61.4
0.899306
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
d5a343d753defa3c4332c241341060bdd8d38479
6,575
py
Python
tests/test_search.py
timmahrt/pysle
b81d0be0bf3a9994ad449f8802db0e6f5cb36265
[ "MIT" ]
30
2016-04-20T22:18:59.000Z
2022-03-07T13:34:16.000Z
tests/test_search.py
timmahrt/pysle
b81d0be0bf3a9994ad449f8802db0e6f5cb36265
[ "MIT" ]
14
2016-10-20T09:00:50.000Z
2022-01-03T13:50:28.000Z
tests/test_search.py
timmahrt/pysle
b81d0be0bf3a9994ad449f8802db0e6f5cb36265
[ "MIT" ]
6
2016-05-09T07:35:30.000Z
2021-04-14T18:09:35.000Z
import unittest
from typing import List

from pysle import phonetics
from pysle import isletool
from pysle import praattools
from pysle.utilities import errors


class VirtualIsle(isletool.Isle):
    """An Isle subclass backed by a tiny hard-coded lexicon.

    Overrides ``_load`` so tests never touch a real ISLE dictionary file;
    the ``_islePath`` argument is accepted but ignored.
    """

    def _load(self, _islePath):
        # Keys are words; values are lists of ISLE-format entry strings
        # ("word(pos,...) # pronunciation #").  "another" has two
        # pronunciation variants; "brown_cat" is a two-word entry.
        return {
            "another": [
                "another(dt,nn,prp) # ə . n ˈʌ . ð ɚ #",
                "another(dt,nn,prp) # ə . n ˈʌ ð . ə ɹ #",
            ],
            "any": ["any(dt) # ˈɛ . n i #"],
            "brown": ["brown(jj) # b ɹ ˈaʊ n #"],
            "brown_cat": ["brown_cat() # b ɹ ˈaʊ n # k ˌæ t˺ #"],
            "cat": ["cat(dt,nn,prp) # k ˌæ t˺ #"],
            "nominee": ["nominee(nn) # n ˌɑ . m ə . n ˈi #"],
        }


class TestSearch(unittest.TestCase):
    """Exercises each filtering option of ``Isle.search`` against VirtualIsle.

    Each test runs the same phone-pattern query with a filter set to its
    possible values ("ok"/"no"/"only", a number, a POS tag, or a bool) and
    checks both the match count and the matched words, in order.
    """

    def setUp(self):
        # Fresh in-memory dictionary for every test.
        self.isle = VirtualIsle()

    def test_num_syllables(self):
        """numSyllables restricts matches to words with that syllable count."""
        # No constraint: both "another" variants match, plus "any" and "nominee".
        results = [result for result in self.isle.search("VNV", numSyllables=None)]
        self.assertEqual(4, len(results))
        self.assertEqual("another", results[0]["word"])
        self.assertEqual("another", results[1]["word"])
        self.assertEqual("any", results[2]["word"])
        self.assertEqual("nominee", results[3]["word"])

        results = [result for result in self.isle.search("VNV", numSyllables=1)]
        self.assertEqual(0, len(results))

        results = [result for result in self.isle.search("VNV", numSyllables=2)]
        self.assertEqual(1, len(results))
        self.assertEqual("any", results[0]["word"])

        results = [result for result in self.isle.search("VNV", numSyllables=3)]
        self.assertEqual(3, len(results))
        self.assertEqual("another", results[0]["word"])
        self.assertEqual("another", results[1]["word"])
        self.assertEqual("nominee", results[2]["word"])

    def test_word_initial(self):
        """wordInitial filters on whether the match starts the word."""
        results = [result for result in self.isle.search("NV", wordInitial="ok")]
        self.assertEqual(4, len(results))
        self.assertEqual("another", results[0]["word"])
        self.assertEqual("another", results[1]["word"])
        self.assertEqual("any", results[2]["word"])
        self.assertEqual("nominee", results[3]["word"])

        # "no" still yields the same four words here — non-initial NV
        # sequences exist in each (e.g. "nominee" matches b/c of the
        # second 'n').
        results = [result for result in self.isle.search("NV", wordInitial="no")]
        self.assertEqual(4, len(results))
        self.assertEqual("another", results[0]["word"])
        self.assertEqual("another", results[1]["word"])
        self.assertEqual("any", results[2]["word"])
        self.assertEqual("nominee", results[3]["word"])  # b/c the second 'n'

        results = [result for result in self.isle.search("NV", wordInitial="only")]
        self.assertEqual(1, len(results))
        self.assertEqual("nominee", results[0]["word"])

    def test_word_final(self):
        """wordFinal filters on whether the match ends the word."""
        results = [result for result in self.isle.search("ɹ", wordFinal="ok")]
        self.assertEqual(3, len(results))
        self.assertEqual("another", results[0]["word"])
        self.assertEqual("brown", results[1]["word"])
        self.assertEqual("brown_cat", results[2]["word"])

        results = [result for result in self.isle.search("ɹ", wordFinal="no")]
        self.assertEqual(2, len(results))
        self.assertEqual("brown", results[0]["word"])
        self.assertEqual("brown_cat", results[1]["word"])

        # Only the second "another" variant ends in ɹ.
        results = [result for result in self.isle.search("ɹ", wordFinal="only")]
        self.assertEqual(1, len(results))
        self.assertEqual("another", results[0]["word"])

    def test_span_syllable(self):
        """spanSyllable filters on whether the match crosses a syllable break."""
        results = [result for result in self.isle.search("VD", spanSyllable="ok")]
        self.assertEqual(4, len(results))
        self.assertEqual("another", results[0]["word"])
        self.assertEqual("another", results[1]["word"])
        self.assertEqual("brown_cat", results[2]["word"])
        self.assertEqual("cat", results[3]["word"])

        results = [result for result in self.isle.search("VD", spanSyllable="no")]
        self.assertEqual(3, len(results))
        self.assertEqual("another", results[0]["word"])
        self.assertEqual("brown_cat", results[1]["word"])
        self.assertEqual("cat", results[2]["word"])

        results = [result for result in self.isle.search("VD", spanSyllable="only")]
        self.assertEqual(1, len(results))
        self.assertEqual("another", results[0]["word"])

    def test_stressed_syllable(self):
        """stressedSyllable filters on whether the match is in a stressed syllable."""
        results = [result for result in self.isle.search("Ni", stressedSyllable="ok")]
        self.assertEqual(2, len(results))
        self.assertEqual("any", results[0]["word"])
        self.assertEqual("nominee", results[1]["word"])

        results = [result for result in self.isle.search("Ni", stressedSyllable="only")]
        self.assertEqual(1, len(results))
        self.assertEqual("nominee", results[0]["word"])

        results = [result for result in self.isle.search("Ni", stressedSyllable="no")]
        self.assertEqual(1, len(results))
        self.assertEqual("any", results[0]["word"])

    def test_multiword(self):
        """multiword filters on whether the entry contains multiple words."""
        results = [result for result in self.isle.search("kV", multiword="ok")]
        self.assertEqual(2, len(results))
        self.assertEqual("brown_cat", results[0]["word"])
        self.assertEqual("cat", results[1]["word"])

        results = [result for result in self.isle.search("kV", multiword="only")]
        self.assertEqual(1, len(results))
        self.assertEqual("brown_cat", results[0]["word"])

        results = [result for result in self.isle.search("kV", multiword="no")]
        self.assertEqual(1, len(results))
        self.assertEqual("cat", results[0]["word"])

    def test_pos(self):
        """pos restricts matches to entries carrying the given part-of-speech tag."""
        # "brown_cat" has an empty POS list; "cat" is tagged dt,nn,prp.
        results = [result for result in self.isle.search("Vt", pos=None)]
        self.assertEqual(2, len(results))
        self.assertEqual(("brown_cat", ""), (results[0]["word"], results[0]["posList"]))
        self.assertEqual(
            ("cat", "dt,nn,prp"), (results[1]["word"], results[1]["posList"])
        )

        results = [result for result in self.isle.search("Vt", pos="nn")]
        self.assertEqual(1, len(results))
        self.assertEqual(
            ("cat", "dt,nn,prp"), (results[0]["word"], results[0]["posList"])
        )

    def test_exact_match(self):
        """exactMatch=True requires the pattern to be the whole pronunciation."""
        results = [result for result in self.isle.search("bɹaʊn", exactMatch=False)]
        self.assertEqual(2, len(results))
        self.assertEqual("brown", results[0]["word"])
        self.assertEqual("brown_cat", results[1]["word"])

        results = [result for result in self.isle.search("bɹaʊn", exactMatch=True)]
        self.assertEqual(1, len(results))
        self.assertEqual("brown", results[0]["word"])
40.838509
88
0.604867
809
6,575
4.886279
0.118665
0.258032
0.093094
0.128004
0.863395
0.833797
0.826714
0.815077
0.747281
0.741209
0
0.014419
0.219468
6,575
160
89
41.09375
0.755456
0.002738
0
0.472
0
0
0.127536
0
0
0
0
0
0.544
1
0.08
false
0
0.048
0.008
0.152
0
0
0
0
null
1
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
8
6371cf5a7236bcd5a3bb838f26e872e03c51d4a2
45
py
Python
hydrogels/systems/__init__.py
debeshmandal/brownian
bc5b2e00a04d11319c85e749f9c056b75b450ff7
[ "MIT" ]
3
2020-05-13T01:07:30.000Z
2021-02-12T13:37:23.000Z
hydrogels/systems/__init__.py
debeshmandal/brownian
bc5b2e00a04d11319c85e749f9c056b75b450ff7
[ "MIT" ]
24
2020-06-04T13:48:57.000Z
2021-12-31T18:46:52.000Z
hydrogels/systems/__init__.py
debeshmandal/brownian
bc5b2e00a04d11319c85e749f9c056b75b450ff7
[ "MIT" ]
1
2020-07-23T17:15:23.000Z
2020-07-23T17:15:23.000Z
from .degradation import EnzymaticDegradation
45
45
0.911111
4
45
10.25
1
0
0
0
0
0
0
0
0
0
0
0
0.066667
45
1
45
45
0.97619
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7