hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dfa7d2490f520b795ded3eda0b6702f7c61e53a9
| 3,228
|
py
|
Python
|
tbot/migrations/20190127_01_7TC2M-twitch-user-id-type-change.py
|
thomaserlang/tbot
|
99cfa204d86ef35cf2cc9482ae5a44abb35b443a
|
[
"MIT"
] | null | null | null |
tbot/migrations/20190127_01_7TC2M-twitch-user-id-type-change.py
|
thomaserlang/tbot
|
99cfa204d86ef35cf2cc9482ae5a44abb35b443a
|
[
"MIT"
] | 10
|
2022-02-14T11:40:20.000Z
|
2022-03-09T22:44:03.000Z
|
tbot/migrations/20190127_01_7TC2M-twitch-user-id-type-change.py
|
thomaserlang/tbot
|
99cfa204d86ef35cf2cc9482ae5a44abb35b443a
|
[
"MIT"
] | 1
|
2020-09-19T16:38:24.000Z
|
2020-09-19T16:38:24.000Z
|
"""
twitch user id type change
"""
from yoyo import step
__depends__ = {'20190126_02_Fnsd3-drop-twitch-chatlog-insert-trigger'}
steps = [
step('''
ALTER TABLE `twitch_badges`
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NOT NULL ,
CHANGE COLUMN `user_id` `user_id` VARCHAR(36) NOT NULL ;
'''),
step('''
ALTER TABLE `twitch_channel_admins`
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NOT NULL ,
CHANGE COLUMN `user_id` `user_id` VARCHAR(36) NOT NULL ;
'''),
step('''
ALTER TABLE `twitch_channel_cache`
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NOT NULL ;
'''),
step('''
ALTER TABLE `twitch_channel_mods`
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NOT NULL ,
CHANGE COLUMN `user_id` `user_id` VARCHAR(36) NOT NULL ;
'''),
step('''
ALTER TABLE `twitch_channels`
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NOT NULL ;
'''),
step('''
ALTER TABLE `twitch_chat_alerts`
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NULL DEFAULT NULL ;
'''),
step('''
ALTER TABLE `twitch_chatlog`
CHANGE COLUMN `type` `type` INT(3) UNSIGNED NOT NULL DEFAULT 1 ,
CHANGE COLUMN `created_at` `created_at` DATETIME NOT NULL ,
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NOT NULL ,
CHANGE COLUMN `user_id` `user_id` VARCHAR(36) NOT NULL ,
CHANGE COLUMN `word_count` `word_count` INT(11) NULL DEFAULT 0 ;
'''),
step('''
ALTER TABLE `twitch_commands`
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NOT NULL ;
'''),
step('''
ALTER TABLE `twitch_discord_roles`
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NULL DEFAULT NULL ;
'''),
step('''
ALTER TABLE `twitch_discord_users`
CHANGE COLUMN `twitch_id` `twitch_id` VARCHAR(36) NULL DEFAULT NULL ;
'''),
step('''
ALTER TABLE `twitch_modlog`
CHANGE COLUMN `created_at` `created_at` DATETIME NOT NULL ,
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NOT NULL ,
CHANGE COLUMN `user_id` `user_id` VARCHAR(36) NOT NULL ,
CHANGE COLUMN `target_user_id` `target_user_id` VARCHAR(36) NULL DEFAULT NULL ;
'''),
step('''
ALTER TABLE `twitch_spotify`
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NOT NULL ;
'''),
step('''
ALTER TABLE `twitch_stream_watchtime`
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NOT NULL ,
CHANGE COLUMN `user_id` `user_id` VARCHAR(36) NOT NULL ,
CHANGE COLUMN `user` `user` VARCHAR(25) NULL DEFAULT NULL ;
'''),
step('''
ALTER TABLE `twitch_streams`
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NOT NULL ;
'''),
step('''
ALTER TABLE `twitch_user_chat_stats`
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NOT NULL ,
CHANGE COLUMN `user_id` `user_id` VARCHAR(36) NOT NULL ;
'''),
step('''
ALTER TABLE `twitch_user_stats`
CHANGE COLUMN `channel_id` `channel_id` VARCHAR(36) NOT NULL ,
CHANGE COLUMN `user_id` `user_id` VARCHAR(36) NOT NULL ;
'''),
step('''
ALTER TABLE `twitch_usernames`
CHANGE COLUMN `user_id` `user_id` VARCHAR(36) NOT NULL ;
'''),
]
| 34.709677
| 83
| 0.650867
| 429
| 3,228
| 4.680653
| 0.135198
| 0.185259
| 0.14243
| 0.153386
| 0.804781
| 0.804781
| 0.797809
| 0.780378
| 0.778386
| 0.778386
| 0
| 0.027778
| 0.219331
| 3,228
| 92
| 84
| 35.086957
| 0.769048
| 0.008055
| 0
| 0.697674
| 0
| 0
| 0.886349
| 0.066061
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011628
| 0
| 0.011628
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dfa917d9001f65635c687515492e42cc11f281ed
| 66,134
|
py
|
Python
|
UMLRT2Kiltera_MM/Properties/Pattern/Himesis/HExitPtConOPsOfExitPtOUT_CompleteLHS.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 3
|
2017-06-02T19:26:27.000Z
|
2021-06-14T04:25:45.000Z
|
UMLRT2Kiltera_MM/Properties/Pattern/Himesis/HExitPtConOPsOfExitPtOUT_CompleteLHS.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 8
|
2016-08-24T07:04:07.000Z
|
2017-05-26T16:22:47.000Z
|
UMLRT2Kiltera_MM/Properties/Pattern/Himesis/HExitPtConOPsOfExitPtOUT_CompleteLHS.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 1
|
2019-10-31T06:00:23.000Z
|
2019-10-31T06:00:23.000Z
|
from core.himesis import Himesis, HimesisPreConditionPatternLHS
import cPickle as pickle
from uuid import UUID
class HExitPtConOPsOfExitPtOUT_CompleteLHS(HimesisPreConditionPatternLHS):
def __init__(self):
"""
Creates the himesis graph representing the AToM3 model HExitPtConOPsOfExitPtOUT_CompleteLHS.
"""
# Flag this instance as compiled now
self.is_compiled = True
super(HExitPtConOPsOfExitPtOUT_CompleteLHS, self).__init__(name='HExitPtConOPsOfExitPtOUT_CompleteLHS', num_nodes=30, edges=[])
# Add the edges
self.add_edges([(5, 10), (12, 10), (6, 11), (13, 11), (0, 20), (0, 5), (23, 1), (1, 27), (1, 6), (20, 2), (26, 2), (27, 2), (2, 21), (3, 23), (3, 24), (3, 26), (21, 4), (28, 4), (29, 4), (4, 22), (18, 12), (19, 13), (22, 9), (7, 28), (8, 29), (24, 7), (7, 25), (25, 8), (18, 14), (14, 16), (19, 15), (15, 17)])
# Set the graph attributes
self["mm__"] = pickle.loads("""(lp1
S'MT_pre__UMLRT2Kiltera_MM'
p2
aS'MoTifRule'
p3
a.""")
self["MT_constraint__"] = pickle.loads("""V#===============================================================================\u000a# This code is executed after the nodes in the LHS have been matched.\u000a# You can access a matched node labelled n by: PreNode('n').\u000a# To access attribute x of node n, use: PreNode('n')['x'].\u000a# The given constraint must evaluate to a boolean expression:\u000a# returning True enables the rule to be applied,\u000a# returning False forbids the rule from being applied.\u000a#===============================================================================\u000aif (PreNode('19')['name']=='isComposite')and (PreNode('22')['name']=='true') and (PreNode('25')['name']=='channel')and (PreNode('27')['name']=='sh_in'):\u000a return True\u000areturn False\u000a
p1
.""")
self["name"] = """"""
self["GUID__"] = UUID('30c86f89-5734-44cf-b217-dd34b5a20132')
# Set the node attributes
self.vs[0]["MT_pivotOut__"] = """element1"""
self.vs[0]["MT_subtypeMatching__"] = False
self.vs[0]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[0]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[0]["MT_pivotIn__"] = """element1"""
self.vs[0]["MT_label__"] = """1"""
self.vs[0]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[0]["mm__"] = """MT_pre__State"""
self.vs[0]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[0]["MT_dirty__"] = False
self.vs[0]["GUID__"] = UUID('b10b16e9-ded9-47c3-af26-a9bfa03f8416')
self.vs[1]["MT_subtypeMatching__"] = False
self.vs[1]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[1]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[1]["MT_label__"] = """9"""
self.vs[1]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[1]["mm__"] = """MT_pre__Trigger_T"""
self.vs[1]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[1]["MT_dirty__"] = False
self.vs[1]["GUID__"] = UUID('8c4c5341-0e39-4a89-8035-45a4e083d252')
self.vs[2]["MT_pivotOut__"] = """element2"""
self.vs[2]["MT_subtypeMatching__"] = False
self.vs[2]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[2]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[2]["MT_pivotIn__"] = """element2"""
self.vs[2]["MT_label__"] = """2"""
self.vs[2]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[2]["mm__"] = """MT_pre__ExitPoint"""
self.vs[2]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[2]["MT_dirty__"] = False
self.vs[2]["GUID__"] = UUID('da25b028-9cff-438c-8a7c-6a3094a5aeaf')
self.vs[3]["MT_subtypeMatching__"] = False
self.vs[3]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[3]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[3]["MT_label__"] = """8"""
self.vs[3]["MT_subtypes__"] = pickle.loads("""(lp1
S'MT_pre__Seq'
p2
a.""")
self.vs[3]["mm__"] = """MT_pre__Par"""
self.vs[3]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[3]["MT_dirty__"] = False
self.vs[3]["GUID__"] = UUID('6ad943c7-ae1e-4b2d-9e64-f2ed290e4ea4')
self.vs[4]["MT_pivotOut__"] = """element3"""
self.vs[4]["MT_subtypeMatching__"] = False
self.vs[4]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[4]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[4]["MT_pivotIn__"] = """element3"""
self.vs[4]["MT_label__"] = """3"""
self.vs[4]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[4]["mm__"] = """MT_pre__Transition"""
self.vs[4]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[4]["MT_dirty__"] = False
self.vs[4]["GUID__"] = UUID('df1e83d1-32db-4056-ac38-2741b5898582')
self.vs[5]["MT_subtypeMatching__"] = False
self.vs[5]["MT_label__"] = """24"""
self.vs[5]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[5]["mm__"] = """MT_pre__hasAttribute_S"""
self.vs[5]["MT_dirty__"] = False
self.vs[5]["GUID__"] = UUID('8a054c15-e3fa-49b6-b451-0b72ab6a2584')
self.vs[6]["MT_subtypeMatching__"] = False
self.vs[6]["MT_label__"] = """28"""
self.vs[6]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[6]["mm__"] = """MT_pre__hasAttribute_T"""
self.vs[6]["MT_dirty__"] = False
self.vs[6]["GUID__"] = UUID('8a1e71e5-372c-4e51-928c-78a4af5fd4ea')
self.vs[7]["MT_subtypeMatching__"] = False
self.vs[7]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[7]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[7]["MT_label__"] = """10"""
self.vs[7]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[7]["mm__"] = """MT_pre__Inst"""
self.vs[7]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[7]["MT_dirty__"] = False
self.vs[7]["GUID__"] = UUID('40520d48-84cf-4a4c-8c62-e2a2fbadb9e9')
self.vs[8]["MT_subtypeMatching__"] = False
self.vs[8]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[8]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[8]["MT_label__"] = """15"""
self.vs[8]["MT_subtypes__"] = pickle.loads("""(lp1
S'MT_pre__PythonRef'
p2
a.""")
self.vs[8]["mm__"] = """MT_pre__Name"""
self.vs[8]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[8]["MT_dirty__"] = False
self.vs[8]["GUID__"] = UUID('9f164020-f954-4b0f-8d01-9152416dfe73')
self.vs[9]["MT_pivotOut__"] = """element4"""
self.vs[9]["MT_subtypeMatching__"] = False
self.vs[9]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[9]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[9]["MT_pivotIn__"] = """element4"""
self.vs[9]["MT_label__"] = """4"""
self.vs[9]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[9]["mm__"] = """MT_pre__OUT2"""
self.vs[9]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[9]["MT_dirty__"] = False
self.vs[9]["GUID__"] = UUID('48145f24-2dd2-4428-b68d-cb52aa333a09')
self.vs[10]["MT_subtypeMatching__"] = False
self.vs[10]["MT_pre__Type"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[10]["MT_label__"] = """19"""
self.vs[10]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[10]["mm__"] = """MT_pre__Attribute"""
self.vs[10]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[10]["MT_dirty__"] = False
self.vs[10]["GUID__"] = UUID('1b23a4f9-8e30-4509-be19-5b827290f33f')
self.vs[11]["MT_subtypeMatching__"] = False
self.vs[11]["MT_pre__Type"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[11]["MT_label__"] = """25"""
self.vs[11]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[11]["mm__"] = """MT_pre__Attribute"""
self.vs[11]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[11]["MT_dirty__"] = False
self.vs[11]["GUID__"] = UUID('8aebfcf3-9e7b-4f6f-8e48-bcb0f096b1ba')
self.vs[12]["MT_subtypeMatching__"] = False
self.vs[12]["MT_label__"] = """21"""
self.vs[12]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[12]["mm__"] = """MT_pre__leftExpr"""
self.vs[12]["MT_dirty__"] = False
self.vs[12]["GUID__"] = UUID('4416fc7e-9ab9-4b93-bef2-dbe271d49a34')
self.vs[13]["MT_subtypeMatching__"] = False
self.vs[13]["MT_label__"] = """29"""
self.vs[13]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[13]["mm__"] = """MT_pre__leftExpr"""
self.vs[13]["MT_dirty__"] = False
self.vs[13]["GUID__"] = UUID('990e486f-db6b-41da-aa75-a99035d876be')
self.vs[14]["MT_subtypeMatching__"] = False
self.vs[14]["MT_label__"] = """23"""
self.vs[14]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[14]["mm__"] = """MT_pre__rightExpr"""
self.vs[14]["MT_dirty__"] = False
self.vs[14]["GUID__"] = UUID('ecaaf206-0f7a-41da-9907-e7c23040450f')
self.vs[15]["MT_subtypeMatching__"] = False
self.vs[15]["MT_label__"] = """30"""
self.vs[15]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[15]["mm__"] = """MT_pre__rightExpr"""
self.vs[15]["MT_dirty__"] = False
self.vs[15]["GUID__"] = UUID('361f9501-5606-43ef-ad3a-b1a182678ed0')
self.vs[16]["MT_subtypeMatching__"] = False
self.vs[16]["MT_pre__Type"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[16]["MT_label__"] = """22"""
self.vs[16]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[16]["mm__"] = """MT_pre__Constant"""
self.vs[16]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[16]["MT_dirty__"] = False
self.vs[16]["GUID__"] = UUID('ff03edde-10aa-40b5-ba3e-c8cc70e2813f')
self.vs[17]["MT_subtypeMatching__"] = False
self.vs[17]["MT_pre__Type"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[17]["MT_label__"] = """27"""
self.vs[17]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[17]["mm__"] = """MT_pre__Constant"""
self.vs[17]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[17]["MT_dirty__"] = False
self.vs[17]["GUID__"] = UUID('b76e2ed0-f89e-4109-80e1-aa9210384e29')
self.vs[18]["MT_subtypeMatching__"] = False
self.vs[18]["MT_label__"] = """20"""
self.vs[18]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[18]["mm__"] = """MT_pre__Equation"""
self.vs[18]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[18]["MT_dirty__"] = False
self.vs[18]["GUID__"] = UUID('9ca52834-938c-4e53-a23f-a6f4cf4cb32f')
self.vs[19]["MT_subtypeMatching__"] = False
self.vs[19]["MT_label__"] = """26"""
self.vs[19]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[19]["mm__"] = """MT_pre__Equation"""
self.vs[19]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[19]["MT_dirty__"] = False
self.vs[19]["GUID__"] = UUID('b4bf5af6-227a-4180-be07-0d7e9c2796bb')
self.vs[20]["MT_subtypeMatching__"] = False
self.vs[20]["MT_pre__associationType"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[20]["MT_label__"] = """5"""
self.vs[20]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[20]["mm__"] = """MT_pre__directLink_S"""
self.vs[20]["MT_dirty__"] = False
self.vs[20]["GUID__"] = UUID('016f9a5b-dd82-4012-9418-8063acff8bc4')
self.vs[21]["MT_subtypeMatching__"] = False
self.vs[21]["MT_pre__associationType"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[21]["MT_label__"] = """6"""
self.vs[21]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[21]["mm__"] = """MT_pre__directLink_S"""
self.vs[21]["MT_dirty__"] = False
self.vs[21]["GUID__"] = UUID('99da727d-a6f1-49ee-bc0c-4dc52e9253d1')
self.vs[22]["MT_subtypeMatching__"] = False
self.vs[22]["MT_pre__associationType"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[22]["MT_label__"] = """7"""
self.vs[22]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[22]["mm__"] = """MT_pre__directLink_S"""
self.vs[22]["MT_dirty__"] = False
self.vs[22]["GUID__"] = UUID('ad1d5bcc-69c9-4096-9e8c-2babf124c55d')
self.vs[23]["MT_subtypeMatching__"] = False
self.vs[23]["MT_pre__associationType"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[23]["MT_label__"] = """11"""
self.vs[23]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[23]["mm__"] = """MT_pre__directLink_T"""
self.vs[23]["MT_dirty__"] = False
self.vs[23]["GUID__"] = UUID('e1999e40-265b-4732-9750-b171a3615c84')
self.vs[24]["MT_subtypeMatching__"] = False
self.vs[24]["MT_pre__associationType"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[24]["MT_label__"] = """12"""
self.vs[24]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[24]["mm__"] = """MT_pre__directLink_T"""
self.vs[24]["MT_dirty__"] = False
self.vs[24]["GUID__"] = UUID('c71873fd-38d5-40ac-bf49-84a739dc6ee2')
self.vs[25]["MT_subtypeMatching__"] = False
self.vs[25]["MT_pre__associationType"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[25]["MT_label__"] = """16"""
self.vs[25]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[25]["mm__"] = """MT_pre__directLink_T"""
self.vs[25]["MT_dirty__"] = False
self.vs[25]["GUID__"] = UUID('2c08c353-2aab-4914-aa0a-7e6ebe4801d6')
self.vs[26]["MT_subtypeMatching__"] = False
self.vs[26]["MT_label__"] = """13"""
self.vs[26]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[26]["mm__"] = """MT_pre__trace_link"""
self.vs[26]["MT_dirty__"] = False
self.vs[26]["GUID__"] = UUID('d8e97959-3d17-4a0c-8454-ff7cd97fabdf')
self.vs[27]["MT_subtypeMatching__"] = False
self.vs[27]["MT_label__"] = """14"""
self.vs[27]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[27]["mm__"] = """MT_pre__trace_link"""
self.vs[27]["MT_dirty__"] = False
self.vs[27]["GUID__"] = UUID('e8b49140-2e75-4bd8-8d94-dc4b9ddb4cd6')
self.vs[28]["MT_subtypeMatching__"] = False
self.vs[28]["MT_label__"] = """17"""
self.vs[28]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[28]["mm__"] = """MT_pre__trace_link"""
self.vs[28]["MT_dirty__"] = False
self.vs[28]["GUID__"] = UUID('179bb74d-4204-42e4-a873-cc4f497a34de')
self.vs[29]["MT_subtypeMatching__"] = False
self.vs[29]["MT_label__"] = """18"""
self.vs[29]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
self.vs[29]["mm__"] = """MT_pre__trace_link"""
self.vs[29]["MT_dirty__"] = False
self.vs[29]["GUID__"] = UUID('c61492ed-4e12-46bd-8700-98405154b0ab')
def eval_Type19(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_name19(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_Type25(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_name25(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_classtype1(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_cardinality1(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_name1(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_classtype9(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_cardinality9(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_name9(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_classtype2(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_cardinality2(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_name2(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_classtype8(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_cardinality8(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_name8(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_classtype3(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_cardinality3(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_name3(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_associationType5(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_associationType6(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_associationType7(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_associationType11(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_associationType12(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_associationType16(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_classtype10(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_cardinality10(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_name10(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_classtype15(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_cardinality15(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_name15(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_Type22(self, attr_value, this):
    """Match constraint for attribute 'Type' of LHS node 22.

    Generated hook; attr_value is the attribute's current value and
    this['x'] reads attribute x of the node. Cross-node constraints go in
    the LHS/NAC constraint. Unconstrained here: always matches.
    """
    return True
def eval_name22(self, attr_value, this):
    """Match constraint for attribute 'name' of LHS node 22.

    Generated hook; attr_value is the attribute's current value and
    this['x'] reads attribute x of the node. Cross-node constraints go in
    the LHS/NAC constraint (node 22's name is actually checked there).
    Unconstrained here: always matches.
    """
    return True
def eval_Type27(self, attr_value, this):
    """Match constraint for attribute 'Type' of LHS node 27.

    Generated hook; attr_value is the attribute's current value and
    this['x'] reads attribute x of the node. Cross-node constraints go in
    the LHS/NAC constraint. Unconstrained here: always matches.
    """
    return True
def eval_name27(self, attr_value, this):
    """Match constraint for attribute 'name' of LHS node 27.

    Generated hook; attr_value is the attribute's current value and
    this['x'] reads attribute x of the node. Cross-node constraints go in
    the LHS/NAC constraint (node 27's name is actually checked there).
    Unconstrained here: always matches.
    """
    return True
def eval_classtype4(self, attr_value, this):
    """Match constraint for attribute 'classtype' of LHS node 4.

    Generated hook; attr_value is the attribute's current value and
    this['x'] reads attribute x of the node. Cross-node constraints go in
    the LHS/NAC constraint. Unconstrained here: always matches.
    """
    return True
def eval_cardinality4(self, attr_value, this):
    """Match constraint for attribute 'cardinality' of LHS node 4.

    Generated hook; attr_value is the attribute's current value and
    this['x'] reads attribute x of the node. Cross-node constraints go in
    the LHS/NAC constraint. Unconstrained here: always matches.
    """
    return True
def eval_name4(self, attr_value, this):
    """Match constraint for attribute 'name' of LHS node 4.

    Generated hook; attr_value is the attribute's current value and
    this['x'] reads attribute x of the node. Cross-node constraints go in
    the LHS/NAC constraint. Unconstrained here: always matches.
    """
    return True
def eval_name20(self, attr_value, this):
    """Match constraint for attribute 'name' of LHS node 20.

    Generated hook; attr_value is the attribute's current value and
    this['x'] reads attribute x of the node. Cross-node constraints go in
    the LHS/NAC constraint. Unconstrained here: always matches.
    """
    return True
def eval_name26(self, attr_value, this):
    """Match constraint for attribute 'name' of LHS node 26.

    Generated hook; attr_value is the attribute's current value and
    this['x'] reads attribute x of the node. Cross-node constraints go in
    the LHS/NAC constraint. Unconstrained here: always matches.
    """
    return True
def constraint(self, PreNode, graph):
    """
    Executable constraint code, run after the LHS nodes have matched.

    @param PreNode: Function taking an integer as parameter
    and returns the node corresponding to that label.
    @param graph: The host graph (unused by this constraint).
    @return: True to allow the rule to fire, False to forbid it.
    """
    # The rule only applies when these four matched nodes carry exactly
    # these 'name' attribute values.
    required_names = {
        '19': 'isComposite',
        '22': 'true',
        '25': 'channel',
        '27': 'sh_in',
    }
    for label, expected in required_names.items():
        if PreNode(label)['name'] != expected:
            return False
    return True
| 50.330289
| 811
| 0.524012
| 7,769
| 66,134
| 4.342129
| 0.048397
| 0.040553
| 0.057627
| 0.043754
| 0.891741
| 0.845379
| 0.838442
| 0.806664
| 0.800024
| 0.797059
| 0
| 0.024358
| 0.197962
| 66,134
| 1,313
| 812
| 50.368621
| 0.611629
| 0.335833
| 0
| 0.622222
| 0
| 0.001307
| 0.645662
| 0.186694
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054902
| false
| 0
| 0.003922
| 0.052288
| 0.16732
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5f32dbdfc7c7dd6cd652f8b348ec5fa64ed25be0
| 8,383
|
py
|
Python
|
models/only_students.py
|
fvergaracl/gonsa2_backend
|
2e354179cd337e6e98a6f23eb0a5c3f9c5e7b57e
|
[
"MIT"
] | null | null | null |
models/only_students.py
|
fvergaracl/gonsa2_backend
|
2e354179cd337e6e98a6f23eb0a5c3f9c5e7b57e
|
[
"MIT"
] | 1
|
2019-10-19T21:11:38.000Z
|
2019-10-19T21:11:38.000Z
|
models/only_students.py
|
fvergaracl/gonsa2_backend
|
2e354179cd337e6e98a6f23eb0a5c3f9c5e7b57e
|
[
"MIT"
] | null | null | null |
import mysql.connector
from settings.config import Database, Config
from datetime import datetime
def get_all_challenges_student(student_nick):
    """Return every challenge reachable through *student_nick*'s classes.

    Each row is a list: [id_number, title, photourl, summary, description,
    aim, created, last_edit, fk_category, owner_fk_nick].
    Returns None when the query fails (original error behaviour preserved).
    """
    try:
        database_ = Database()
        config = database_.config
        cnx = mysql.connector.connect(**config)
        try:
            cursor = cnx.cursor()
            query = "SELECT all_c.id_number, all_c.title, all_c.photourl, all_c.summary, all_c.description, all_c.aim, all_c.created, all_c.last_edit, all_c.fk_category ,all_c.owner_fk_nick FROM all_challenges as all_c INNER JOIN class_challenges INNER JOIN class_list INNER JOIN classes WHERE class_challenges.FK_challenge_id_number = all_c.id_number AND class_list.id_number = class_challenges.FK_class_id_number AND classes.FK_class_id_number = class_list.id_number AND classes.FK_student_nick = %s;"
            cursor.execute(query, (student_nick,))
            # The cursor yields tuples in SELECT column order; callers get lists.
            rows = [list(row) for row in cursor]
            cursor.close()
            return rows
        finally:
            # Fix: the original leaked the connection when execute() raised.
            cnx.close()
    except Exception as e:
        # Best-effort error handling kept from the original: log and return None.
        print(e)
        return None
def get_status_challenge_by_id(nick_student, id_challenge_):
    """Return the student's status for one challenge.

    'finalized' if the first matching activity row has finalized == '1',
    'init' if a row exists but is not finalized, 'noinit' if there is no
    row, and None on a database error (original behaviour preserved).
    """
    try:
        database_ = Database()
        config = database_.config
        cnx = mysql.connector.connect(**config)
        try:
            cursor = cnx.cursor()
            query = "SELECT finalized, FK_student_nick FROM Challenge_last_activity WHERE FK_student_nick = %s AND FK_challenge_id_number = %s;"
            cursor.execute(query, (nick_student, id_challenge_,))
            # Fix: the original returned from inside the iteration loop and
            # never closed the cursor/connection; only the first row matters.
            row = cursor.fetchone()
            cursor.close()
            if row is None:
                return 'noinit'
            finalized = row[0]
            return 'finalized' if finalized == '1' else 'init'
        finally:
            cnx.close()
    except Exception as e:
        print(e)
        return None
def get_all_challenges_student_finalized(student_nick):
    """Return the student's finalized challenges with activity data.

    Each row is a list: [id_number, title, photourl, summary, description,
    aim, created, last_edit, fk_category, owner_fk_nick, init_date,
    end_date, number_of_interaction, last_response], ordered by interaction
    count descending. Returns None on a database error.
    """
    try:
        database_ = Database()
        config = database_.config
        cnx = mysql.connector.connect(**config)
        try:
            cursor = cnx.cursor()
            query = "SELECT all_c.id_number, all_c.title, all_c.photourl, all_c.summary, all_c.description, all_c.aim, all_c.created, all_c.last_edit, all_c.fk_category ,all_c.owner_fk_nick, cla.init_date, cla.end_date, cla.number_of_interaction, cla.last_response FROM all_challenges as all_c INNER JOIN class_challenges INNER JOIN Challenge_last_activity as cla INNER JOIN class_list INNER JOIN classes WHERE class_challenges.FK_challenge_id_number = all_c.id_number AND class_list.id_number = class_challenges.FK_class_id_number AND classes.FK_class_id_number = class_list.id_number AND classes.FK_student_nick = %s AND cla.FK_student_nick = classes.FK_student_nick and cla.FK_challenge_id_number = all_c.id_number and cla.finalized = '1' ORDER BY cla.number_of_interaction DESC; "
            cursor.execute(query, (student_nick,))
            rows = [list(row) for row in cursor]
            cursor.close()
            return rows
        finally:
            # Fix: connection was leaked on error in the original.
            cnx.close()
    except Exception as e:
        # Fix: dropped the stray Python-2 debug print ("print 'fina'").
        print(e)
        return None
def get_all_challenges_student_no_finalized(student_nick):
    """Return the student's started-but-not-finalized challenges.

    Each row is a list: [id_number, title, photourl, summary, description,
    aim, created, last_edit, fk_category, owner_fk_nick, init_date,
    number_of_interaction, last_response], ordered by interaction count
    descending. Returns None on a database error.
    """
    try:
        database_ = Database()
        config = database_.config
        cnx = mysql.connector.connect(**config)
        try:
            cursor = cnx.cursor()
            query = "SELECT all_c.id_number, all_c.title, all_c.photourl, all_c.summary, all_c.description, all_c.aim, all_c.created, all_c.last_edit, all_c.fk_category ,all_c.owner_fk_nick, cla.init_date, cla.number_of_interaction, cla.last_response FROM all_challenges as all_c INNER JOIN class_challenges INNER JOIN Challenge_last_activity as cla INNER JOIN class_list INNER JOIN classes WHERE class_challenges.FK_challenge_id_number = all_c.id_number AND class_list.id_number = class_challenges.FK_class_id_number AND classes.FK_class_id_number = class_list.id_number AND classes.FK_student_nick = %s AND cla.FK_student_nick = classes.FK_student_nick and cla.FK_challenge_id_number = all_c.id_number and cla.finalized = '0' ORDER BY cla.number_of_interaction DESC;"
            cursor.execute(query, (student_nick,))
            rows = [list(row) for row in cursor]
            cursor.close()
            return rows
        finally:
            # Fix: connection was leaked on error in the original.
            cnx.close()
    except Exception as e:
        # Fix: dropped the stray Python-2 debug print ("print ' no final'").
        print(e)
        return None
def get_all_challenges_student_by_cat(student_nick, category_):
    """Return the student's challenges restricted to one category.

    Each row is a list: [id_number, title, photourl, summary, description,
    aim, created, last_edit, owner_fk_nick]. Returns None on a database
    error (original behaviour preserved).
    """
    try:
        database_ = Database()
        config = database_.config
        cnx = mysql.connector.connect(**config)
        try:
            cursor = cnx.cursor()
            query = "SELECT all_c.id_number, all_c.title, all_c.photourl, all_c.summary, all_c.description, all_c.aim, all_c.created, all_c.last_edit, all_c.owner_fk_nick FROM all_challenges as all_c INNER JOIN class_challenges INNER JOIN class_list INNER JOIN classes WHERE class_challenges.FK_challenge_id_number = all_c.id_number AND class_list.id_number = class_challenges.FK_class_id_number AND classes.FK_student_nick = %s AND all_c.fk_category = %s;"
            cursor.execute(query, (student_nick, category_,))
            rows = [list(row) for row in cursor]
            cursor.close()
            return rows
        finally:
            # Fix: connection was leaked on error in the original.
            cnx.close()
    except Exception as e:
        print(e)
        return None
def add_text_library(FK_student_nick, FK_challenge_id_number, title_text, url_text, date_added, state):
    """Insert one text into the student's personal library.

    Returns True on success, False on any failure.
    """
    try:
        # NOTE(review): the original bound Config() to an unused local 'c';
        # the call is kept in case construction has side effects — confirm.
        Config()
        database_ = Database()
        config = database_.config
        cnx = mysql.connector.connect(**config)
        try:
            cursor = cnx.cursor()
            query = "INSERT INTO Student_personal_library(FK_student_nick , FK_challenge_id_number , title_text, url_text, date_added, state) VALUES (%s, %s, %s, %s, %s, %s);"
            cursor.execute(query, (FK_student_nick, FK_challenge_id_number, title_text, url_text, date_added, state,))
            cnx.commit()
            cursor.close()
            return True
        finally:
            # Fix: connection was leaked when execute/commit raised.
            cnx.close()
    except Exception as e:
        print(e)
        return False
def check_is_in_text_library(FK_student_nick, FK_challenge_id_number, title_text, url_text):
    """Return True if the given text is already in the student's library.

    Returns False when no row matches or on a database error.
    """
    try:
        database_ = Database()
        config = database_.config
        cnx = mysql.connector.connect(**config)
        try:
            cursor = cnx.cursor()
            query = "SELECT state FROM Student_personal_library WHERE FK_student_nick=%s AND FK_challenge_id_number=%s AND title_text=%s AND url_text=%s;"
            cursor.execute(query, (FK_student_nick, FK_challenge_id_number, title_text, url_text,))
            # Fix: the original returned True from inside the result loop,
            # skipping cursor/connection cleanup; only existence matters.
            found = cursor.fetchone() is not None
            cursor.close()
            return found
        finally:
            cnx.close()
    except Exception as e:
        print(e)
        return False
def update_text_library(FK_student_nick, FK_challenge_id_number, title_text, url_text, state):
    """Update the 'state' of one text in the student's personal library.

    Returns True on success, False on any failure.
    """
    try:
        # NOTE(review): the original bound Config() to an unused local 'c';
        # the call is kept in case construction has side effects — confirm.
        Config()
        database_ = Database()
        config = database_.config
        cnx = mysql.connector.connect(**config)
        try:
            cursor = cnx.cursor()
            # Fix: removed the unused 'now = datetime.now()' local.
            query = "UPDATE Student_personal_library SET state=%s WHERE FK_student_nick=%s AND FK_challenge_id_number=%s AND title_text=%s AND url_text=%s;"
            cursor.execute(query, (state, FK_student_nick, FK_challenge_id_number, title_text, url_text,))
            cnx.commit()
            cursor.close()
            return True
        finally:
            # Fix: connection was leaked when execute/commit raised.
            cnx.close()
    except Exception as e:
        print(e)
        return False
def getallmylibrary_by_challenge(FK_student_nick, FK_challenge_id_number):
    """Return the student's library entries for one challenge.

    Each row is a list: [title_text, url_text, date_added, state].
    Returns [] on a database error (original behaviour preserved).
    """
    try:
        database_ = Database()
        config = database_.config
        cnx = mysql.connector.connect(**config)
        try:
            cursor = cnx.cursor()
            query = "SELECT title_text, url_text, date_added, state FROM Student_personal_library WHERE FK_student_nick=%s AND FK_challenge_id_number=%s;"
            cursor.execute(query, (FK_student_nick, FK_challenge_id_number,))
            rows = [list(row) for row in cursor]
            cursor.close()
            return rows
        finally:
            # Fix: connection was leaked on error in the original.
            cnx.close()
    except Exception as e:
        print(e)
        return []
| 47.630682
| 774
| 0.752714
| 1,278
| 8,383
| 4.615023
| 0.076682
| 0.069176
| 0.050695
| 0.061207
| 0.90963
| 0.883689
| 0.874873
| 0.859105
| 0.848423
| 0.834012
| 0
| 0.000423
| 0.15436
| 8,383
| 175
| 775
| 47.902857
| 0.83157
| 0
| 0
| 0.709091
| 0
| 0.042424
| 0.373658
| 0.120377
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.018182
| null | null | 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a033c804c79d97af64a034a3f15ee889001dc815
| 3,188
|
py
|
Python
|
flask/send/zones_upload.py
|
Artelys/Safer-Roads
|
ba9eb4b2c0f02c40142caa612ed7998c7ee01155
|
[
"MIT"
] | null | null | null |
flask/send/zones_upload.py
|
Artelys/Safer-Roads
|
ba9eb4b2c0f02c40142caa612ed7998c7ee01155
|
[
"MIT"
] | null | null | null |
flask/send/zones_upload.py
|
Artelys/Safer-Roads
|
ba9eb4b2c0f02c40142caa612ed7998c7ee01155
|
[
"MIT"
] | null | null | null |
import pandas as pd
import json
from functions import typeOfJson
def zones_upload(es, path, name):
    """Load a GeoJSON file and bulk-upload its features into Elasticsearch.

    The feature list is routed to corine_upload or admin_upload depending
    on what typeOfJson() reports for the first feature's properties.
    Returns that uploader's (error-string, category) result.
    """
    with open(path, "r") as geojson_file:
        features = json.load(geojson_file)["features"]
    category = typeOfJson(name, features[0]["properties"])
    uploader = corine_upload if category == "corine" else admin_upload
    return uploader(es, features, name)
def corine_upload(es, data, name):
    """Bulk-upload Corine land-cover features into the index *name*.

    data is the GeoJSON "features" list; each feature's properties are sent
    as the document, with its geometry folded in under "geometry".
    Documents are sent in batches of 1000. Returns (err, 'corine') where
    err accumulates French error messages ("" means success).
    """
    err = ""
    important_columns = [
        "ID",
        "CODE_18",
        "AREA_ID",
        "POLYGON_NM"
    ]
    # Validate the schema against the first feature before uploading anything.
    for column in important_columns:
        if column not in data[0]["properties"]:
            err += " ERREUR : La colonne "+column+" n'est pas présente\n"
    if err:
        return err, "corine"
    n = 1000
    to_send = []
    for index, row in enumerate(data):
        geometry = row["geometry"]
        row = row["properties"]
        row["geometry"] = geometry
        if (index+1) % n == 0:
            if not to_send:
                continue
            resp = es.bulk(index=name, body=to_send, request_timeout=30000)
            # If there are errors, report each failing item and stop.
            if resp["errors"]:
                for i, item in enumerate(resp["items"]):
                    if "error" in item["index"].keys():
                        err += "ERREUR : La ligne "+str(index+2-n+i)+" contient une erreur\n"
                        err += item["index"]["error"]["reason"]+" caused by "+item["index"]["error"]["caused_by"]["reason"]+"\n"
                print(err)
                return err, 'corine'
            to_send = []
        to_send.append({"index": {}})
        to_send.append(row)
    # Flush the trailing partial batch.
    if to_send:
        resp = es.bulk(index=name, body=to_send, request_timeout=30000)
        if resp["errors"]:
            for i, item in enumerate(resp["items"]):
                if "error" in item["index"].keys():
                    # Bug fix: the original did data.shape[0] here, but data
                    # is a plain list (no .shape) so this branch always
                    # raised AttributeError. The reported line number is the
                    # item's offset within the trailing batch (approximate,
                    # as in the main loop's numbering).
                    err += "ERREUR : La ligne "+str((len(data)//n)*n+i)+" contient une erreur\n"
                    err += item["index"]["error"]["reason"]+" caused by "+item["index"]["error"]["caused_by"]["reason"]+"\n"
            return err, 'corine'
    return err, 'corine'
def admin_upload(es, data, name):
    """Bulk-upload administrative-zone features into the index *name*.

    Same batching scheme as corine_upload but with the smaller required
    column set for admin zones. Returns (err, 'admin') where err
    accumulates French error messages ("" means success).
    """
    err = ""
    important_columns = [
        "AREA_ID",
        "POLYGON_NM"
    ]
    # Validate the schema against the first feature before uploading anything.
    for column in important_columns:
        if column not in data[0]["properties"]:
            err += " ERREUR : La colonne "+column+" n'est pas présente\n"
    if err:
        return err, "admin"
    n = 1000
    to_send = []
    for index, row in enumerate(data):
        geometry = row["geometry"]
        row = row["properties"]
        row["geometry"] = geometry
        if (index+1) % n == 0:
            if not to_send:
                continue
            resp = es.bulk(index=name, body=to_send, request_timeout=30000)
            # If there are errors, report each failing item and stop.
            if resp["errors"]:
                for i, item in enumerate(resp["items"]):
                    if "error" in item["index"].keys():
                        err += "ERREUR : La ligne "+str(index+2-n+i)+" contient une erreur\n"
                        err += item["index"]["error"]["reason"]+" caused by "+item["index"]["error"]["caused_by"]["reason"]+"\n"
                return err, 'admin'
            to_send = []
        to_send.append({"index": {}})
        to_send.append(row)
    # Flush the trailing partial batch.
    if to_send:
        resp = es.bulk(index=name, body=to_send, request_timeout=30000)
        if resp["errors"]:
            for i, item in enumerate(resp["items"]):
                if "error" in item["index"].keys():
                    # Bug fix: the original did data.shape[0] here, but data
                    # is a plain list (no .shape) so this branch always
                    # raised AttributeError. The reported line number is the
                    # item's offset within the trailing batch (approximate,
                    # as in the main loop's numbering).
                    err += "ERREUR : La ligne "+str((len(data)//n)*n+i)+" contient une erreur\n"
                    err += item["index"]["error"]["reason"]+" caused by "+item["index"]["error"]["caused_by"]["reason"]+"\n"
            return err, 'admin'
    return err, 'admin'
| 28.464286
| 110
| 0.619511
| 470
| 3,188
| 4.121277
| 0.176596
| 0.049561
| 0.057821
| 0.033041
| 0.857512
| 0.829633
| 0.829633
| 0.793495
| 0.793495
| 0.793495
| 0
| 0.016673
| 0.191029
| 3,188
| 112
| 111
| 28.464286
| 0.734393
| 0.012233
| 0
| 0.774194
| 0
| 0
| 0.234584
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032258
| false
| 0
| 0.075269
| 0
| 0.215054
| 0.010753
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a04519be0ab141f0a16e04a2501ddc8cc852f1af
| 10,402
|
py
|
Python
|
tests/export/html/test_drawing_graphic_blip.py
|
botzill/pydocx
|
98c6aa626d875278240eabea8f86a914840499b3
|
[
"Apache-2.0"
] | 127
|
2015-01-12T22:35:34.000Z
|
2022-01-20T06:24:18.000Z
|
tests/export/html/test_drawing_graphic_blip.py
|
turbo-q/pydocx
|
98c6aa626d875278240eabea8f86a914840499b3
|
[
"Apache-2.0"
] | 156
|
2015-01-05T19:55:56.000Z
|
2020-10-14T07:01:42.000Z
|
tests/export/html/test_drawing_graphic_blip.py
|
turbo-q/pydocx
|
98c6aa626d875278240eabea8f86a914840499b3
|
[
"Apache-2.0"
] | 45
|
2015-02-22T18:52:08.000Z
|
2021-06-14T08:05:47.000Z
|
# coding: utf-8
from __future__ import (
absolute_import,
print_function,
unicode_literals,
)
import base64
from pydocx.constants import EMUS_PER_PIXEL
from pydocx.openxml.packaging import ImagePart, MainDocumentPart
from pydocx.test import DocumentGeneratorTestCase
from pydocx.test.utils import WordprocessingDocumentFactory
class DrawingGraphicBlipTestCase(DocumentGeneratorTestCase):
    """HTML-export tests for <drawing> graphics referenced via <blip embed>.

    Each test assembles a minimal document XML fixture (and, where needed,
    a relationship entry pointing at the image part) and compares the
    produced HTML via assert_document_generates_html inherited from
    DocumentGeneratorTestCase.
    """

    def test_inline_image_with_multiple_ext_definitions(self):
        """Inline image size must come from spPr/xfrm, not the first <ext>."""
        # Ensure that the image size can be calculated correctly even if the
        # image size ext isn't the first ext in the drawing node
        width_px = 5
        height_px = 10
        document_xml = '''
            <p>
              <r>
                <t>Foo</t>
                <drawing>
                  <inline>
                    <graphic>
                      <graphicData>
                        <pic>
                          <blipFill>
                            <blip embed="foobar">
                              <extLst>
                                <ext/>
                              </extLst>
                            </blip>
                          </blipFill>
                          <spPr>
                            <xfrm>
                              <ext cx="{cx}" cy="{cy}"/>
                            </xfrm>
                          </spPr>
                        </pic>
                      </graphicData>
                    </graphic>
                  </inline>
                </drawing>
                <t>Bar</t>
              </r>
            </p>
        '''.format(
            # xfrm/ext stores sizes in EMUs; scale the expected pixel sizes up
            cx=width_px * EMUS_PER_PIXEL,
            cy=height_px * EMUS_PER_PIXEL,
        )
        document = WordprocessingDocumentFactory()
        image_url = 'http://google.com/image1.gif'
        # External relationship: the generated HTML references the URL as-is
        document_rels = document.relationship_format.format(
            id='foobar',
            type=ImagePart.relationship_type,
            target=image_url,
            target_mode='External',
        )
        document.add(MainDocumentPart, document_xml, document_rels)
        expected_html = '''
            <p>
              Foo
              <img
                height="{height}px"
                src="http://google.com/image1.gif"
                width="{width}px"
              />
              Bar
            </p>
        '''.format(width=width_px, height=height_px)
        self.assert_document_generates_html(document, expected_html)

    def test_anchor_with_multiple_ext_definitions(self):
        """Same as the inline case, but for an <anchor>-positioned drawing."""
        width_px = 5
        height_px = 10
        # Ensure that the image size can be calculated correctly even if the
        # image size ext isn't the first ext in the drawing node
        document_xml = '''
            <p>
              <r>
                <t>Foo</t>
                <drawing>
                  <anchor>
                    <graphic>
                      <graphicData>
                        <pic>
                          <blipFill>
                            <blip embed="foobar">
                              <extLst>
                                <ext/>
                              </extLst>
                            </blip>
                          </blipFill>
                          <spPr>
                            <xfrm>
                              <ext cx="{cx}" cy="{cy}"/>
                            </xfrm>
                          </spPr>
                        </pic>
                      </graphicData>
                    </graphic>
                  </anchor>
                </drawing>
                <t>Bar</t>
              </r>
            </p>
        '''.format(
            cx=width_px * EMUS_PER_PIXEL,
            cy=height_px * EMUS_PER_PIXEL,
        )
        document = WordprocessingDocumentFactory()
        image_url = 'http://google.com/image1.gif'
        document_rels = document.relationship_format.format(
            id='foobar',
            type=ImagePart.relationship_type,
            target=image_url,
            target_mode='External',
        )
        document.add(MainDocumentPart, document_xml, document_rels)
        expected_html = '''
            <p>
              Foo
              <img
                height="{height}px"
                src="http://google.com/image1.gif"
                width="{width}px"
              />
              Bar
            </p>
        '''.format(width=width_px, height=height_px)
        self.assert_document_generates_html(document, expected_html)

    def test_anchor_with_no_size_ext(self):
        """Image renders without width/height when xfrm gives no size."""
        # Ensure the image html is still rendered even if the size cannot be
        # calculated
        document_xml = '''
            <p>
              <r>
                <t>Foo</t>
                <drawing>
                  <anchor>
                    <graphic>
                      <graphicData>
                        <pic>
                          <blipFill>
                            <blip embed="foobar"/>
                          </blipFill>
                          <spPr>
                            <xfrm/>
                          </spPr>
                        </pic>
                      </graphicData>
                    </graphic>
                  </anchor>
                </drawing>
                <t>Bar</t>
              </r>
            </p>
        '''
        document = WordprocessingDocumentFactory()
        image_url = 'http://google.com/image1.gif'
        document_rels = document.relationship_format.format(
            id='foobar',
            type=ImagePart.relationship_type,
            target=image_url,
            target_mode='External',
        )
        document.add(MainDocumentPart, document_xml, document_rels)
        expected_html = '''
            <p>
              Foo
              <img src="http://google.com/image1.gif" />
              Bar
            </p>
        '''
        self.assert_document_generates_html(document, expected_html)

    def test_blip_embed_refers_to_undefined_image_relationship(self):
        """An unknown relationship id skips image rendering entirely."""
        # Ensure that if a blip embed refers to an undefined image
        # relationshipp, the image rendering is skipped
        document_xml = '''
            <p>
              <r>
                <t>Foo</t>
                <drawing>
                  <anchor>
                    <graphic>
                      <graphicData>
                        <pic>
                          <blipFill>
                            <blip embed="foobar" />
                          </blipFill>
                        </pic>
                      </graphicData>
                    </graphic>
                  </anchor>
                </drawing>
                <t>Bar</t>
              </r>
            </p>
        '''
        document = WordprocessingDocumentFactory()
        # No relationships added on purpose: 'foobar' stays undefined
        document.add(MainDocumentPart, document_xml)
        expected_html = '<p>FooBar</p>'
        self.assert_document_generates_html(document, expected_html)

    def test_internal_image_is_included_with_base64_content(self):
        """Internal image parts are inlined as base64 data URIs."""
        width_px = 5
        height_px = 10
        document_xml = '''
            <p>
              <r>
                <t>Foo</t>
                <drawing>
                  <anchor>
                    <graphic>
                      <graphicData>
                        <pic>
                          <blipFill>
                            <blip embed="foobar" />
                          </blipFill>
                          <spPr>
                            <xfrm>
                              <ext cx="{cx}" cy="{cy}"/>
                            </xfrm>
                          </spPr>
                        </pic>
                      </graphicData>
                    </graphic>
                  </anchor>
                </drawing>
                <t>Bar</t>
              </r>
            </p>
        '''.format(
            cx=width_px * EMUS_PER_PIXEL,
            cy=height_px * EMUS_PER_PIXEL,
        )
        document = WordprocessingDocumentFactory()
        document_rels = document.relationship_format.format(
            id='foobar',
            type=ImagePart.relationship_type,
            target='media/image1.jpeg',
            target_mode='Internal',
        )
        document.add(MainDocumentPart, document_xml, document_rels)
        image_data = 'fake data'
        expected_html = '''
            <p>
              Foo
              <img
                height="{height}px"
                src="data:image/jpeg;base64,{data}"
                width="{width}px"
              />
              Bar
            </p>
        '''.format(
            width=width_px,
            height=height_px,
            # This is kind of weird, needed otherwise python 3.3 breaks
            data=base64.b64encode(image_data.encode('utf-8')).decode('utf-8'),
        )
        self.assert_document_generates_html(
            document,
            expected_html,
            additional_parts={
                'word/media/image1.jpeg': image_data,
            },
        )

    def test_internal_image_is_not_included_if_part_is_missing(self):
        """A dangling internal relationship (no part data) skips the image."""
        width_px = 5
        height_px = 10
        document_xml = '''
            <p>
              <r>
                <t>Foo</t>
                <drawing>
                  <anchor>
                    <graphic>
                      <graphicData>
                        <pic>
                          <blipFill>
                            <blip embed="foobar" />
                          </blipFill>
                          <spPr>
                            <xfrm>
                              <ext cx="{cx}" cy="{cy}"/>
                            </xfrm>
                          </spPr>
                        </pic>
                      </graphicData>
                    </graphic>
                  </anchor>
                </drawing>
                <t>Bar</t>
              </r>
            </p>
        '''.format(
            cx=width_px * EMUS_PER_PIXEL,
            cy=height_px * EMUS_PER_PIXEL,
        )
        document = WordprocessingDocumentFactory()
        document_rels = document.relationship_format.format(
            id='foobar',
            type=ImagePart.relationship_type,
            target='media/image1.jpeg',
            target_mode='Internal',
        )
        document.add(MainDocumentPart, document_xml, document_rels)
        expected_html = '<p>FooBar</p>'
        self.assert_document_generates_html(
            document,
            expected_html,
            additional_parts={
                # Purposefully commented out
                # 'word/media/image1.jpeg': '',
            },
        )
| 29.384181
| 78
| 0.422034
| 820
| 10,402
| 5.153659
| 0.167073
| 0.02319
| 0.025556
| 0.026503
| 0.804307
| 0.774255
| 0.768339
| 0.763133
| 0.763133
| 0.754614
| 0
| 0.006868
| 0.482119
| 10,402
| 353
| 79
| 29.467422
| 0.777613
| 0.053163
| 0
| 0.861386
| 0
| 0
| 0.548394
| 0.005795
| 0
| 0
| 0
| 0
| 0.019802
| 1
| 0.019802
| false
| 0
| 0.023102
| 0
| 0.046205
| 0.0033
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a075c4d6e61059a94becc41d6c616a24115804bb
| 102
|
py
|
Python
|
chainer_dense_fusion/visualizations/__init__.py
|
knorth55/chainer-dense-fusion
|
8ff53173d7071fc2cfcd05b1e0b2c544aeed090b
|
[
"MIT"
] | 22
|
2019-01-31T23:50:30.000Z
|
2021-09-13T09:41:00.000Z
|
chainer_dense_fusion/visualizations/__init__.py
|
knorth55/chainer-dense-fusion
|
8ff53173d7071fc2cfcd05b1e0b2c544aeed090b
|
[
"MIT"
] | 4
|
2019-07-31T14:40:06.000Z
|
2022-03-16T13:32:45.000Z
|
chainer_dense_fusion/visualizations/__init__.py
|
knorth55/chainer-dense-fusion
|
8ff53173d7071fc2cfcd05b1e0b2c544aeed090b
|
[
"MIT"
] | 3
|
2019-08-30T09:18:45.000Z
|
2020-03-03T16:07:51.000Z
|
from chainer_dense_fusion.visualizations.vis_6d_pose_estimation import vis_6d_pose_estimation # NOQA
| 51
| 101
| 0.901961
| 15
| 102
| 5.6
| 0.733333
| 0.119048
| 0.214286
| 0.452381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021053
| 0.068627
| 102
| 1
| 102
| 102
| 0.863158
| 0.039216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
26023fb2fe35eecfaa07de370d0a3755ea9bf29c
| 13,324
|
py
|
Python
|
pimdm/tree/assert_state.py
|
leoplo/pim_dm
|
e097fb8e247b14f142b6aa97d8ee34440aeba806
|
[
"MIT"
] | 6
|
2020-02-04T20:59:59.000Z
|
2021-11-24T09:56:07.000Z
|
pimdm/tree/assert_state.py
|
leoplo/pim_dm
|
e097fb8e247b14f142b6aa97d8ee34440aeba806
|
[
"MIT"
] | 4
|
2020-04-10T14:51:39.000Z
|
2022-02-14T00:59:21.000Z
|
pimdm/tree/assert_state.py
|
leoplo/pim_dm
|
e097fb8e247b14f142b6aa97d8ee34440aeba806
|
[
"MIT"
] | 3
|
2020-08-13T17:56:35.000Z
|
2021-11-24T11:03:12.000Z
|
from abc import ABCMeta, abstractmethod
from pimdm.tree import pim_globals
from .metric import AssertMetric
from pimdm.utils import TYPE_CHECKING
if TYPE_CHECKING:
from .tree_if_downstream import TreeInterfaceDownstream
class AssertStateABC(metaclass=ABCMeta):
    """Abstract per-(S,G) Assert state machine of a downstream interface.

    Concrete states implement the transition handlers below; every handler
    is a @staticmethod receiving the TreeInterfaceDownstream it acts on.
    """

    @staticmethod
    @abstractmethod
    def receivedDataFromDownstreamIf(interface: "TreeInterfaceDownstream"):
        """
        An (S,G) Data packet received on downstream interface

        @type interface: TreeInterface
        """
        raise NotImplementedError()

    @staticmethod
    @abstractmethod
    def receivedInferiorMetricFromWinner(interface: "TreeInterfaceDownstream"):
        """
        Receive Inferior (Assert OR State Refresh) from Assert Winner

        @type interface: TreeInterface
        """
        raise NotImplementedError()

    @staticmethod
    @abstractmethod
    def receivedInferiorMetricFromNonWinner_couldAssertIsTrue(interface: "TreeInterfaceDownstream"):
        """
        Receive Inferior (Assert OR State Refresh) from non-Assert Winner
        AND CouldAssert==TRUE

        @type interface: TreeInterface
        """
        raise NotImplementedError()

    @staticmethod
    @abstractmethod
    def receivedPreferedMetric(interface: "TreeInterfaceDownstream", better_metric, is_metric_equal):
        """
        Receive Preferred Assert OR State Refresh

        @type interface: TreeInterface
        @type better_metric: AssertMetric
        """
        raise NotImplementedError()

    @staticmethod
    @abstractmethod
    def sendStateRefresh(interface: "TreeInterfaceDownstream", time):
        """
        Send State Refresh

        @type interface: TreeInterface
        @type time: int
        """
        raise NotImplementedError()

    @staticmethod
    @abstractmethod
    def assertTimerExpires(interface: "TreeInterfaceDownstream"):
        """
        AT(S,G) Expires

        @type interface: TreeInterface
        """
        raise NotImplementedError()

    @staticmethod
    @abstractmethod
    def couldAssertIsNowFalse(interface: "TreeInterfaceDownstream"):
        """
        CouldAssert -> FALSE

        @type interface: TreeInterface
        """
        raise NotImplementedError()

    @staticmethod
    @abstractmethod
    def couldAssertIsNowTrue(interface: "TreeInterfaceDownstream"):
        """
        CouldAssert -> TRUE

        @type interface: TreeInterface
        """
        raise NotImplementedError()

    @staticmethod
    @abstractmethod
    def winnerLivelinessTimerExpires(interface: "TreeInterfaceDownstream"):
        """
        Winner’s NLT(N,I) Expires

        @type interface: TreeInterface
        """
        raise NotImplementedError()

    @staticmethod
    @abstractmethod
    def receivedPruneOrJoinOrGraft(interface: "TreeInterfaceDownstream"):
        """
        Receive Prune(S,G), Join(S,G) or Graft(S,G)

        @type interface: TreeInterface
        """
        raise NotImplementedError()

    @staticmethod
    def _sendAssert_setAT(interface: "TreeInterfaceDownstream"):
        """Shared helper: arm AT(S,G) with ASSERT_TIME and send an Assert.

        Consistency fix: every other method on this ABC is a @staticmethod
        and this helper is always invoked through the class
        (e.g. NoInfoState._sendAssert_setAT(interface)), so it now carries
        the decorator too.
        """
        interface.set_assert_timer(pim_globals.ASSERT_TIME)
        interface.send_assert()

    @staticmethod
    @abstractmethod
    def is_preferred_assert(interface: "TreeInterfaceDownstream", received_metric):
        """Return True if received_metric should win over the stored one."""
        raise NotImplementedError()

    # Override
    def __str__(self) -> str:
        # e.g. "AssertSM:NoInfoState" — useful in logs.
        return "AssertSM:" + self.__class__.__name__
class NoInfoState(AssertStateABC):
'''
NoInfoState (NI)
This router has no (S,G) Assert state on interface I.
'''
@staticmethod
def is_preferred_assert(interface: "TreeInterfaceDownstream", received_metric):
    # In NoInfo state, any metric strictly better than the interface's
    # currently stored assert-winner metric is preferred.
    return received_metric.is_better_than(interface._assert_winner_metric)
@staticmethod
def receivedDataFromDownstreamIf(interface: "TreeInterfaceDownstream"):
    """
    Data packet arrived on the downstream interface: become Assert Winner.

    @type interface: TreeInterface
    """
    # NI -> W: record our own metric as the winner metric, switch state,
    # then send an Assert and (re)arm the Assert Timer.
    interface.assert_logger.debug('receivedDataFromDownstreamIf, NI -> W')
    interface.set_assert_winner_metric(interface.my_assert_metric())
    interface.set_assert_state(AssertState.Winner)
    NoInfoState._sendAssert_setAT(interface)
@staticmethod
def receivedInferiorMetricFromWinner(interface: "TreeInterfaceDownstream"):
    # In NoInfo state there is no known Assert Winner, so this event is
    # impossible by construction.
    assert False, "this should never ocurr"
@staticmethod
def receivedInferiorMetricFromNonWinner_couldAssertIsTrue(interface: "TreeInterfaceDownstream"):
interface.assert_logger.debug('receivedInferiorMetricFromNonWinner_couldAssertIsTrue, NI -> W')
interface.set_assert_winner_metric(interface.my_assert_metric())
interface.set_assert_state(AssertState.Winner)
NoInfoState._sendAssert_setAT(interface)
@staticmethod
def receivedPreferedMetric(interface: "TreeInterfaceDownstream", better_metric, is_metric_equal):
'''
@type interface: TreeInterface
'''
if is_metric_equal:
return
interface.assert_logger.debug('receivedPreferedMetric, NI -> L')
state_refresh_interval = better_metric.state_refresh_interval
if state_refresh_interval is None:
# event caused by Assert Msg
assert_timer_value = pim_globals.ASSERT_TIME
else:
# event caused by StateRefreshMsg
assert_timer_value = state_refresh_interval*3
interface.set_assert_timer(assert_timer_value)
interface.set_assert_winner_metric(better_metric)
interface.set_assert_state(AssertState.Loser)
# MUST also multicast a Prune(S,G) to the Assert winner
if interface.could_assert():
interface.send_prune(holdtime=assert_timer_value)
@staticmethod
def sendStateRefresh(interface: "TreeInterfaceDownstream", time):
pass
@staticmethod
def assertTimerExpires(interface: "TreeInterfaceDownstream"):
assert False, "this should never ocurr"
@staticmethod
def couldAssertIsNowFalse(interface: "TreeInterfaceDownstream"):
interface.assert_logger.debug('couldAssertIsNowFalse, NI -> NI')
@staticmethod
def couldAssertIsNowTrue(interface: "TreeInterfaceDownstream"):
interface.assert_logger.debug('couldAssertIsNowTrue, NI -> NI')
@staticmethod
def winnerLivelinessTimerExpires(interface: "TreeInterfaceDownstream"):
assert False, "this should never ocurr"
@staticmethod
def receivedPruneOrJoinOrGraft(interface: "TreeInterfaceDownstream"):
interface.assert_logger.debug('receivedPruneOrJoinOrGraft, NI -> NI')
def __str__(self) -> str:
return "NoInfo"
class WinnerState(AssertStateABC):
    """
    I am Assert Winner (W)

    This router has won an (S,G) Assert on interface I and is responsible
    for forwarding traffic from S destined for G via interface I.
    """

    @staticmethod
    def is_preferred_assert(interface: "TreeInterfaceDownstream", received_metric):
        # As the current winner, compare against our own metric.
        return received_metric.is_better_than(interface.my_assert_metric())

    @staticmethod
    def receivedDataFromDownstreamIf(interface: "TreeInterfaceDownstream"):
        """Reassert winnership and re-arm the timer (W -> W)."""
        interface.assert_logger.debug('receivedDataFromDownstreamIf, W -> W')
        WinnerState._sendAssert_setAT(interface)

    @staticmethod
    def receivedInferiorMetricFromWinner(interface: "TreeInterfaceDownstream"):
        # We are the winner; there is no remote winner to hear from.
        assert False, "this should never ocurr"

    @staticmethod
    def receivedInferiorMetricFromNonWinner_couldAssertIsTrue(interface: "TreeInterfaceDownstream"):
        interface.assert_logger.debug('receivedInferiorMetricFromNonWinner_couldAssertIsTrue, W -> W')
        WinnerState._sendAssert_setAT(interface)

    @staticmethod
    def receivedPreferedMetric(interface: "TreeInterfaceDownstream", better_metric, is_metric_equal):
        """A strictly better assert arrived: concede and become Loser (W -> L)."""
        if is_metric_equal:
            return
        interface.assert_logger.debug('receivedPreferedMetric, W -> L')
        refresh = better_metric.state_refresh_interval
        # Assert messages use the fixed assert time; StateRefresh messages
        # derive the timer from their advertised interval.
        timer_value = pim_globals.ASSERT_TIME if refresh is None else refresh * 3
        interface.set_assert_timer(timer_value)
        interface.set_assert_winner_metric(better_metric)
        interface.set_assert_state(AssertState.Loser)
        interface.send_prune(holdtime=timer_value)

    @staticmethod
    def sendStateRefresh(interface: "TreeInterfaceDownstream", state_refresh_interval):
        # Each originated StateRefresh keeps our winner state alive.
        interface.set_assert_timer(state_refresh_interval * 3)

    @staticmethod
    def assertTimerExpires(interface: "TreeInterfaceDownstream"):
        """Assert state timed out: forget it (W -> NI)."""
        interface.assert_logger.debug('assertTimerExpires, W -> NI')
        interface.set_assert_winner_metric(AssertMetric.infinite_assert_metric())
        interface.set_assert_state(AssertState.NoInfo)

    @staticmethod
    def couldAssertIsNowFalse(interface: "TreeInterfaceDownstream"):
        """We may no longer assert: cancel, clear the timer and reset (W -> NI)."""
        interface.assert_logger.debug('couldAssertIsNowFalse, W -> NI')
        interface.send_assert_cancel()
        interface.clear_assert_timer()
        interface.set_assert_winner_metric(AssertMetric.infinite_assert_metric())
        interface.set_assert_state(AssertState.NoInfo)

    @staticmethod
    def couldAssertIsNowTrue(interface: "TreeInterfaceDownstream"):
        # CouldAssert was necessarily already true for us to have won.
        assert False, "this should never ocurr"

    @staticmethod
    def winnerLivelinessTimerExpires(interface: "TreeInterfaceDownstream"):
        # We do not track a remote winner while we are the winner.
        assert False, "this should never ocurr"

    @staticmethod
    def receivedPruneOrJoinOrGraft(interface: "TreeInterfaceDownstream"):
        pass

    def __str__(self) -> str:
        return "Winner"
class LoserState(AssertStateABC):
    """
    I am Assert Loser (L)

    This router has lost an (S,G) Assert on interface I. It must not
    forward packets from S destined for G onto interface I.
    """

    @staticmethod
    def is_preferred_assert(interface: "TreeInterfaceDownstream", received_metric):
        # While losing, an equal metric also counts as preferred.
        winner_metric = interface._assert_winner_metric
        return (received_metric.is_better_than(winner_metric)
                or received_metric.equal_metric(winner_metric))

    @staticmethod
    def receivedDataFromDownstreamIf(interface: "TreeInterfaceDownstream"):
        interface.assert_logger.debug('receivedDataFromDownstreamIf, L -> L')

    @staticmethod
    def receivedInferiorMetricFromWinner(interface: "TreeInterfaceDownstream"):
        """The winner's metric degraded below ours: drop assert state (L -> NI)."""
        interface.assert_logger.debug('receivedInferiorMetricFromWinner, L -> NI')
        LoserState._to_NoInfo(interface)

    @staticmethod
    def receivedInferiorMetricFromNonWinner_couldAssertIsTrue(interface: "TreeInterfaceDownstream"):
        interface.assert_logger.debug('receivedInferiorMetricFromNonWinner_couldAssertIsTrue, L -> L')

    @staticmethod
    def receivedPreferedMetric(interface: "TreeInterfaceDownstream", better_metric, is_metric_equal):
        """Refresh loser state with the (possibly new) winner's metric (L -> L)."""
        interface.assert_logger.debug('receivedPreferedMetric, L -> L')
        refresh = better_metric.state_refresh_interval
        # Assert messages use the fixed assert time; StateRefresh messages
        # derive the timer from their advertised interval.
        timer_value = pim_globals.ASSERT_TIME if refresh is None else refresh * 3
        interface.set_assert_timer(timer_value)
        interface.set_assert_winner_metric(better_metric)
        interface.set_assert_state(AssertState.Loser)
        # Only a strictly better (not equal) metric triggers a Prune.
        if not is_metric_equal and interface.could_assert():
            interface.send_prune(holdtime=timer_value)

    @staticmethod
    def sendStateRefresh(interface: "TreeInterfaceDownstream", time):
        # A loser never originates StateRefresh messages.
        assert False, "this should never ocurr"

    @staticmethod
    def assertTimerExpires(interface: "TreeInterfaceDownstream"):
        interface.assert_logger.debug('assertTimerExpires, L -> NI')
        LoserState._to_NoInfo(interface)

    @staticmethod
    def couldAssertIsNowFalse(interface: "TreeInterfaceDownstream"):
        interface.assert_logger.debug('couldAssertIsNowFalse, L -> NI')
        LoserState._to_NoInfo(interface)

    @staticmethod
    def couldAssertIsNowTrue(interface: "TreeInterfaceDownstream"):
        interface.assert_logger.debug('couldAssertIsNowTrue, L -> NI')
        LoserState._to_NoInfo(interface)

    @staticmethod
    def winnerLivelinessTimerExpires(interface: "TreeInterfaceDownstream"):
        interface.assert_logger.debug('winnerLivelinessTimerExpires, L -> NI')
        LoserState._to_NoInfo(interface)

    @staticmethod
    def receivedPruneOrJoinOrGraft(interface: "TreeInterfaceDownstream"):
        interface.assert_logger.debug('receivedPruneOrJoinOrGraft, L -> L')
        interface.send_assert()

    @staticmethod
    def _to_NoInfo(interface: "TreeInterfaceDownstream"):
        """Shared transition into NoInfo: clear timer and forget the winner."""
        interface.clear_assert_timer()
        interface.set_assert_winner_metric(AssertMetric.infinite_assert_metric())
        interface.set_assert_state(AssertState.NoInfo)

    def __str__(self) -> str:
        return "Loser"
class AssertState():
    """Enum-like namespace holding the singleton instance of each Assert state."""
    NoInfo = NoInfoState()
    Winner = WinnerState()
    Loser = LoserState()
| 33.903308
| 103
| 0.711048
| 1,176
| 13,324
| 7.82483
| 0.118197
| 0.159965
| 0.041078
| 0.056509
| 0.807651
| 0.748316
| 0.726581
| 0.726581
| 0.659965
| 0.617257
| 0
| 0.00038
| 0.210973
| 13,324
| 392
| 104
| 33.989796
| 0.874917
| 0.113404
| 0
| 0.779736
| 0
| 0
| 0.175839
| 0.14202
| 0
| 0
| 0
| 0
| 0.440529
| 1
| 0.220264
| false
| 0.008811
| 0.022026
| 0.030837
| 0.317181
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
260e054fc78ceb55c7ef8d4568e5ad88b73646a2
| 308
|
py
|
Python
|
back/api/email.py
|
NOBLEGG/RPS
|
0088af2db89e070377f56b0e2ebb34b5d7999fc0
|
[
"RSA-MD"
] | null | null | null |
back/api/email.py
|
NOBLEGG/RPS
|
0088af2db89e070377f56b0e2ebb34b5d7999fc0
|
[
"RSA-MD"
] | 7
|
2021-03-19T13:26:27.000Z
|
2022-02-27T05:28:30.000Z
|
back/api/email.py
|
NOBLEGG/RPS
|
0088af2db89e070377f56b0e2ebb34b5d7999fc0
|
[
"RSA-MD"
] | null | null | null |
def active_message(domain, uidb64, token):
    """Build the (Korean) account-activation e-mail body containing the confirm link."""
    link = f"https://{domain}/activate/{uidb64}/{token}"
    return f"아래 링크를 클릭하시면 인증이 완료되며, 바로 로그인하실 수 있습니다.\n\n링크 : {link}\n\n감사합니다."
def reset_message(domain, uidb64, token):
    """Build the (Korean) password-reset e-mail body containing the reset link."""
    link = f"https://{domain}/reset/{uidb64}/{token}"
    return f"아래 링크를 클릭하시면 비밀번호 변경을 진행하실 수 있습니다.\n\n링크 : {link}\n\n감사합니다."
| 51.333333
| 114
| 0.698052
| 51
| 308
| 4.176471
| 0.509804
| 0.206573
| 0.178404
| 0.225352
| 0.57277
| 0.57277
| 0.384977
| 0.384977
| 0.384977
| 0
| 0
| 0.029851
| 0.12987
| 308
| 5
| 115
| 61.6
| 0.764925
| 0
| 0
| 0
| 0
| 0.5
| 0.623377
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
26249f2d384f2cf9aa52dcca113a31404e295b4e
| 105
|
py
|
Python
|
multilingual_project_blog/settings/__init__.py
|
bigee/django-multilingual-project-blog
|
c62d568ac42b3c7f711624bf7029ac56e7342b6a
|
[
"MIT"
] | null | null | null |
multilingual_project_blog/settings/__init__.py
|
bigee/django-multilingual-project-blog
|
c62d568ac42b3c7f711624bf7029ac56e7342b6a
|
[
"MIT"
] | 8
|
2020-02-11T23:25:46.000Z
|
2022-03-11T23:15:31.000Z
|
multilingual_project_blog/settings/__init__.py
|
bigee/django-multilingual-project-blog
|
c62d568ac42b3c7f711624bf7029ac56e7342b6a
|
[
"MIT"
] | null | null | null |
# flake8: noqa
from .base_settings import *
from .django_settings import *
from .installed_apps import *
| 21
| 30
| 0.780952
| 14
| 105
| 5.642857
| 0.642857
| 0.35443
| 0.455696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011111
| 0.142857
| 105
| 4
| 31
| 26.25
| 0.866667
| 0.114286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2658998d970dc0a72102e1ef78ea720e04cb88a8
| 1,570
|
py
|
Python
|
diwali-wishes/code.py
|
AtmaMani/py-micro
|
7fb4d4b28aa55172c351a69d3e50ef17f905ebb5
|
[
"MIT"
] | null | null | null |
diwali-wishes/code.py
|
AtmaMani/py-micro
|
7fb4d4b28aa55172c351a69d3e50ef17f905ebb5
|
[
"MIT"
] | null | null | null |
diwali-wishes/code.py
|
AtmaMani/py-micro
|
7fb4d4b28aa55172c351a69d3e50ef17f905ebb5
|
[
"MIT"
] | null | null | null |
# CircuitPython NeoPixel animation for a 10-pixel strip (e.g. Circuit
# Playground): chase the strip white, flash it, then blink red/green/blue.
# Indentation reconstructed — the pasted source had its leading whitespace
# stripped; structure inferred from the loop/statement sequence (TODO confirm
# against the original file).
import board
import neopixel
import time

pixels = neopixel.NeoPixel(board.NEOPIXEL, 10)
pixels.brightness = 0.01  # keep the LEDs dim

while True:
    # animate first 3 led
    for i in range(4):
        pixels[i] = (255,255,255)
        time.sleep(0.15)
    # animate 4 - 10 led faster
    for i in range(4,10):
        pixels[i] = (255,255,255)
        time.sleep(0.1)
    # fill all white (original comment said "blank out", but the value is white)
    pixels.fill((255,255,255))
    # animate first 3 led
    for i in range(4):
        pixels[i] = (0,0,0)
        time.sleep(0.1)
    pixels.fill((255,255,255))
    # animate 4 - 10 led faster
    for i in range(4,10):
        pixels[i] = (0,0,0)
        time.sleep(0.05)
    pixels.fill((255,255,255))
    # NOTE(review): the next five off-then-white passes are identical copies —
    # presumably a deliberate flicker effect; confirm before deduplicating.
    for i in range(len(pixels)):
        pixels[i] = (0,0,0)
        time.sleep(0.01)
    pixels.fill((255,255,255))
    for i in range(len(pixels)):
        pixels[i] = (0,0,0)
        time.sleep(0.01)
    pixels.fill((255,255,255))
    for i in range(len(pixels)):
        pixels[i] = (0,0,0)
        time.sleep(0.01)
    pixels.fill((255,255,255))
    for i in range(len(pixels)):
        pixels[i] = (0,0,0)
        time.sleep(0.01)
    pixels.fill((255,255,255))
    for i in range(len(pixels)):
        pixels[i] = (0,0,0)
        time.sleep(0.01)
    pixels.fill((255,255,255))
    # blink sequence: off, red, off, green, off, blue
    pixels.fill((0,0,0))
    time.sleep(0.1)
    pixels.fill((255,0,0))
    time.sleep(0.1)
    pixels.fill((0,0,0))
    time.sleep(0.1)
    pixels.fill((0,255,0))
    time.sleep(0.1)
    pixels.fill((0,0,0))
    time.sleep(0.1)
    pixels.fill((0,0,255))
    time.sleep(0.1)
| 22.428571
| 46
| 0.535032
| 265
| 1,570
| 3.169811
| 0.113208
| 0.052381
| 0.178571
| 0.157143
| 0.863095
| 0.845238
| 0.815476
| 0.815476
| 0.754762
| 0.754762
| 0
| 0.172784
| 0.288535
| 1,570
| 70
| 47
| 22.428571
| 0.57923
| 0.064331
| 0
| 0.792453
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.056604
| 0
| 0.056604
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cd370987032767e358bed122591bc54a6d5651ed
| 465
|
py
|
Python
|
temp/test.py
|
Team20s/blockchain_python
|
438eb4540198693f4f592541d671a2056adb8a42
|
[
"Apache-2.0"
] | 1
|
2021-11-04T05:46:19.000Z
|
2021-11-04T05:46:19.000Z
|
temp/test.py
|
Team20s/blockchain_python
|
438eb4540198693f4f592541d671a2056adb8a42
|
[
"Apache-2.0"
] | null | null | null |
temp/test.py
|
Team20s/blockchain_python
|
438eb4540198693f4f592541d671a2056adb8a42
|
[
"Apache-2.0"
] | null | null | null |
from bitcoin import *

# Derive and print the private key, public key and address for a few
# deterministic "brain wallet" seeds. The original script repeated the same
# four-statement block verbatim for each seed; a loop removes the copy-paste
# duplication while producing identical output (and leaves priv/pubk/addr
# bound to the values for the last seed, exactly as before).
for seed in ('a', 'b', 'c', 'd'):
    priv = sha256(seed)
    pubk = privtopub(priv)
    addr = pubtoaddr(pubk)
    print(priv)
    print(pubk)
    print(addr)
    print()
| 12.567568
| 22
| 0.698925
| 67
| 465
| 4.850746
| 0.208955
| 0.221538
| 0.209231
| 0.258462
| 0.904615
| 0.904615
| 0.904615
| 0.904615
| 0.904615
| 0.904615
| 0
| 0.029851
| 0.135484
| 465
| 37
| 23
| 12.567568
| 0.778607
| 0
| 0
| 0.827586
| 0
| 0
| 0.008584
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.034483
| 0
| 0.034483
| 0.551724
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
cd75e4b929ff589bf6a42b8d6a2cdbd0d5ad51b1
| 137
|
py
|
Python
|
src/engagement_service/sensors.py
|
littlebee/shelly-bot
|
e25f2759bf1c7ac61bacbe70221910184e49beba
|
[
"MIT"
] | null | null | null |
src/engagement_service/sensors.py
|
littlebee/shelly-bot
|
e25f2759bf1c7ac61bacbe70221910184e49beba
|
[
"MIT"
] | null | null | null |
src/engagement_service/sensors.py
|
littlebee/shelly-bot
|
e25f2759bf1c7ac61bacbe70221910184e49beba
|
[
"MIT"
] | null | null | null |
from common.hardware.adeept_ultrasonic import get_distance
# returns float in meters
def camera_distance():
    """Return the ultrasonic sensor reading (per the module comment above,
    a float in meters) via the adeept_ultrasonic helper."""
    return get_distance()
| 17.125
| 58
| 0.79562
| 18
| 137
| 5.833333
| 0.833333
| 0.209524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145985
| 137
| 7
| 59
| 19.571429
| 0.897436
| 0.167883
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 7
|
cd94070ee4a3832f7b976ff0ad7c1269cbb6e8bf
| 197
|
py
|
Python
|
brainlit/algorithms/__init__.py
|
alishakodibagkar/brainlit
|
2dc12eac3ea71412a36ecace3bab2deebd2ef29c
|
[
"Apache-2.0"
] | null | null | null |
brainlit/algorithms/__init__.py
|
alishakodibagkar/brainlit
|
2dc12eac3ea71412a36ecace3bab2deebd2ef29c
|
[
"Apache-2.0"
] | null | null | null |
brainlit/algorithms/__init__.py
|
alishakodibagkar/brainlit
|
2dc12eac3ea71412a36ecace3bab2deebd2ef29c
|
[
"Apache-2.0"
] | null | null | null |
import brainlit.algorithms.connect_fragments
import brainlit.algorithms.generate_fragments
from brainlit.algorithms.connect_fragments import *
from brainlit.algorithms.generate_fragments import *
| 32.833333
| 52
| 0.883249
| 22
| 197
| 7.727273
| 0.318182
| 0.423529
| 0.282353
| 0.4
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06599
| 197
| 5
| 53
| 39.4
| 0.923913
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
26e2879ab5a5dfc3861128ad1fc88e3a36887a6c
| 4,541
|
py
|
Python
|
code.py
|
Phyxius/quarantine-day-counter
|
67bf0f3b8c27e7a90c4e3aed91fce55e83aaffb9
|
[
"WTFPL"
] | null | null | null |
code.py
|
Phyxius/quarantine-day-counter
|
67bf0f3b8c27e7a90c4e3aed91fce55e83aaffb9
|
[
"WTFPL"
] | null | null | null |
code.py
|
Phyxius/quarantine-day-counter
|
67bf0f3b8c27e7a90c4e3aed91fce55e83aaffb9
|
[
"WTFPL"
] | null | null | null |
# MatrixPortal clock / quarantine-day counter / weather display.
#
# The source as committed was corrupt: the entire script was pasted a second
# time *inside* the final `microcontroller.reset(...)` call (a SyntaxError),
# and the handler read `except e:` (NameError at handling time). This is the
# reconstructed single copy — the second, `try:`-wrapped variant — with a
# valid `except Exception:` clause.
import time
import board
import terminalio
from adafruit_matrixportal.matrixportal import MatrixPortal
from secrets import secrets
import microcontroller

TEXT_COLOR = 0x202020
TIME_UPDATE_INTERVAL = 1 * 60 * 60  # re-fetch weather / re-sync clock hourly (seconds)
OPENWEATHER_API_URL = f"https://api.openweathermap.org/data/2.5/weather?q={secrets['zip']},{secrets.get('country','US')}&appid={secrets['openweather_api_key']}&units={secrets.get('units', 'imperial')}"
TEMP_PATH = ["main", "temp"]

matrixportal = MatrixPortal(
    status_neopixel=board.NEOPIXEL,
    bit_depth = 4
)

weather = None


def refresh_data():
    """Fetch the current temperature from OpenWeather and re-sync the clock."""
    global weather
    temp = matrixportal.network.fetch_data(OPENWEATHER_API_URL, json_path=(TEMP_PATH))[0]
    weather = int(temp)
    matrixportal.get_local_time()


# Label 0: large clock line; label 1: small status / date line.
matrixportal.add_text(
    text_font=terminalio.FONT,
    text_position=(3, 9),
    text_color=TEXT_COLOR,
    text_scale=2,
)
matrixportal.add_text(
    text_font=terminalio.FONT,
    text_position=(3, 24),
    text_color=TEXT_COLOR,
)

try:
    matrixportal.set_text("Connecting", 1)
    matrixportal.network.connect()
    matrixportal.set_text("Get time", 1)
    # matrixportal.get_local_time()
    refresh_data()
    last_time_refresh = time.time()
    epoch = 1584576000  # march 19, 2020 00:00 UTC
    seconds_per_day = 24 * 60 * 60
    days = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
    matrixportal.preload_font(b"012345789Quarantinedy")  # preload numbers
    while True:
        t = time.time()
        dt = time.localtime(t)
        # Convert to 12-hour display.
        hour = dt.tm_hour
        if hour == 0:
            hour = 12
        elif hour > 12:
            hour -= 12
        first_line = f"{hour:02}:{(dt.tm_min):02}"
        day_of_week = days[dt.tm_wday]
        date = ""
        # Rotate the second line every 30 s: quarantine day / temp / date.
        data_selector = t % 90
        if data_selector >= 60:
            day = (t - epoch) // seconds_per_day
            date = f"QD {day}"
        elif data_selector >= 30:
            date = f"{weather}F"
        else:
            date = f"{dt.tm_mon:02}/{dt.tm_mday:02}"
        matrixportal.set_text(first_line, 0)
        matrixportal.set_text(f"{day_of_week} {date}", 1)
        if (t - last_time_refresh > TIME_UPDATE_INTERVAL):
            refresh_data()
            # matrixportal.get_local_time()
            last_time_refresh = t
        time.sleep(0.05)
except Exception:
    # Any unhandled error (network drop, API failure, ...): hard-reset the board.
    microcontroller.reset()
| 29.296774
| 201
| 0.650518
| 604
| 4,541
| 4.668874
| 0.193709
| 0.031915
| 0.053901
| 0.051064
| 0.997872
| 0.997872
| 0.997872
| 0.997872
| 0.997872
| 0.997872
| 0
| 0.048023
| 0.220436
| 4,541
| 155
| 202
| 29.296774
| 0.748588
| 0.043823
| 0
| 0.931298
| 0
| 0.015267
| 0.155976
| 0.035533
| 0
| 0
| 0.003692
| 0
| 0
| 0
| null | null | 0
| 0.091603
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f83a61b23a4cf54a789b09e73ce12dbb4b510e89
| 98
|
py
|
Python
|
modules/transformations/__init__.py
|
bira37/puzzle-effect-filter
|
5960d741aab7bed1368d0f66699b7cde3ea8fb17
|
[
"MIT"
] | 1
|
2021-11-19T00:38:07.000Z
|
2021-11-19T00:38:07.000Z
|
modules/transformations/__init__.py
|
bira37/puzzle-effect-filter
|
5960d741aab7bed1368d0f66699b7cde3ea8fb17
|
[
"MIT"
] | null | null | null |
modules/transformations/__init__.py
|
bira37/puzzle-effect-filter
|
5960d741aab7bed1368d0f66699b7cde3ea8fb17
|
[
"MIT"
] | 1
|
2021-11-19T09:23:25.000Z
|
2021-11-19T09:23:25.000Z
|
from .transformations_handler import add_padding
from .transformations_handler import transform_v1
| 49
| 49
| 0.908163
| 12
| 98
| 7.083333
| 0.666667
| 0.447059
| 0.611765
| 0.752941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010989
| 0.071429
| 98
| 2
| 49
| 49
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f89e71799e940b6a08893000fb99cb18ef1f2397
| 342
|
py
|
Python
|
pacote-download/PythonTeste/aula07.py
|
Mariana02Santos/Python
|
a7bf3489b58d691578f888719c1ad6227ad39d74
|
[
"MIT"
] | null | null | null |
pacote-download/PythonTeste/aula07.py
|
Mariana02Santos/Python
|
a7bf3489b58d691578f888719c1ad6227ad39d74
|
[
"MIT"
] | null | null | null |
pacote-download/PythonTeste/aula07.py
|
Mariana02Santos/Python
|
a7bf3489b58d691578f888719c1ad6227ad39d74
|
[
"MIT"
] | null | null | null |
# ** is exponentiation (equivalent to pow(4, 3))
# // is integer (floor) division
nom = input('Comment vous vous appellez? ')
# str.format field widths: {:20} left-aligns in 20 columns, {:>20} right-aligns,
# {:^20} centers, and {:=^20} centers padding with '=' characters.
print('Cest un plaisir de vous conaitre {:20}!'.format(nom))
print('Cest un plaisir de vous conaitre {:>20}!'.format(nom))
print('Cest un plaisir de vous conaitre {:^20}!'.format(nom))
print('Cest un plaisir de vous conaitre {:=^20}!'.format(nom))
| 31.090909
| 62
| 0.678363
| 53
| 342
| 4.377358
| 0.377358
| 0.155172
| 0.189655
| 0.310345
| 0.741379
| 0.741379
| 0.741379
| 0.741379
| 0.741379
| 0.741379
| 0
| 0.033898
| 0.137427
| 342
| 10
| 63
| 34.2
| 0.752542
| 0.122807
| 0
| 0
| 0
| 0
| 0.639456
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.8
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
f8a78c723191f2bc762ec7170a8bb695008ce29c
| 5,062
|
py
|
Python
|
modules/test_cases.py
|
vince-wu/CompositionalDrift
|
96f0cca2197d8398128f49b394b36f6d8b5bdc04
|
[
"MIT"
] | 5
|
2020-03-18T05:03:03.000Z
|
2021-06-16T18:05:34.000Z
|
modules/test_cases.py
|
vince-wu/CompositionalDrift
|
96f0cca2197d8398128f49b394b36f6d8b5bdc04
|
[
"MIT"
] | 5
|
2019-05-20T22:40:18.000Z
|
2021-06-01T23:46:09.000Z
|
modules/test_cases.py
|
vince-wu/CompositionalDrift
|
96f0cca2197d8398128f49b394b36f6d8b5bdc04
|
[
"MIT"
] | 1
|
2020-10-20T22:31:37.000Z
|
2020-10-20T22:31:37.000Z
|
import math
import numpy as np
from structures import Reaction, Polymer
def some_zero_valued_reactivities():
    """
    DESCRIPTION: Generates a reaction with zeros at random positions in the
    reactivity ratio list
    RETURNS: Reaction object
    """
    n = np.random.randint(2, 4)
    model = "Mayo-Lewis"
    max_monomer_amounts = 5000
    avg_DP = np.random.randint(1, 500)
    reactivity_ratios = np.random.rand(n, n-1)
    # choose a random number of entries to zero out
    num_zeros = np.random.randint(n*(n-1))
    # put zeros into the reactivity ratio matrix (positions may repeat)
    for i in range(num_zeros):
        row = np.random.randint(0, n)
        col = np.random.randint(0, n-1)
        reactivity_ratios[row][col] = 0
    conversion = np.random.rand()
    monomer_amounts = np.random.randint(low=100, high=max_monomer_amounts, size=n)
    chain_transfer_probability = np.random.rand()
    reaction = Reaction(n, model)
    # Fix: the original called set_monomer_amounts twice with the same value.
    reaction.set_monomer_amounts(monomer_amounts)
    reaction.set_reactivity_ratios(reactivity_ratios)
    reaction.set_average_DP(avg_DP)
    reaction.set_conversion(conversion)
    reaction.set_chain_transfer_probability(chain_transfer_probability)
    return reaction
def some_inf_valued_reactivities():
    """
    DESCRIPTION: Generates a reaction with infinities at random positions in
    the reactivity ratio list
    RETURNS: Reaction object
    """
    n = np.random.randint(2, 4)
    model = "Mayo-Lewis"
    max_monomer_amounts = 5000
    avg_DP = np.random.randint(1, 500)
    reactivity_ratios = np.random.rand(n, n-1)
    # choose a random number of entries to set to infinity (positions may repeat)
    num_infs = np.random.randint(n*(n-1))
    for i in range(num_infs):
        row = np.random.randint(0, n)
        col = np.random.randint(0, n-1)
        reactivity_ratios[row][col] = math.inf
    conversion = np.random.rand()
    monomer_amounts = np.random.randint(low=100, high=max_monomer_amounts, size=n)
    chain_transfer_probability = np.random.rand()
    reaction = Reaction(n, model)
    # Fix: the original called set_monomer_amounts twice with the same value.
    reaction.set_monomer_amounts(monomer_amounts)
    reaction.set_reactivity_ratios(reactivity_ratios)
    reaction.set_average_DP(avg_DP)
    reaction.set_conversion(conversion)
    reaction.set_chain_transfer_probability(chain_transfer_probability)
    return reaction
def all_zero_valued_reactivities():
    """
    DESCRIPTION: Generates a reaction with zeros in the entire reactivity ratio list
    RETURNS: Reaction object
    """
    n = np.random.randint(2, 4)
    model = "Mayo-Lewis"
    max_monomer_amounts = 5000
    avg_DP = np.random.randint(1, 500)
    reactivity_ratios = np.zeros((n, n-1))
    conversion = np.random.rand()
    monomer_amounts = np.random.randint(low=100, high=max_monomer_amounts, size=n)
    chain_transfer_probability = np.random.rand()
    reaction = Reaction(n, model)
    # Fix: the original called set_monomer_amounts twice with the same value.
    reaction.set_monomer_amounts(monomer_amounts)
    reaction.set_reactivity_ratios(reactivity_ratios)
    reaction.set_average_DP(avg_DP)
    reaction.set_conversion(conversion)
    reaction.set_chain_transfer_probability(chain_transfer_probability)
    return reaction
def all_inf_valued_reactivities():
    """
    DESCRIPTION: Generates a reaction with infinities in the entire reactivity
    ratio list (the original docstring incorrectly said "zeros")
    RETURNS: Reaction object
    """
    n = np.random.randint(2, 4)
    model = "Mayo-Lewis"
    max_monomer_amounts = 5000
    avg_DP = np.random.randint(1, 500)
    reactivity_ratios = np.full((n, n-1), math.inf)
    conversion = np.random.rand()
    monomer_amounts = np.random.randint(low=100, high=max_monomer_amounts, size=n)
    chain_transfer_probability = np.random.rand()
    reaction = Reaction(n, model)
    # Fix: the original called set_monomer_amounts twice with the same value.
    reaction.set_monomer_amounts(monomer_amounts)
    reaction.set_reactivity_ratios(reactivity_ratios)
    reaction.set_average_DP(avg_DP)
    reaction.set_conversion(conversion)
    reaction.set_chain_transfer_probability(chain_transfer_probability)
    return reaction
def dispersity_is_one():
    """
    DESCRIPTION: Generates a reaction whose average DP is fixed at 1, so the
    dispersity is exactly one (the original docstring was a copy-paste of the
    all-zeros case)
    RETURNS: Reaction object
    """
    n = np.random.randint(2, 4)
    model = "Mayo-Lewis"
    max_monomer_amounts = 5000
    avg_DP = 1
    reactivity_ratios = np.random.rand(n, n-1)
    conversion = np.random.rand()
    monomer_amounts = np.random.randint(low=100, high=max_monomer_amounts, size=n)
    chain_transfer_probability = np.random.rand()
    reaction = Reaction(n, model)
    # Fix: the original called set_monomer_amounts twice with the same value.
    reaction.set_monomer_amounts(monomer_amounts)
    reaction.set_reactivity_ratios(reactivity_ratios)
    reaction.set_average_DP(avg_DP)
    reaction.set_conversion(conversion)
    reaction.set_chain_transfer_probability(chain_transfer_probability)
    return reaction
# Zero-argument callables that each build one edge-case Reaction for the tests.
edge_case_reaction_generators = [
    some_zero_valued_reactivities,
    some_inf_valued_reactivities,
    all_zero_valued_reactivities,
    all_inf_valued_reactivities,
    dispersity_is_one,
]
| 37.220588
| 84
| 0.73627
| 675
| 5,062
| 5.266667
| 0.12
| 0.137834
| 0.084388
| 0.070323
| 0.897046
| 0.88917
| 0.879044
| 0.879044
| 0.855134
| 0.855134
| 0
| 0.018169
| 0.173647
| 5,062
| 136
| 85
| 37.220588
| 0.8317
| 0.125444
| 0
| 0.771429
| 1
| 0
| 0.011545
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.028571
| 0
| 0.12381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e47744c83a0c8d7f650614c97fe46a4f84afe368
| 6,361
|
py
|
Python
|
testing.py
|
IntegratedMindHeart/project_red_mind
|
a76af03627a4202a4f26a092b5cf4f93f2f5e2a6
|
[
"Apache-2.0"
] | 2
|
2021-07-01T08:17:00.000Z
|
2021-07-01T08:18:35.000Z
|
testing.py
|
IntegratedMindHeart/project_red_mind
|
a76af03627a4202a4f26a092b5cf4f93f2f5e2a6
|
[
"Apache-2.0"
] | null | null | null |
testing.py
|
IntegratedMindHeart/project_red_mind
|
a76af03627a4202a4f26a092b5cf4f93f2f5e2a6
|
[
"Apache-2.0"
] | 1
|
2020-10-15T20:06:43.000Z
|
2020-10-15T20:06:43.000Z
|
import time
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
# Launch a local Chrome session via the hard-coded chromedriver binary path.
driver=webdriver.Chrome(r'C:\Users\Tranzita Systems\Downloads\chromedriver_win32\chromedriver.exe')
time.sleep(2)
driver.maximize_window()
# driver.get('http://projectredmind.herokuapp.com')
# Target the locally served app instead of the hosted deployment above.
driver.get('http://127.0.0.1:8000')
def page_down():
    """Scroll one screen down by sending PAGE_DOWN to the page <body>."""
    page_body = driver.find_element_by_css_selector('body')
    page_body.click()
    page_body.send_keys(Keys.PAGE_DOWN)
def page_up():
body = driver.find_element_by_css_selector('body')
body.click()
body.send_keys(Keys.PAGE_UP)
# Home Page
time.sleep(2)
page_down()
time.sleep(2)
page_up()
time.sleep(2)
movie_name_input=driver.find_element_by_xpath('/html/body/div/div/div[2]/form/div/div/input')
movie_name_input.send_keys('Aliens')
time.sleep(2)
search_btn=driver.find_element_by_xpath('/html/body/div/div/div[2]/form/div/div/button')
search_btn.click()
time.sleep(2)
# # Search Result
driver.execute_script("window.scrollBy(0,300)")
time.sleep(5)
driver.execute_script("window.scrollBy(0,640)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,190)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,765)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,770)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,800)")
time.sleep(5)
detailed_view_btn=driver.find_element_by_xpath('/html/body/div[1]/div/div[6]/div[11]/div/div[2]/a/small')
detailed_view_btn.click()
time.sleep(2)
# Cast Page
driver.execute_script("window.scrollBy(0,80)")
time.sleep(5)
driver.back()
time.sleep(2)
# Bcak to Home Page
driver.execute_script("window.scrollBy(0,700)")
time.sleep(5)
driver.execute_script("window.scrollBy(0,500)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,650)")
time.sleep(5)
driver.execute_script("window.scrollBy(0,150)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,750)")
time.sleep(5)
driver.execute_script("window.scrollBy(0,750)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,750)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,880)")
time.sleep(5)
detailed_view_btn=driver.find_element_by_xpath('/html/body/div[1]/div/div[9]/div[15]/div/div[2]/form/input[3]')
detailed_view_btn.click()
time.sleep(2)
# Search Result
driver.execute_script("window.scrollBy(0,300)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,640)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,190)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,765)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,770)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,800)")
time.sleep(2)
detailed_view_btn=driver.find_element_by_xpath('/html/body/div[1]/div/div[6]/div[10]/div/div[2]/a/small')
detailed_view_btn.click()
time.sleep(2)
# Cast Page
driver.execute_script("window.scrollBy(0,80)")
time.sleep(2)
driver.back()
time.sleep(2)
# Bcak to Home Page
driver.execute_script("window.scrollBy(0,700)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,350)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,650)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,150)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,750)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,750)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,750)")
time.sleep(2)
driver.execute_script("window.scrollBy(0,900)")
time.sleep(2)
driver.execute_script("scroll(0, 0)")
time.sleep(5)
movie_name_input=driver.find_element_by_xpath('/html/body/div/div/div[2]/form/div/div/input')
movie_name_input.send_keys('cjkbjscbhjdv')
time.sleep(2)
search_btn=driver.find_element_by_xpath('/html/body/div/div/div[2]/form/div/div/button')
search_btn.click()
time.sleep(5)
api_link=driver.find_element_by_xpath('/html/body/nav/div/div/ul/li[2]/a')
api_link.click()
time.sleep(5)
# API Page
recommendation_api_link=driver.find_element_by_xpath('/html/body/div[1]/div[1]/div[2]/div[2]/a')
recommendation_api_link.click()
time.sleep(5)
driver.switch_to.window(driver.window_handles[1])
driver.close()
time.sleep(2)
driver.switch_to.window(driver.window_handles[0])
driver.execute_script("window.scrollBy(0,500)")
time.sleep(5)
review_api_link=driver.find_element_by_xpath('/html/body/div[1]/div[2]/div[2]/div[2]/a')
review_api_link.click()
time.sleep(5)
driver.switch_to.window(driver.window_handles[1])
driver.close()
time.sleep(2)
driver.switch_to.window(driver.window_handles[0])
time.sleep(2)
driver.execute_script("scroll(0, 0)")
time.sleep(5)
api_link=driver.find_element_by_xpath('/html/body/nav/div/div/ul/li[3]/a')
api_link.click()
time.sleep(5)
# Admin Panel Login Page
driver.switch_to.window(driver.window_handles[1])
username_input=driver.find_element_by_xpath('/html/body/div[1]/div[2]/div/div[1]/div/form/div[1]/input')
username_input.send_keys('root')
time.sleep(2)
password_input=driver.find_element_by_xpath('/html/body/div[1]/div[2]/div/div[1]/div/form/div[2]/input[1]')
password_input.send_keys('root')
time.sleep(2)
login_btn=driver.find_element_by_xpath('/html/body/div[1]/div[2]/div/div[1]/div/form/div[3]/input')
login_btn.click()
time.sleep(5)
# Admin Show Tables Panel Page
movies_records_link=driver.find_element_by_xpath('/html/body/div[1]/div[2]/div/div[1]/div[1]/div[2]/table/tbody/tr/th/a')
movies_records_link.click()
time.sleep(5)
driver.switch_to.window(driver.window_handles[1])
driver.close()
time.sleep(2)
driver.switch_to.window(driver.window_handles[0])
time.sleep(2)
about_link=driver.find_element_by_xpath('/html/body/nav/div/div/ul/li[4]/a')
about_link.click()
time.sleep(3)
# About Page
driver.execute_script("window.scrollBy(0,105)")
time.sleep(5)
driver.execute_script("window.scrollBy(0,710)")
time.sleep(5)
# Youtube Page
youtube_link=driver.find_element_by_css_selector('body > footer > div > div > a:nth-child(1) > svg')
youtube_link.click()
time.sleep(10)
driver.switch_to.window(driver.window_handles[1])
driver.close()
time.sleep(2)
driver.switch_to.window(driver.window_handles[0])
time.sleep(3)
github_link=driver.find_element_by_css_selector('body > footer > div > div > a:nth-child(2) > svg')
github_link.click()
time.sleep(10)
# quit
driver.quit()
| 29.586047
| 122
| 0.742336
| 1,063
| 6,361
| 4.269991
| 0.118532
| 0.130866
| 0.094735
| 0.181758
| 0.849526
| 0.832562
| 0.829258
| 0.796211
| 0.787398
| 0.750386
| 0
| 0.04695
| 0.082534
| 6,361
| 215
| 123
| 29.586047
| 0.730809
| 0.036944
| 0
| 0.745562
| 0
| 0.053254
| 0.295235
| 0.262506
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011834
| false
| 0.011834
| 0.017751
| 0
| 0.029586
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e4e9dfbc867048cc1632162adb47ab3aca136e7b
| 587
|
py
|
Python
|
debugging/name-mapping-test.py
|
bietiekay/riak-fuse
|
ea9c4c0cfdc171c7d467040539d224f7559c76e1
|
[
"BSD-2-Clause"
] | null | null | null |
debugging/name-mapping-test.py
|
bietiekay/riak-fuse
|
ea9c4c0cfdc171c7d467040539d224f7559c76e1
|
[
"BSD-2-Clause"
] | null | null | null |
debugging/name-mapping-test.py
|
bietiekay/riak-fuse
|
ea9c4c0cfdc171c7d467040539d224f7559c76e1
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python
# Quick smoke test for the NameMapping path-translation helpers: print the
# bucket/key produced for a handful of representative legacy paths.
import NameMapping

_SAMPLE_DIR = '/fdaf16c657d997656bbccc5752eefa9f'

for legacy_path in (
    _SAMPLE_DIR + '/images/1620028670_192497.jpg',
    _SAMPLE_DIR + '/images/',
    _SAMPLE_DIR + '/images',
):
    print(NameMapping.legacyPathToRiakBucketName('IMG_', legacy_path))

for legacy_path in (
    _SAMPLE_DIR + '/images/1620028670_192497.jpg',
    _SAMPLE_DIR + '/imes/1620028670_192497.jpg',
):
    print(NameMapping.legacyPathToRiakKeyName(legacy_path))
| 48.916667
| 118
| 0.860307
| 45
| 587
| 11.088889
| 0.4
| 0.160321
| 0.252505
| 0.270541
| 0.715431
| 0.715431
| 0.625251
| 0.364729
| 0
| 0
| 0
| 0.223776
| 0.025554
| 587
| 11
| 119
| 53.363636
| 0.648601
| 0.052811
| 0
| 0
| 0
| 0
| 0.5
| 0.478339
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.166667
| 0.833333
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
5f93ea1379a12178e3db326eb696ce61fe0ba9ea
| 2,617
|
py
|
Python
|
data_processor/modify_filenames.py
|
BoyuanChen/visual_behavior_modeling
|
8b6eb0516c562306c5d775632223ad0de775f170
|
[
"MIT"
] | 9
|
2019-12-04T12:50:43.000Z
|
2021-02-28T13:45:30.000Z
|
data_processor/modify_filenames.py
|
BoyuanChen/visual_behavior_modeling
|
8b6eb0516c562306c5d775632223ad0de775f170
|
[
"MIT"
] | null | null | null |
data_processor/modify_filenames.py
|
BoyuanChen/visual_behavior_modeling
|
8b6eb0516c562306c5d775632223ad0de775f170
|
[
"MIT"
] | 2
|
2020-07-09T20:35:15.000Z
|
2020-11-16T14:03:10.000Z
|
"""Copy groundtruth/original image pairs into the augmented data folders.

For every 'groundtruth' file in a split's output directory, the matching
'original' file name is derived, and the pair is copied to the augmented
directory as 'end_<idx>_<suffix>' (target) and 'cur_<idx>_<suffix>' (data).
"""
import os
import shutil

from tqdm import tqdm
from subprocess import call  # NOTE(review): unused by the active code; kept for compatibility


def _augment_split(base_path, saved_path):
    """Copy each groundtruth/original pair from *base_path* into *saved_path*.

    Filenames are underscore-delimited; fields 8 and 9 of the groundtruth
    name form the output suffix — assumes that naming scheme, TODO confirm.
    """
    for p_file in tqdm(os.listdir(base_path)):
        # Only 'groundtruth' files anchor a pair; everything else is skipped.
        if p_file.split('_')[1] != 'groundtruth':
            continue
        ori_target_name = os.path.join(base_path, p_file)
        # The matching data file: drop the first three fields, then insert
        # 'original' after the fourth remaining field.
        ori_data_name = '_'.join(p_file.split('_')[3:])
        parts = ori_data_name.split('_')
        ori_data_name = '_'.join(parts[:4] + ['original'] + parts[4:])
        ori_data_name = os.path.join(base_path, ori_data_name)
        suffix = p_file.split('_')[8].split('.')[0] + '_' + p_file.split('_')[9]
        shutil.copy(ori_target_name, os.path.join(saved_path, 'end_' + suffix))
        shutil.copy(ori_data_name, os.path.join(saved_path, 'cur_' + suffix))


_ROOT = '/home/cml/bo/ToM_Base/sim_tom/rgb/tom_simple_rgb/data_processor'

_augment_split(_ROOT + '/data_imgs/rgb_train_data_imgs/output',
               _ROOT + '/augmented_train_data_imgs')
_augment_split(_ROOT + '/data_imgs/rgb_test_data_imgs/output',
               _ROOT + '/augmented_test_data_imgs')
| 33.987013
| 114
| 0.677875
| 407
| 2,617
| 3.945946
| 0.137592
| 0.109589
| 0.09589
| 0.078456
| 0.888543
| 0.877958
| 0.864259
| 0.849938
| 0.849938
| 0.849938
| 0
| 0.009558
| 0.160489
| 2,617
| 77
| 115
| 33.987013
| 0.721438
| 0.166221
| 0
| 0.777778
| 0
| 0.055556
| 0.211157
| 0.173352
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
39667e2c31cf2f368d6a4ea10e67dfac3e4bfba2
| 35
|
py
|
Python
|
src/lib/pdb.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/pdb.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/pdb.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Skulpt stdlib stub: pdb is not implemented, so report a friendly
# "module unsupported" failure as soon as it is imported.
import _sk_fail

_sk_fail._("pdb")
| 17.5
| 34
| 0.742857
| 6
| 35
| 3.5
| 0.666667
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 35
| 1
| 35
| 35
| 0.65625
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
397cbf7e0c5de3cf4fce3323e878b0f32167cfae
| 122
|
py
|
Python
|
montepython/likelihoods/extended_plik_Planck_highl_EE/__init__.py
|
ivandebono/montepython_public_3.2dev_Python3
|
16771c3d37faaa3f80b171c01d78da56a75aa3d9
|
[
"MIT"
] | null | null | null |
montepython/likelihoods/extended_plik_Planck_highl_EE/__init__.py
|
ivandebono/montepython_public_3.2dev_Python3
|
16771c3d37faaa3f80b171c01d78da56a75aa3d9
|
[
"MIT"
] | null | null | null |
montepython/likelihoods/extended_plik_Planck_highl_EE/__init__.py
|
ivandebono/montepython_public_3.2dev_Python3
|
16771c3d37faaa3f80b171c01d78da56a75aa3d9
|
[
"MIT"
] | null | null | null |
from montepython.likelihood_class import Likelihood_clik


class extended_plik_Planck_highl_EE(Likelihood_clik):
    """Planck high-l EE likelihood.

    All behaviour is inherited from Likelihood_clik; this subclass only
    provides the name montepython resolves for this likelihood.
    """
    pass
| 20.333333
| 56
| 0.860656
| 16
| 122
| 6.125
| 0.75
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106557
| 122
| 5
| 57
| 24.4
| 0.899083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
f2fe3f6a87e365cedb1fb047e0d9cf49639fa253
| 462
|
py
|
Python
|
src/accounts/serializers/__init__.py
|
loganathanengrr/Django-Rest-Core
|
928c2d816c0aa48453dde8642ef1b263f76ae39d
|
[
"MIT"
] | 1
|
2020-02-18T11:09:56.000Z
|
2020-02-18T11:09:56.000Z
|
src/accounts/serializers/__init__.py
|
loganathanengrr/Django-Rest-Core
|
928c2d816c0aa48453dde8642ef1b263f76ae39d
|
[
"MIT"
] | 8
|
2020-02-11T23:20:50.000Z
|
2022-03-11T23:32:18.000Z
|
src/accounts/serializers/__init__.py
|
loganathanengrr/Django-Rest-Core
|
928c2d816c0aa48453dde8642ef1b263f76ae39d
|
[
"MIT"
] | null | null | null |
"""Re-export the account serializers as this package's public API."""
from .serializers import (
    UserCreateSerializer,
    CurrentUserSerializer,
    TokenCreateSerializer,
    PasswordResetSerializer,
    PasswordResetConfirmSerializer,
    ActivationSerializer,
    PasswordChangeSerializer,
    UsernameChangeSerializer,
)

__all__ = [
    'UserCreateSerializer',
    'CurrentUserSerializer',
    'TokenCreateSerializer',
    'PasswordResetSerializer',
    'PasswordResetConfirmSerializer',
    'ActivationSerializer',
    'PasswordChangeSerializer',
    'UsernameChangeSerializer',
]
| 28.875
| 110
| 0.852814
| 20
| 462
| 19.5
| 0.6
| 0.210256
| 0.317949
| 0.435897
| 0.938462
| 0.938462
| 0.938462
| 0.938462
| 0
| 0
| 0
| 0
| 0.0671
| 462
| 16
| 111
| 28.875
| 0.904872
| 0
| 0
| 0
| 0
| 0
| 0.395248
| 0.308855
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.357143
| 0.071429
| 0
| 0.071429
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
844b2918c7505b4587fbe2607186313750e8a86a
| 4,535
|
py
|
Python
|
datahub/dbmaintenance/test/commands/test_update_adviser_contact_email.py
|
Staberinde/data-hub-api
|
3d0467dbceaf62a47158eea412a3dba827073300
|
[
"MIT"
] | 6
|
2019-12-02T16:11:24.000Z
|
2022-03-18T10:02:02.000Z
|
datahub/dbmaintenance/test/commands/test_update_adviser_contact_email.py
|
Staberinde/data-hub-api
|
3d0467dbceaf62a47158eea412a3dba827073300
|
[
"MIT"
] | 1,696
|
2019-10-31T14:08:37.000Z
|
2022-03-29T12:35:57.000Z
|
datahub/dbmaintenance/test/commands/test_update_adviser_contact_email.py
|
Staberinde/data-hub-api
|
3d0467dbceaf62a47158eea412a3dba827073300
|
[
"MIT"
] | 9
|
2019-11-22T12:42:03.000Z
|
2021-09-03T14:25:05.000Z
|
from io import BytesIO
import pytest
from django.core.management import call_command
from reversion.models import Version
from datahub.company.test.factories import AdviserFactory
pytestmark = pytest.mark.django_db
def test_run(s3_stubber, caplog):
    """Test that the command updates the specified records (ignoring ones with errors)."""
    caplog.set_level('ERROR')
    # Five advisers covering: error row, changed, unchanged, set-from-empty
    # and cleared-to-empty cases (matched by CSV rows below).
    advisers = [
        AdviserFactory(contact_email='adviser0@test.com'),
        AdviserFactory(contact_email='adviser1@test.com'),
        AdviserFactory(contact_email='adviser2@test.com'),
        AdviserFactory(contact_email=''),
        AdviserFactory(contact_email='adviser4@test.com'),
    ]
    bucket = 'test_bucket'
    object_key = 'test_key'
    # CSV rows in order: unknown adviser id, invalid email (both should be
    # logged and skipped), changed email, same email, set-from-empty, and
    # clear-to-empty.
    csv_content = f"""id,contact_email
00000000-0000-0000-0000-000000000000,adviser9@test.com
{advisers[0].id},invalid_email
{advisers[1].id},adviser1changed@test.com
{advisers[2].id},adviser2@test.com
{advisers[3].id},adviser3changed@test.com
{advisers[4].id},
"""
    # Stub the S3 GetObject call the command makes to fetch the CSV.
    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(bytes(csv_content, encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )
    call_command('update_adviser_contact_email', bucket, object_key)
    # One logged error per bad row: missing adviser + invalid email address.
    assert len(caplog.records) == 2
    assert 'Advisor matching query does not exist' in caplog.text
    assert 'Enter a valid email address' in caplog.text
    for adviser in advisers:
        adviser.refresh_from_db()
    # adviser 0 is untouched (invalid email row); the rest reflect the CSV.
    expected_emails = [
        'adviser0@test.com', 'adviser1changed@test.com', 'adviser2@test.com',
        'adviser3changed@test.com', '',
    ]
    assert [adviser.contact_email for adviser in advisers] == expected_emails
def test_simulate(s3_stubber, caplog):
    """Test that the command simulates updates if --simulate is passed in."""
    caplog.set_level('ERROR')
    # Same fixture layout as test_run; only the simulate flag differs.
    advisers = [
        AdviserFactory(contact_email='adviser0@test.com'),
        AdviserFactory(contact_email='adviser1@test.com'),
        AdviserFactory(contact_email='adviser2@test.com'),
        AdviserFactory(contact_email=''),
        AdviserFactory(contact_email='adviser4@test.com'),
    ]
    bucket = 'test_bucket'
    object_key = 'test_key'
    # CSV rows in order: unknown adviser id, invalid email, changed email,
    # same email, set-from-empty, and clear-to-empty.
    csv_content = f"""id,contact_email
00000000-0000-0000-0000-000000000000,adviser9@test.com
{advisers[0].id},invalid_email
{advisers[1].id},adviser1changed@test.com
{advisers[2].id},adviser2@test.com
{advisers[3].id},adviser3changed@test.com
{advisers[4].id},
"""
    # Stub the S3 GetObject call the command makes to fetch the CSV.
    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(bytes(csv_content, encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )
    call_command('update_adviser_contact_email', bucket, object_key, simulate=True)
    # Errors are still reported in simulate mode...
    assert len(caplog.records) == 2
    assert 'Advisor matching query does not exist' in caplog.text
    assert 'Enter a valid email address' in caplog.text
    for adviser in advisers:
        adviser.refresh_from_db()
    # ...but no record is actually modified.
    expected_emails = [
        'adviser0@test.com', 'adviser1@test.com', 'adviser2@test.com', '', 'adviser4@test.com',
    ]
    assert [adviser.contact_email for adviser in advisers] == expected_emails
def test_audit_log(s3_stubber):
    """Test that reversion revisions are created."""
    advisers = [
        AdviserFactory(contact_email='adviser0@test.com'),
        AdviserFactory(contact_email='adviser1@test.com'),
        AdviserFactory(contact_email='adviser2@test.com'),
    ]
    bucket = 'test_bucket'
    object_key = 'test_key'
    # CSV rows: invalid email (skipped), changed email, unchanged email.
    csv_content = f"""id,contact_email
{advisers[0].id},invalid_email
{advisers[1].id},adviser1changed@test.com
{advisers[2].id},adviser2@test.com
"""
    # Stub the S3 GetObject call the command makes to fetch the CSV.
    s3_stubber.add_response(
        'get_object',
        {
            'Body': BytesIO(bytes(csv_content, encoding='utf-8')),
        },
        expected_params={
            'Bucket': bucket,
            'Key': object_key,
        },
    )
    call_command('update_adviser_contact_email', bucket, object_key)
    for adviser in advisers:
        adviser.refresh_from_db()
    # Invalid row: no revision is created.
    versions = Version.objects.get_for_object(advisers[0])
    assert versions.count() == 0
    # Successful update: exactly one revision with the expected comment.
    versions = Version.objects.get_for_object(advisers[1])
    assert versions.count() == 1
    assert versions[0].revision.get_comment() == 'Loaded contact email from spreadsheet.'
    # No-op (same email): no revision is created.
    versions = Version.objects.get_for_object(advisers[2])
    assert versions.count() == 0
| 29.640523
| 95
| 0.670121
| 543
| 4,535
| 5.423573
| 0.206262
| 0.06893
| 0.114771
| 0.076061
| 0.821732
| 0.806791
| 0.806791
| 0.741596
| 0.728014
| 0.728014
| 0
| 0.034273
| 0.202205
| 4,535
| 152
| 96
| 29.835526
| 0.779713
| 0.042117
| 0
| 0.70339
| 0
| 0
| 0.310305
| 0.147181
| 0
| 0
| 0
| 0
| 0.101695
| 1
| 0.025424
| false
| 0
| 0.042373
| 0
| 0.067797
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ffbcbdd7ad21abf9e5df46244f0645f34558089d
| 259
|
py
|
Python
|
kissim/encoding/__init__.py
|
AJK-dev/kissim
|
15375000d47b5d5485322fc725809f853a3659de
|
[
"MIT"
] | null | null | null |
kissim/encoding/__init__.py
|
AJK-dev/kissim
|
15375000d47b5d5485322fc725809f853a3659de
|
[
"MIT"
] | 1
|
2021-05-03T14:35:34.000Z
|
2021-05-18T16:15:02.000Z
|
kissim/encoding/__init__.py
|
AJK-dev/kissim
|
15375000d47b5d5485322fc725809f853a3659de
|
[
"MIT"
] | null | null | null |
"""
Encode kinase pockets as subpocket-based structural fingerprint.
"""
from .base import FingerprintBase
from .fingerprint import Fingerprint
from .fingerprint_normalized import FingerprintNormalized
from .fingerprint_generator import FingerprintGenerator
| 28.777778
| 64
| 0.853282
| 26
| 259
| 8.423077
| 0.615385
| 0.205479
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100386
| 259
| 8
| 65
| 32.375
| 0.939914
| 0.247104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
ffe7411aec8a66eaa1b6165766ca9d1acfa4f1f8
| 5,837
|
py
|
Python
|
aiolifx/products.py
|
bdraco/aiolifx
|
9bd8c5e6d291f4c79314989402f7e2c6476d5851
|
[
"MIT"
] | null | null | null |
aiolifx/products.py
|
bdraco/aiolifx
|
9bd8c5e6d291f4c79314989402f7e2c6476d5851
|
[
"MIT"
] | null | null | null |
aiolifx/products.py
|
bdraco/aiolifx
|
9bd8c5e6d291f4c79314989402f7e2c6476d5851
|
[
"MIT"
] | null | null | null |
# LIFX product id -> human-readable product name.
product_map = {
    1: 'Original 1000',
    3: 'Color 650',
    10: 'White 800 (Low Voltage)',
    11: 'White 800 (High Voltage)',
    18: 'White 900 BR30 (Low Voltage)',
    20: 'Color 1000 BR30',
    22: 'Color 1000',
    27: 'LIFX A19',
    28: 'LIFX BR30',
    29: 'LIFX+ A19',
    30: 'LIFX+ BR30',
    31: 'LIFX Z',
    32: 'LIFX Z 2',
    36: 'LIFX Downlight',
    37: 'LIFX Downlight',
    38: 'LIFX Beam',
    43: 'LIFX A19',
    44: 'LIFX BR30',
    45: 'LIFX+ A19',
    46: 'LIFX+ BR30',
    49: 'LIFX Mini',
    50: 'LIFX Mini Day and Dusk',
    51: 'LIFX Mini White',
    52: 'LIFX GU10',
    55: 'LIFX Tile',
    59: 'LIFX Mini Color',
    60: 'LIFX Mini Day and Dusk',
    61: 'LIFX Mini White',
}


def _features(chain=False, color=True, infrared=False,
              max_kelvin=9000, min_kelvin=2500, multizone=False):
    """Build one capability record; defaults describe a plain color bulb."""
    return {'chain': chain,
            'color': color,
            'infrared': infrared,
            'max_kelvin': max_kelvin,
            'min_kelvin': min_kelvin,
            'multizone': multizone}


# LIFX product id -> capability flags / kelvin range. Only the deviations
# from the plain-color-bulb defaults are spelled out.
features_map = {
    1: _features(),
    3: _features(),
    10: _features(color=False, max_kelvin=6500, min_kelvin=2700),
    11: _features(color=False, max_kelvin=6500, min_kelvin=2700),
    18: _features(color=False),
    20: _features(),
    22: _features(),
    27: _features(),
    28: _features(),
    29: _features(infrared=True),
    30: _features(infrared=True),
    31: _features(multizone=True),
    32: _features(multizone=True),
    36: _features(),
    37: _features(),
    38: _features(multizone=True),
    43: _features(),
    44: _features(),
    45: _features(infrared=True),
    46: _features(infrared=True),
    49: _features(),
    50: _features(color=False, max_kelvin=4000, min_kelvin=1500),
    51: _features(color=False, max_kelvin=2700, min_kelvin=2700),
    52: _features(),
    55: _features(chain=True),
    59: _features(),
    60: _features(color=False, max_kelvin=4000, min_kelvin=1500),
    61: _features(color=False, max_kelvin=2700, min_kelvin=2700),
}
| 29.331658
| 41
| 0.429501
| 526
| 5,837
| 4.655894
| 0.129278
| 0.102899
| 0.165374
| 0.215598
| 0.83381
| 0.81911
| 0.81911
| 0.81911
| 0.810943
| 0.810943
| 0
| 0.111832
| 0.419394
| 5,837
| 198
| 42
| 29.479798
| 0.6108
| 0
| 0
| 0.709184
| 0
| 0
| 0.287354
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fff92904da86d16d1bc43c0f273b3756307eb940
| 205
|
py
|
Python
|
tests/test_utilities.py
|
echeu/ironman
|
551baae6e3c8515347c2c47128d77897e7c4c38b
|
[
"MIT"
] | 5
|
2017-11-17T12:30:27.000Z
|
2019-08-15T03:04:17.000Z
|
tests/test_utilities.py
|
echeu/ironman
|
551baae6e3c8515347c2c47128d77897e7c4c38b
|
[
"MIT"
] | 24
|
2015-11-03T06:54:48.000Z
|
2022-03-03T21:51:21.000Z
|
tests/test_utilities.py
|
echeu/ironman
|
551baae6e3c8515347c2c47128d77897e7c4c38b
|
[
"MIT"
] | 3
|
2018-12-24T08:39:54.000Z
|
2021-09-29T21:42:01.000Z
|
from ironman.utilities import chunks
def test_chunks():
    """chunks(s, n) yields consecutive n-sized pieces of s."""
    expected_by_size = {
        1: ['a', 'b', 'c'],
        2: ['ab', 'c'],
        3: ['abc'],
    }
    for size, expected in expected_by_size.items():
        assert list(chunks('abc', size)) == expected
| 25.625
| 52
| 0.560976
| 29
| 205
| 3.931034
| 0.586207
| 0.263158
| 0.421053
| 0.5
| 0.350877
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017964
| 0.185366
| 205
| 7
| 53
| 29.285714
| 0.664671
| 0
| 0
| 0
| 0
| 0
| 0.087805
| 0
| 0
| 0
| 0
| 0
| 0.6
| 1
| 0.2
| true
| 0
| 0.2
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
08215ae83d393d01678e0bd082fde450f91a0a94
| 3,741
|
py
|
Python
|
modules/dart.py
|
ymshenyu/QvTodoBot
|
20af06caaaedbee85247c94af04be7aadd7508ac
|
[
"MIT"
] | 2
|
2020-06-28T19:49:37.000Z
|
2021-12-28T11:01:13.000Z
|
modules/dart.py
|
ymshenyu/QvTodoBot
|
20af06caaaedbee85247c94af04be7aadd7508ac
|
[
"MIT"
] | null | null | null |
modules/dart.py
|
ymshenyu/QvTodoBot
|
20af06caaaedbee85247c94af04be7aadd7508ac
|
[
"MIT"
] | 5
|
2020-05-16T07:11:00.000Z
|
2021-09-27T15:15:09.000Z
|
from telegram.bot import Bot
from telegram.ext import CommandHandler, CallbackContext
from telegram import Update, Message
from typing import List
class Darter:
    """Telegram command handlers that send animated dice emojis.

    Each public handler (dart/dice/basketball/soccer/roll/bowl) parses an
    optional repeat count from the command text and sends the matching
    emoji that many times.
    """

    def __init__(self, bot: Bot):
        self.bot = bot

    def getCommands(self) -> List[CommandHandler]:
        """Return one CommandHandler per supported dice command."""
        return [CommandHandler("dart", self.dart),
                CommandHandler("dice", self.dice),
                CommandHandler("basketball", self.basketball),
                CommandHandler("soccer", self.soccer),
                CommandHandler("roll", self.roll),
                CommandHandler("bowl", self.bowl)]

    def _send(self, update: Update, emoji: str):
        """Send *emoji* N times, where N is the optional argument after the
        command (e.g. '/dart 3'); falls back to 1 when absent or not an int.
        """
        assert isinstance(update.message, Message)
        assert isinstance(update.message.text, str)
        parsed_message = update.message.text.split(' ', 1)
        try:
            times = int(parsed_message[1]) if len(parsed_message) == 2 else 1
        except Exception:
            # Non-numeric argument: degrade gracefully to a single throw.
            times = 1
        for _ in range(times):
            self.bot.send_dice(chat_id=update.message.chat_id, emoji=emoji)

    def dart(self, update: Update, context: CallbackContext):
        self._send(update, '🎯')

    def dice(self, update: Update, context: CallbackContext):
        self._send(update, '🎲')

    def basketball(self, update: Update, context: CallbackContext):
        self._send(update, '🏀')

    def soccer(self, update: Update, context: CallbackContext):
        self._send(update, '⚽️')

    def roll(self, update: Update, context: CallbackContext):
        self._send(update, '🎰')

    def bowl(self, update: Update, context: CallbackContext):
        self._send(update, '🎳')
| 36.676471
| 74
| 0.577118
| 420
| 3,741
| 5.061905
| 0.130952
| 0.152869
| 0.124177
| 0.163688
| 0.795861
| 0.795861
| 0.795861
| 0.795861
| 0.795861
| 0.795861
| 0
| 0.011755
| 0.317829
| 3,741
| 101
| 75
| 37.039604
| 0.818574
| 0
| 0
| 0.717391
| 0
| 0
| 0.012029
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 1
| 0.086957
| false
| 0
| 0.043478
| 0.01087
| 0.152174
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
082fec2734637c2d7456b33d6880c0867ffa1207
| 256
|
py
|
Python
|
fewshot/data/iterators/__init__.py
|
sebamenabar/oc-fewshot-public
|
2dad8c9f24cb1bfe72d8b13b33d28f6788d86ca8
|
[
"MIT"
] | 18
|
2020-07-12T11:07:27.000Z
|
2022-02-06T04:17:23.000Z
|
fewshot/data/iterators/__init__.py
|
renmengye/oc-fewshot
|
eb12bd5b426518fd8353304f0760f5c24f1b3c12
|
[
"MIT"
] | 2
|
2021-10-14T17:38:50.000Z
|
2021-11-10T14:07:04.000Z
|
fewshot/data/iterators/__init__.py
|
renmengye/oc-fewshot
|
eb12bd5b426518fd8353304f0760f5c24f1b3c12
|
[
"MIT"
] | 6
|
2020-11-11T19:18:28.000Z
|
2021-06-04T14:20:03.000Z
|
"""Re-export the episode/minibatch iterator implementations for convenience."""
from .episode_iterator import EpisodeIterator  # NOQA
from .minibatch_iterator import MinibatchIterator  # NOQA
from .semisupervised_episode_iterator import SemiSupervisedEpisodeIterator  # NOQA
from .sim_episode_iterator import SimEpisodeIterator  # NOQA
| 51.2
| 82
| 0.859375
| 26
| 256
| 8.230769
| 0.461538
| 0.261682
| 0.294393
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109375
| 256
| 4
| 83
| 64
| 0.938596
| 0.074219
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f2313c9174553d27ffe161673e7b37399a82f929
| 37
|
py
|
Python
|
notebook/python_sandbox/pkg_sample/foo.py
|
xyise/xyise
|
e2bc1c2e824da4fc5cd1d81aaef76a1ad147fb01
|
[
"Apache-2.0"
] | null | null | null |
notebook/python_sandbox/pkg_sample/foo.py
|
xyise/xyise
|
e2bc1c2e824da4fc5cd1d81aaef76a1ad147fb01
|
[
"Apache-2.0"
] | null | null | null |
notebook/python_sandbox/pkg_sample/foo.py
|
xyise/xyise
|
e2bc1c2e824da4fc5cd1d81aaef76a1ad147fb01
|
[
"Apache-2.0"
] | null | null | null |
def some_function():
    """Return the fixed demo value 42000 (package sample helper)."""
    value = 42000
    return value
| 18.5
| 20
| 0.72973
| 5
| 37
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0.189189
| 37
| 2
| 21
| 18.5
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
f25f4159b5a1ffb25bf24507c1664dbfb0c3c34c
| 26,514
|
py
|
Python
|
Utils/Data/Features/Generated/EngagerFeature/NumberOfPreviousEngagementWithLanguage.py
|
MaurizioFD/recsys-challenge-2020-twitter
|
95dc024fb4f8777aa62e1304536daece640428de
|
[
"Apache-2.0"
] | 44
|
2020-07-09T11:31:17.000Z
|
2022-03-04T05:50:48.000Z
|
Utils/Data/Features/Generated/EngagerFeature/NumberOfPreviousEngagementWithLanguage.py
|
kiminh/recsys-challenge-2020-twitter
|
567f0db40be7db3d21c360f2ca6cdf2addc7c698
|
[
"Apache-2.0"
] | 3
|
2020-10-02T18:55:21.000Z
|
2020-10-13T22:13:58.000Z
|
Utils/Data/Features/Generated/EngagerFeature/NumberOfPreviousEngagementWithLanguage.py
|
kiminh/recsys-challenge-2020-twitter
|
567f0db40be7db3d21c360f2ca6cdf2addc7c698
|
[
"Apache-2.0"
] | 9
|
2020-08-08T14:55:59.000Z
|
2021-09-06T09:17:03.000Z
|
import numpy as np
from Utils.Data.DatasetUtils import is_test_or_val_set, get_train_set_id_from_test_or_val_set, \
get_test_or_val_set_id_from_train
from Utils.Data.Features.Generated.TweetFeature.IsEngagementType import *
from Utils.Data.Features.MappedFeatures import MappedFeatureEngagerId, MappedFeatureCreatorId, \
MappedFeatureTweetLanguage
import time
def find_and_increase_engager(eng_id, cre_id, lang, dictionary):
    """Return how many engagements the engager already has for this language,
    then bump the per-(user, language) counters for both creator and engager.

    The returned value is the count *before* this engagement is recorded.
    """
    engager_key = (eng_id, lang)
    creator_key = (cre_id, lang)
    previous = dictionary.get(engager_key, 0)
    dictionary[creator_key] = dictionary.get(creator_key, 0) + 1
    dictionary[engager_key] = previous + 1
    return previous
def find_and_increase_creator(eng_id, cre_id, lang, dictionary):
    """Return the engager's current count for this language and bump only the
    creator's (user, language) counter; the engager's entry is left untouched.
    """
    previous = dictionary.get((eng_id, lang), 0)
    creator_key = (cre_id, lang)
    dictionary[creator_key] = dictionary.get(creator_key, 0) + 1
    return previous
class EngagerFeatureNumberOfPreviousLikeEngagementWithLanguage(GeneratedFeaturePickle):
    # For each sample: how many "like" engagements the engager had already made
    # on tweets of the same language, counted in chronological order.

    def __init__(self, dataset_id: str):
        super().__init__("engager_feature_number_of_previous_like_engagement_with_language",
                         dataset_id)
        # Cached pickle / csv output locations for this generated feature.
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_with_language/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_with_language/{self.feature_name}.csv.gz")

    def create_feature(self):
        # Resolve the paired train/test (or validation) dataset ids so the
        # counters accumulated on train carry over to the test split.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load the train-split input features.
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        language_feature = MappedFeatureTweetLanguage(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsLike(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create(),
            language_feature.load_or_create()
        ], axis=1)
        # Process rows in chronological order so each count only sees the past.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a (user_id, language) tuple
        # VALUE: the number of engagements recorded so far
        # If key does not exist -> 0 times.
        engagement_dict = {}
        result = pd.DataFrame(
            [find_and_increase_engager(eng_id, cre_id, lang, engagement_dict)
             if engagement
             else engagement_dict.get((eng_id, lang), 0)
             for eng_id, cre_id, lang, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[language_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousLikeEngagementWithLanguage(train_dataset_id).has_feature():
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousLikeEngagementWithLanguage(train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousLikeEngagementWithLanguage(test_dataset_id).has_feature():
            # Test split: reuse the dictionary accumulated on train; only the
            # creator-side counter is advanced (no engagement labels here).
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            language_feature = MappedFeatureTweetLanguage(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create(),
                language_feature.load_or_create()
            ], axis=1)
            dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
            result = pd.DataFrame(
                [find_and_increase_creator(eng_id, cre_id, lang, engagement_dict)
                 for eng_id, cre_id, lang
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name],
                        dataframe[language_feature.feature_name])],
                index=dataframe.index
            )
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousLikeEngagementWithLanguage(test_dataset_id).save_feature(result)
class EngagerFeatureNumberOfPreviousRetweetEngagementWithLanguage(GeneratedFeaturePickle):
    # For each sample: how many "retweet" engagements the engager had already
    # made on tweets of the same language, counted in chronological order.

    def __init__(self, dataset_id: str):
        super().__init__("engager_feature_number_of_previous_retweet_engagement_with_language",
                         dataset_id)
        # Cached pickle / csv output locations for this generated feature.
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_with_language/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_with_language/{self.feature_name}.csv.gz")

    def create_feature(self):
        # Resolve the paired train/test (or validation) dataset ids so the
        # counters accumulated on train carry over to the test split.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load the train-split input features.
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        language_feature = MappedFeatureTweetLanguage(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsRetweet(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create(),
            language_feature.load_or_create()
        ], axis=1)
        # Process rows in chronological order so each count only sees the past.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a (user_id, language) tuple
        # VALUE: the number of engagements recorded so far
        # If key does not exist -> 0 times.
        engagement_dict = {}
        result = pd.DataFrame(
            [find_and_increase_engager(eng_id, cre_id, lang, engagement_dict)
             if engagement
             else engagement_dict.get((eng_id, lang), 0)
             for eng_id, cre_id, lang, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[language_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousRetweetEngagementWithLanguage(
                train_dataset_id).has_feature():
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousRetweetEngagementWithLanguage(train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousRetweetEngagementWithLanguage(
                test_dataset_id).has_feature():
            # Test split: reuse the dictionary accumulated on train; only the
            # creator-side counter is advanced (no engagement labels here).
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            language_feature = MappedFeatureTweetLanguage(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create(),
                language_feature.load_or_create()
            ], axis=1)
            dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
            result = pd.DataFrame(
                [find_and_increase_creator(eng_id, cre_id, lang, engagement_dict)
                 for eng_id, cre_id, lang
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name],
                        dataframe[language_feature.feature_name])],
                index=dataframe.index
            )
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousRetweetEngagementWithLanguage(test_dataset_id).save_feature(result)
class EngagerFeatureNumberOfPreviousReplyEngagementWithLanguage(GeneratedFeaturePickle):
    # For each sample: how many "reply" engagements the engager had already
    # made on tweets of the same language, counted in chronological order.

    def __init__(self, dataset_id: str):
        super().__init__("engager_feature_number_of_previous_reply_engagement_with_language",
                         dataset_id)
        # Cached pickle / csv output locations for this generated feature.
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_with_language/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_with_language/{self.feature_name}.csv.gz")

    def create_feature(self):
        # Resolve the paired train/test (or validation) dataset ids so the
        # counters accumulated on train carry over to the test split.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load the train-split input features.
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        language_feature = MappedFeatureTweetLanguage(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsReply(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create(),
            language_feature.load_or_create()
        ], axis=1)
        # Process rows in chronological order so each count only sees the past.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a (user_id, language) tuple
        # VALUE: the number of engagements recorded so far
        # If key does not exist -> 0 times.
        engagement_dict = {}
        result = pd.DataFrame(
            [find_and_increase_engager(eng_id, cre_id, lang, engagement_dict)
             if engagement
             else engagement_dict.get((eng_id, lang), 0)
             for eng_id, cre_id, lang, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[language_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousReplyEngagementWithLanguage(
                train_dataset_id).has_feature():
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousReplyEngagementWithLanguage(train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousReplyEngagementWithLanguage(
                test_dataset_id).has_feature():
            # Test split: reuse the dictionary accumulated on train; only the
            # creator-side counter is advanced (no engagement labels here).
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            language_feature = MappedFeatureTweetLanguage(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create(),
                language_feature.load_or_create()
            ], axis=1)
            dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
            result = pd.DataFrame(
                [find_and_increase_creator(eng_id, cre_id, lang, engagement_dict)
                 for eng_id, cre_id, lang
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name],
                        dataframe[language_feature.feature_name])],
                index=dataframe.index
            )
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousReplyEngagementWithLanguage(test_dataset_id).save_feature(result)
class EngagerFeatureNumberOfPreviousCommentEngagementWithLanguage(GeneratedFeaturePickle):
    # For each sample: how many "comment" engagements the engager had already
    # made on tweets of the same language, counted in chronological order.

    def __init__(self, dataset_id: str):
        super().__init__("engager_feature_number_of_previous_comment_engagement_with_language",
                         dataset_id)
        # Cached pickle / csv output locations for this generated feature.
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_with_language/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_with_language/{self.feature_name}.csv.gz")

    def create_feature(self):
        # Resolve the paired train/test (or validation) dataset ids so the
        # counters accumulated on train carry over to the test split.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load the train-split input features.
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        language_feature = MappedFeatureTweetLanguage(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsComment(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create(),
            language_feature.load_or_create()
        ], axis=1)
        # Process rows in chronological order so each count only sees the past.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a (user_id, language) tuple
        # VALUE: the number of engagements recorded so far
        # If key does not exist -> 0 times.
        engagement_dict = {}
        result = pd.DataFrame(
            [find_and_increase_engager(eng_id, cre_id, lang, engagement_dict)
             if engagement
             else engagement_dict.get((eng_id, lang), 0)
             for eng_id, cre_id, lang, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[language_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousCommentEngagementWithLanguage(
                train_dataset_id).has_feature():
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousCommentEngagementWithLanguage(train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousCommentEngagementWithLanguage(
                test_dataset_id).has_feature():
            # Test split: reuse the dictionary accumulated on train; only the
            # creator-side counter is advanced (no engagement labels here).
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            language_feature = MappedFeatureTweetLanguage(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create(),
                language_feature.load_or_create()
            ], axis=1)
            dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
            result = pd.DataFrame(
                [find_and_increase_creator(eng_id, cre_id, lang, engagement_dict)
                 for eng_id, cre_id, lang
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name],
                        dataframe[language_feature.feature_name])],
                index=dataframe.index
            )
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousCommentEngagementWithLanguage(test_dataset_id).save_feature(result)
class EngagerFeatureNumberOfPreviousPositiveEngagementWithLanguage(GeneratedFeaturePickle):
    # For each sample: how many positive engagements (any kind) the engager had
    # already made on tweets of the same language, counted chronologically.

    def __init__(self, dataset_id: str):
        super().__init__("engager_feature_number_of_previous_positive_engagement_with_language",
                         dataset_id)
        # Cached pickle / csv output locations for this generated feature.
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_with_language/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_with_language/{self.feature_name}.csv.gz")

    def create_feature(self):
        # Resolve the paired train/test (or validation) dataset ids so the
        # counters accumulated on train carry over to the test split.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load the train-split input features.
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        language_feature = MappedFeatureTweetLanguage(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsPositive(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create(),
            language_feature.load_or_create()
        ], axis=1)
        # Process rows in chronological order so each count only sees the past.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a (user_id, language) tuple
        # VALUE: the number of engagements recorded so far
        # If key does not exist -> 0 times.
        engagement_dict = {}
        result = pd.DataFrame(
            [find_and_increase_engager(eng_id, cre_id, lang, engagement_dict)
             if engagement
             else engagement_dict.get((eng_id, lang), 0)
             for eng_id, cre_id, lang, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[language_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousPositiveEngagementWithLanguage(
                train_dataset_id).has_feature():
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousPositiveEngagementWithLanguage(train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousPositiveEngagementWithLanguage(
                test_dataset_id).has_feature():
            # Test split: reuse the dictionary accumulated on train; only the
            # creator-side counter is advanced (no engagement labels here).
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            language_feature = MappedFeatureTweetLanguage(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create(),
                language_feature.load_or_create()
            ], axis=1)
            dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
            result = pd.DataFrame(
                [find_and_increase_creator(eng_id, cre_id, lang, engagement_dict)
                 for eng_id, cre_id, lang
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name],
                        dataframe[language_feature.feature_name])],
                index=dataframe.index
            )
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousPositiveEngagementWithLanguage(test_dataset_id).save_feature(result)
class EngagerFeatureNumberOfPreviousNegativeEngagementWithLanguage(GeneratedFeaturePickle):
    # For each sample: how many negative engagements the engager had already
    # made on tweets of the same language, counted in chronological order.

    def __init__(self, dataset_id: str):
        super().__init__("engager_feature_number_of_previous_negative_engagement_with_language",
                         dataset_id)
        # Cached pickle / csv output locations for this generated feature.
        self.pck_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_with_language/{self.feature_name}.pck.gz")
        self.csv_path = pl.Path(
            f"{Feature.ROOT_PATH}/{self.dataset_id}/generated/number_of_previous_engagement_with_language/{self.feature_name}.csv.gz")

    def create_feature(self):
        # Resolve the paired train/test (or validation) dataset ids so the
        # counters accumulated on train carry over to the test split.
        if is_test_or_val_set(self.dataset_id):
            train_dataset_id = get_train_set_id_from_test_or_val_set(self.dataset_id)
            test_dataset_id = self.dataset_id
        else:
            train_dataset_id = self.dataset_id
            test_dataset_id = get_test_or_val_set_id_from_train(train_dataset_id)
        # Load the train-split input features.
        creation_timestamps_feature = RawFeatureTweetTimestamp(train_dataset_id)
        engagers_feature = MappedFeatureEngagerId(train_dataset_id)
        creators_feature = MappedFeatureCreatorId(train_dataset_id)
        language_feature = MappedFeatureTweetLanguage(train_dataset_id)
        engagement_feature = TweetFeatureEngagementIsNegative(train_dataset_id)
        dataframe = pd.concat([
            creation_timestamps_feature.load_or_create(),
            engagers_feature.load_or_create(),
            engagement_feature.load_or_create(),
            creators_feature.load_or_create(),
            language_feature.load_or_create()
        ], axis=1)
        # Process rows in chronological order so each count only sees the past.
        dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
        # KEY: a (user_id, language) tuple
        # VALUE: the number of engagements recorded so far
        # If key does not exist -> 0 times.
        engagement_dict = {}
        result = pd.DataFrame(
            [find_and_increase_engager(eng_id, cre_id, lang, engagement_dict)
             if engagement
             else engagement_dict.get((eng_id, lang), 0)
             for eng_id, cre_id, lang, engagement
             in zip(dataframe[engagers_feature.feature_name],
                    dataframe[creators_feature.feature_name],
                    dataframe[language_feature.feature_name],
                    dataframe[engagement_feature.feature_name])],
            index=dataframe.index
        )
        if not EngagerFeatureNumberOfPreviousNegativeEngagementWithLanguage(
                train_dataset_id).has_feature():
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousNegativeEngagementWithLanguage(train_dataset_id).save_feature(result)
        if not EngagerFeatureNumberOfPreviousNegativeEngagementWithLanguage(
                test_dataset_id).has_feature():
            # Test split: reuse the dictionary accumulated on train; only the
            # creator-side counter is advanced (no engagement labels here).
            creation_timestamps_feature = RawFeatureTweetTimestamp(test_dataset_id)
            engagers_feature = MappedFeatureEngagerId(test_dataset_id)
            language_feature = MappedFeatureTweetLanguage(test_dataset_id)
            creators_feature = MappedFeatureCreatorId(test_dataset_id)
            dataframe = pd.concat([
                creation_timestamps_feature.load_or_create(),
                engagers_feature.load_or_create(),
                creators_feature.load_or_create(),
                language_feature.load_or_create()
            ], axis=1)
            dataframe.sort_values(creation_timestamps_feature.feature_name, inplace=True)
            result = pd.DataFrame(
                [find_and_increase_creator(eng_id, cre_id, lang, engagement_dict)
                 for eng_id, cre_id, lang
                 in zip(dataframe[engagers_feature.feature_name],
                        dataframe[creators_feature.feature_name],
                        dataframe[language_feature.feature_name])],
                index=dataframe.index
            )
            result.sort_index(inplace=True)
            EngagerFeatureNumberOfPreviousNegativeEngagementWithLanguage(test_dataset_id).save_feature(result)
| 49.744841
| 134
| 0.683563
| 2,833
| 26,514
| 5.993999
| 0.044123
| 0.085861
| 0.049467
| 0.06042
| 0.90666
| 0.905836
| 0.862729
| 0.852423
| 0.838172
| 0.833873
| 0
| 0.001755
| 0.247944
| 26,514
| 532
| 135
| 49.838346
| 0.84985
| 0.06687
| 0
| 0.870192
| 1
| 0
| 0.073494
| 0.073494
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033654
| false
| 0
| 0.012019
| 0
| 0.064904
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f28df92d282153528564387f196718a04f8a879b
| 3,177
|
py
|
Python
|
tests/cli/test_subscriptions.py
|
RichMooreNR/newrelic-lambda-cli
|
770df2f2097c7ab250375a3787be30f2cbdb80ce
|
[
"Apache-2.0"
] | 29
|
2019-11-21T18:39:02.000Z
|
2022-02-11T19:28:25.000Z
|
tests/cli/test_subscriptions.py
|
RichMooreNR/newrelic-lambda-cli
|
770df2f2097c7ab250375a3787be30f2cbdb80ce
|
[
"Apache-2.0"
] | 118
|
2019-11-21T18:32:00.000Z
|
2022-03-31T23:58:31.000Z
|
tests/cli/test_subscriptions.py
|
RichMooreNR/newrelic-lambda-cli
|
770df2f2097c7ab250375a3787be30f2cbdb80ce
|
[
"Apache-2.0"
] | 28
|
2019-11-21T18:02:38.000Z
|
2022-02-26T10:08:38.000Z
|
from moto import mock_lambda, mock_logs
from newrelic_lambda_cli.cli import cli, register_groups
@mock_lambda
@mock_logs
def test_subscriptions_install(aws_credentials, cli_runner):
    """
    Assert that 'newrelic-lambda subscriptions install' attempts to install the
    New Relic log subscription on a function.
    """
    register_groups(cli)
    fake_env = {
        "AWS_ACCESS_KEY_ID": "testing",
        "AWS_SECRET_ACCESS_KEY": "testing",
        "AWS_SECURITY_TOKEN": "testing",
        "AWS_SESSION_TOKEN": "testing",
    }
    expected_error = (
        "Could not find 'newrelic-log-ingestion' function. "
        "Is the New Relic AWS integration installed?"
    )

    # Single --function argument.
    result = cli_runner.invoke(
        cli,
        [
            "subscriptions",
            "install",
            "--no-aws-permissions-check",
            "--function",
            "foobar",
            "--aws-region",
            "us-east-1",
        ],
        env=fake_env,
    )
    assert result.exit_code == 1
    assert result.stdout == ""
    assert expected_error in result.stderr

    # Multiple --function arguments.
    result2 = cli_runner.invoke(
        cli,
        [
            "subscriptions",
            "install",
            "--no-aws-permissions-check",
            "--function",
            "foobar",
            "--function",
            "barbaz",
            "--aws-region",
            "us-east-1",
        ],
        env=fake_env,
    )
    assert result2.exit_code == 1
    assert result2.stdout == ""
    assert expected_error in result2.stderr
@mock_lambda
@mock_logs
def test_subscriptions_uninstall(aws_credentials, cli_runner):
    """
    Assert that 'newrelic-lambda subscriptions uninstall' attempts to uninstall
    the New Relic log subscription on a function.
    """
    register_groups(cli)
    fake_env = {
        "AWS_ACCESS_KEY_ID": "testing",
        "AWS_SECRET_ACCESS_KEY": "testing",
        "AWS_SECURITY_TOKEN": "testing",
        "AWS_SESSION_TOKEN": "testing",
    }

    # Single --function argument.
    result = cli_runner.invoke(
        cli,
        [
            "subscriptions",
            "uninstall",
            "--no-aws-permissions-check",
            "--function",
            "foobar",
            "--aws-region",
            "us-east-1",
        ],
        env=fake_env,
    )
    assert result.exit_code == 1
    assert result.stdout == ""

    # Multiple --function arguments.
    result2 = cli_runner.invoke(
        cli,
        [
            "subscriptions",
            "uninstall",
            "--no-aws-permissions-check",
            "--function",
            "foobar",
            "--function",
            "barbaz",
            "--aws-region",
            "us-east-1",
        ],
        env=fake_env,
    )
    assert result2.exit_code == 1
    assert result2.stdout == ""
| 25.620968
| 83
| 0.52282
| 300
| 3,177
| 5.313333
| 0.21
| 0.075282
| 0.027604
| 0.045169
| 0.892095
| 0.892095
| 0.883312
| 0.835634
| 0.835634
| 0.760351
| 0
| 0.007317
| 0.354737
| 3,177
| 123
| 84
| 25.829268
| 0.770244
| 0.075228
| 0
| 0.823529
| 0
| 0
| 0.330459
| 0.081407
| 0
| 0
| 0
| 0
| 0.098039
| 1
| 0.019608
| false
| 0
| 0.019608
| 0
| 0.039216
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4b50b9fa31d96ed0379ceb60bf424af98ae38590
| 12,683
|
py
|
Python
|
chest.py
|
EtlamGit/PySlicer
|
4f2015ca995377d34dd176045396dcf51bc78a2a
|
[
"MIT"
] | 1
|
2021-02-16T16:27:54.000Z
|
2021-02-16T16:27:54.000Z
|
chest.py
|
EtlamGit/PySlicer
|
4f2015ca995377d34dd176045396dcf51bc78a2a
|
[
"MIT"
] | null | null | null |
chest.py
|
EtlamGit/PySlicer
|
4f2015ca995377d34dd176045396dcf51bc78a2a
|
[
"MIT"
] | null | null | null |
# copyright 2020 EtlamGit
import os
import os.path
from PIL import Image
class Chest:
    """Maps chest texture regions from the 64px reference layout onto an
    arbitrarily sized texture image."""

    def __init__(self, image_dimension_64):
        # Ratio between the actual texture size and the 64px reference layout.
        self.scale = image_dimension_64 / 64.0

    def scale_it(self, ox, oy, dx, dy):
        """Return a (left, upper, right, lower) pixel box for a region whose
        origin is (ox, oy) and extent (dx, dy) in reference-layout units."""
        factor = self.scale
        left = int(ox * factor)
        upper = int(oy * factor)
        right = int(ox * factor + dx * factor)
        lower = int(oy * factor + dy * factor)
        return (left, upper, right, lower)
# helper to get texture bounding box in OLD format
class ChestOld(Chest):
    """Texture regions for a single chest in the OLD texture layout.

    All coordinates are in 64px reference units; see Chest.scale_it.
    """
    # locker (the latch on the chest front)
    def l_top(self): return self.scale_it(1, 0, 2, 1)
    def l_bottom(self): return self.scale_it(3, 0, 2, 1)
    def l_left(self): return self.scale_it(0, 1, 1, 4)
    def l_front(self): return self.scale_it(1, 1, 2, 4)
    def l_right(self): return self.scale_it(3, 1, 1, 4)
    def l_back(self): return self.scale_it(4, 1, 2, 4)
    # top lid
    def t_top(self): return self.scale_it(14, 0, 14, 14)
    def t_inside(self): return self.scale_it(28, 0, 14, 14)
    def t_left(self): return self.scale_it( 0, 14, 14, 5)
    def t_front(self): return self.scale_it(14, 14, 14, 5)
    def t_right(self): return self.scale_it(28, 14, 14, 5)
    def t_back(self): return self.scale_it(42, 14, 14, 5)
    # bottom container
    def b_inside(self): return self.scale_it(14, 19, 14, 14)
    def b_bottom(self): return self.scale_it(28, 19, 14, 14)
    def b_left(self): return self.scale_it( 0, 33, 14, 10)
    def b_front(self): return self.scale_it(14, 33, 14, 10)
    def b_right(self): return self.scale_it(28, 33, 14, 10)
    def b_back(self): return self.scale_it(42, 33, 14, 10)
class DoubleChestOld(Chest):
    """Texture regions for a double chest in the OLD texture layout.

    The *1/*2 suffixes address the two horizontal halves of the double chest.
    All coordinates are in 64px reference units; see Chest.scale_it.
    """
    # locker (the latch on the chest front)
    def l_top1(self): return self.scale_it(1, 0, 1, 1)
    def l_top2(self): return self.scale_it(2, 0, 1, 1)
    def l_bottom1(self): return self.scale_it(3, 0, 1, 1)
    def l_bottom2(self): return self.scale_it(4, 0, 1, 1)
    def l_left(self): return self.scale_it(0, 1, 1, 4)
    def l_front1(self): return self.scale_it(1, 1, 1, 4)
    def l_front2(self): return self.scale_it(2, 1, 1, 4)
    def l_right(self): return self.scale_it(3, 1, 1, 4)
    def l_back1(self): return self.scale_it(4, 1, 1, 4)
    def l_back2(self): return self.scale_it(5, 1, 1, 4)
    # top lid
    def t_top1(self): return self.scale_it(14, 0, 15, 14)
    def t_top2(self): return self.scale_it(29, 0, 15, 14)
    def t_inside1(self): return self.scale_it(44, 0, 15, 14)
    def t_inside2(self): return self.scale_it(59, 0, 15, 14)
    def t_left(self): return self.scale_it( 0, 14, 14, 5)
    def t_front1(self): return self.scale_it(14, 14, 15, 5)
    def t_front2(self): return self.scale_it(29, 14, 15, 5)
    def t_right(self): return self.scale_it(44, 14, 14, 5)
    def t_back1(self): return self.scale_it(58, 14, 15, 5)
    def t_back2(self): return self.scale_it(73, 14, 15, 5)
    # bottom container
    def b_inside1(self): return self.scale_it(14, 19, 15, 14)
    def b_inside2(self): return self.scale_it(29, 19, 15, 14)
    def b_bottom1(self): return self.scale_it(44, 19, 15, 14)
    def b_bottom2(self): return self.scale_it(59, 19, 15, 14)
    def b_left(self): return self.scale_it( 0, 33, 14, 10)
    def b_front1(self): return self.scale_it(14, 33, 15, 10)
    def b_front2(self): return self.scale_it(29, 33, 15, 10)
    def b_right(self): return self.scale_it(44, 33, 14, 10)
    def b_back1(self): return self.scale_it(58, 33, 15, 10)
    def b_back2(self): return self.scale_it(73, 33, 15, 10)
# helper to get texture bounding box in NEW format
class ChestNew(Chest):
    """Texture regions for a single chest in the NEW texture layout.

    Same regions as ChestOld but at different positions within the texture.
    All coordinates are in 64px reference units; see Chest.scale_it.
    """
    # locker (the latch on the chest front)
    def l_bottom(self): return self.scale_it(1, 0, 2, 1)
    def l_top(self): return self.scale_it(3, 0, 2, 1)
    def l_left(self): return self.scale_it(0, 1, 1, 4)
    def l_back(self): return self.scale_it(1, 1, 2, 4)
    def l_right(self): return self.scale_it(3, 1, 1, 4)
    def l_front(self): return self.scale_it(4, 1, 2, 4)
    # top lid
    def t_inside(self): return self.scale_it(14, 0, 14, 14)
    def t_top(self): return self.scale_it(28, 0, 14, 14)
    def t_left(self): return self.scale_it( 0, 14, 14, 5)
    def t_right(self): return self.scale_it(14, 14, 14, 5)
    def t_back(self): return self.scale_it(28, 14, 14, 5)
    def t_front(self): return self.scale_it(42, 14, 14, 5)
    # bottom container
    def b_bottom(self): return self.scale_it(14, 19, 14, 14)
    def b_inside(self): return self.scale_it(28, 19, 14, 14)
    def b_left(self): return self.scale_it( 0, 33, 14, 10)
    def b_right(self): return self.scale_it(14, 33, 14, 10)
    def b_back(self): return self.scale_it(28, 33, 14, 10)
    def b_front(self): return self.scale_it(42, 33, 14, 10)
class DoubleChestNew(Chest):
    """Texture bounding boxes for the NEW double-chest layout.

    The new format stores the two chest halves in separate files, so the
    ``*1``/``*2`` face pairs intentionally share the same coordinates.
    """

    # locker
    def l_bottom(self):
        return self.scale_it(1, 0, 1, 1)

    def l_top(self):
        return self.scale_it(2, 0, 1, 1)

    def l_left(self):
        return self.scale_it(0, 1, 1, 4)

    def l_back(self):
        return self.scale_it(1, 1, 1, 4)

    def l_right(self):
        return self.scale_it(2, 1, 1, 4)

    def l_front(self):
        return self.scale_it(3, 1, 1, 4)

    # top lid
    def t_inside1(self):
        return self.scale_it(14, 0, 15, 14)

    def t_inside2(self):
        return self.scale_it(14, 0, 15, 14)

    def t_top1(self):
        return self.scale_it(29, 0, 15, 14)

    def t_top2(self):
        return self.scale_it(29, 0, 15, 14)

    def t_left(self):
        return self.scale_it(0, 14, 14, 5)

    def t_front1(self):
        return self.scale_it(43, 14, 15, 5)

    def t_front2(self):
        return self.scale_it(43, 14, 15, 5)

    def t_right(self):
        return self.scale_it(29, 14, 14, 5)

    def t_back1(self):
        return self.scale_it(14, 14, 15, 5)

    def t_back2(self):
        return self.scale_it(14, 14, 15, 5)

    # bottom container
    def b_bottom1(self):
        return self.scale_it(14, 19, 15, 14)

    def b_bottom2(self):
        return self.scale_it(14, 19, 15, 14)

    def b_inside1(self):
        return self.scale_it(29, 19, 15, 14)

    def b_inside2(self):
        return self.scale_it(29, 19, 15, 14)

    def b_left(self):
        return self.scale_it(0, 33, 14, 10)

    def b_front1(self):
        return self.scale_it(43, 33, 15, 10)

    def b_front2(self):
        return self.scale_it(43, 33, 15, 10)

    def b_right(self):
        return self.scale_it(29, 33, 14, 10)

    def b_back1(self):
        return self.scale_it(14, 33, 15, 10)

    def b_back2(self):
        return self.scale_it(14, 33, 15, 10)
def transform_chest(input_root, output_root, input_file):
    """Convert a single-chest texture from the old layout to the new one.

    Crops every face from the old-format texture and pastes it, flipped or
    rotated as required, into a fresh RGBA image of the same size, then saves
    the result under ``output_root``.

    :param str input_root: root directory of the source resource pack
    :param str output_root: root directory of the destination resource pack
    :param str input_file: texture file name inside the chest folder

    Silently does nothing when the source texture does not exist.
    """
    base_folder = 'assets/minecraft/textures/entity/chest/'
    src_path = input_root + base_folder + input_file
    if not os.path.exists(src_path):
        return
    # 'with' closes the underlying file handle (the original leaked it)
    with Image.open(src_path) as img:
        # create new chest
        new_chest = Image.new('RGBA', img.size, color=0)
        # bounding-box helpers for old and new layouts
        oc = ChestOld(min(img.size))
        nc = ChestNew(min(img.size))
        # locker part
        new_chest.paste(img.crop(oc.l_top()).transpose(Image.FLIP_TOP_BOTTOM), nc.l_top())
        new_chest.paste(img.crop(oc.l_bottom()), nc.l_bottom())
        new_chest.paste(img.crop(oc.l_left()).transpose(Image.ROTATE_180), nc.l_left())
        new_chest.paste(img.crop(oc.l_front()).transpose(Image.ROTATE_180), nc.l_front())
        new_chest.paste(img.crop(oc.l_right()).transpose(Image.ROTATE_180), nc.l_right())
        new_chest.paste(img.crop(oc.l_back()).transpose(Image.ROTATE_180), nc.l_back())
        # top part
        new_chest.paste(img.crop(oc.t_top()).transpose(Image.FLIP_TOP_BOTTOM), nc.t_top())
        new_chest.paste(img.crop(oc.t_inside()).transpose(Image.FLIP_TOP_BOTTOM), nc.t_inside())
        new_chest.paste(img.crop(oc.t_left()).transpose(Image.ROTATE_180), nc.t_left())
        new_chest.paste(img.crop(oc.t_front()).transpose(Image.ROTATE_180), nc.t_front())
        new_chest.paste(img.crop(oc.t_right()).transpose(Image.ROTATE_180), nc.t_right())
        new_chest.paste(img.crop(oc.t_back()).transpose(Image.ROTATE_180), nc.t_back())
        # bottom part
        new_chest.paste(img.crop(oc.b_inside()).transpose(Image.FLIP_TOP_BOTTOM), nc.b_inside())
        new_chest.paste(img.crop(oc.b_bottom()).transpose(Image.FLIP_TOP_BOTTOM), nc.b_bottom())
        new_chest.paste(img.crop(oc.b_left()).transpose(Image.ROTATE_180), nc.b_left())
        new_chest.paste(img.crop(oc.b_front()).transpose(Image.ROTATE_180), nc.b_front())
        new_chest.paste(img.crop(oc.b_right()).transpose(Image.ROTATE_180), nc.b_right())
        new_chest.paste(img.crop(oc.b_back()).transpose(Image.ROTATE_180), nc.b_back())
    # exist_ok avoids the check-then-create race of the original code
    os.makedirs(os.path.abspath(output_root + base_folder), exist_ok=True)
    new_chest.save(output_root + base_folder + input_file)
def transform_doublechest(input_root, output_root, input_file):
    """Split an old-format double-chest texture into new left/right textures.

    The old format packed both halves into one image; the new format uses two
    square images. Faces are cropped from the source, flipped/rotated as the
    new layout requires, and saved as ``*left*`` and ``*right*`` files.

    :param str input_root: root directory of the source resource pack
    :param str output_root: root directory of the destination resource pack
    :param str input_file: texture file name containing 'double' in its name

    Silently does nothing when the source texture does not exist.
    """
    base_folder = 'assets/minecraft/textures/entity/chest/'
    src_path = input_root + base_folder + input_file
    if not os.path.exists(src_path):
        return
    # 'with' closes the underlying file handle (the original leaked it)
    with Image.open(src_path) as img:
        width = min(img.size)
        # create the two new chest halves (new format is square)
        new_chest_L = Image.new('RGBA', (width, width), color=0)
        new_chest_R = Image.new('RGBA', (width, width), color=0)
        # bounding-box helpers for old and new layouts
        oc = DoubleChestOld(width)
        nc = DoubleChestNew(width)
        # locker part
        new_chest_R.paste(img.crop(oc.l_top1()).transpose(Image.FLIP_TOP_BOTTOM), nc.l_top())
        new_chest_R.paste(img.crop(oc.l_bottom1()), nc.l_bottom())
        new_chest_R.paste(img.crop(oc.l_left()).transpose(Image.ROTATE_180), nc.l_left())
        new_chest_R.paste(img.crop(oc.l_back1()).transpose(Image.FLIP_TOP_BOTTOM), nc.l_back())
        new_chest_R.paste(img.crop(oc.l_front1()).transpose(Image.ROTATE_180), nc.l_front())
        new_chest_L.paste(img.crop(oc.l_top2()).transpose(Image.FLIP_TOP_BOTTOM), nc.l_top())
        new_chest_L.paste(img.crop(oc.l_bottom2()), nc.l_bottom())
        new_chest_L.paste(img.crop(oc.l_back2()).transpose(Image.FLIP_TOP_BOTTOM), nc.l_back())
        new_chest_L.paste(img.crop(oc.l_right()).transpose(Image.ROTATE_180), nc.l_right())
        new_chest_L.paste(img.crop(oc.l_front2()).transpose(Image.ROTATE_180), nc.l_front())
        # top part
        new_chest_R.paste(img.crop(oc.t_top1()).transpose(Image.FLIP_TOP_BOTTOM), nc.t_top1())
        new_chest_L.paste(img.crop(oc.t_top2()).transpose(Image.FLIP_TOP_BOTTOM), nc.t_top2())
        new_chest_R.paste(img.crop(oc.t_inside1()).transpose(Image.FLIP_TOP_BOTTOM), nc.t_inside1())
        new_chest_L.paste(img.crop(oc.t_inside2()).transpose(Image.FLIP_TOP_BOTTOM), nc.t_inside2())
        new_chest_R.paste(img.crop(oc.t_left()).transpose(Image.ROTATE_180), nc.t_left())
        new_chest_R.paste(img.crop(oc.t_front1()).transpose(Image.ROTATE_180), nc.t_front1())
        new_chest_L.paste(img.crop(oc.t_front2()).transpose(Image.ROTATE_180), nc.t_front2())
        new_chest_L.paste(img.crop(oc.t_right()).transpose(Image.ROTATE_180), nc.t_right())
        new_chest_L.paste(img.crop(oc.t_back1()).transpose(Image.ROTATE_180), nc.t_back1())
        new_chest_R.paste(img.crop(oc.t_back2()).transpose(Image.ROTATE_180), nc.t_back2())
        # bottom part
        new_chest_R.paste(img.crop(oc.b_inside1()).transpose(Image.FLIP_TOP_BOTTOM), nc.b_inside1())
        new_chest_L.paste(img.crop(oc.b_inside2()).transpose(Image.FLIP_TOP_BOTTOM), nc.b_inside2())
        new_chest_R.paste(img.crop(oc.b_bottom1()).transpose(Image.FLIP_TOP_BOTTOM), nc.b_bottom1())
        new_chest_L.paste(img.crop(oc.b_bottom2()).transpose(Image.FLIP_TOP_BOTTOM), nc.b_bottom2())
        new_chest_R.paste(img.crop(oc.b_left()).transpose(Image.ROTATE_180), nc.b_left())
        new_chest_R.paste(img.crop(oc.b_front1()).transpose(Image.ROTATE_180), nc.b_front1())
        new_chest_L.paste(img.crop(oc.b_front2()).transpose(Image.ROTATE_180), nc.b_front2())
        new_chest_L.paste(img.crop(oc.b_right()).transpose(Image.ROTATE_180), nc.b_right())
        new_chest_L.paste(img.crop(oc.b_back1()).transpose(Image.ROTATE_180), nc.b_back1())
        new_chest_R.paste(img.crop(oc.b_back2()).transpose(Image.ROTATE_180), nc.b_back2())
    # exist_ok avoids the check-then-create race of the original code
    os.makedirs(os.path.abspath(output_root + base_folder), exist_ok=True)
    new_chest_L.save(output_root + base_folder + input_file.replace('double', 'left'))
    new_chest_R.save(output_root + base_folder + input_file.replace('double', 'right'))
| 56.874439
| 139
| 0.653079
| 2,199
| 12,683
| 3.549341
| 0.05457
| 0.114158
| 0.165022
| 0.223959
| 0.924279
| 0.914414
| 0.842409
| 0.703908
| 0.6041
| 0.580141
| 0
| 0.074904
| 0.197903
| 12,683
| 222
| 140
| 57.130631
| 0.692323
| 0.036506
| 0
| 0.165714
| 0
| 0
| 0.009104
| 0.006398
| 0
| 0
| 0
| 0
| 0
| 1
| 0.548571
| false
| 0
| 0.017143
| 0.531429
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
4bb5bf393f1ccf3c27f87954fd43ffe4d2c9d3e9
| 47
|
py
|
Python
|
bempp/api/grid/__init__.py
|
pescap/bempp-cl
|
3a68666e8db0e873d418b734289067483f68f12e
|
[
"MIT"
] | null | null | null |
bempp/api/grid/__init__.py
|
pescap/bempp-cl
|
3a68666e8db0e873d418b734289067483f68f12e
|
[
"MIT"
] | null | null | null |
bempp/api/grid/__init__.py
|
pescap/bempp-cl
|
3a68666e8db0e873d418b734289067483f68f12e
|
[
"MIT"
] | null | null | null |
from .grid import Grid
from .grid import union
| 15.666667
| 23
| 0.787234
| 8
| 47
| 4.625
| 0.5
| 0.432432
| 0.756757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 24
| 23.5
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
4bb967e45a532dbe2dbea252a0c56564edd4d8c8
| 30,104
|
py
|
Python
|
apt_parser/templates.py
|
mwregan2/MiriTE
|
6b65939454db60bf10619d50fcb5769d23598b76
|
[
"CNRI-Python"
] | null | null | null |
apt_parser/templates.py
|
mwregan2/MiriTE
|
6b65939454db60bf10619d50fcb5769d23598b76
|
[
"CNRI-Python"
] | 24
|
2019-08-09T15:03:20.000Z
|
2022-03-04T10:04:48.000Z
|
apt_parser/templates.py
|
mwregan2/MiriTE
|
6b65939454db60bf10619d50fcb5769d23598b76
|
[
"CNRI-Python"
] | 4
|
2019-06-16T15:03:23.000Z
|
2020-12-02T19:51:52.000Z
|
import re
import logging
from . import dithering
from . import constants as c
# Module-level logger shared by every template parser in this file.
LOG = logging.getLogger('parse_apt.templates')
class Template(object):
    """Generic template object from which every observation-template class derives.

    Subclasses override :meth:`parse_template` to fill ``self.simulation_list``
    with one dict per simulation; every dict is merged with ``self.metadata``.
    """

    def __init__(self, template, metadata=None):
        """
        :param template: observation XML object
        :type template: <class 'xml.etree.ElementTree.Element'>
        :param dict metadata: common metadata copied into every simulation
                              (optional; an empty dict is used when omitted)
        """
        self.template = template
        self.simulation_list = []
        # Bug fix: the original stored None here and crashed on the next line
        # ("TypeError: 'NoneType' object does not support item assignment")
        # whenever metadata was omitted.
        self.metadata = metadata if metadata is not None else {}
        self.metadata["template"] = self.__class__.__name__
        self.parse_template()

    def extract_dither_id(self, text):
        """Map a label such as "Dither 1" to its 0-based index.

        "Dither 1" corresponds to the 0-th element of the dither list,
        "Dither 2" to the 1st, etc.

        :param str text: Text from filter_element.find("mi:Dither", parser.ns).text
        :return int: index of the corresponding dither in the dither list
        :raises ValueError: if *text* does not match the expected pattern
        """
        regexp = "^Dither ([0-9]+)$"
        match = re.compile(regexp).match(text)
        if match:
            return int(match.group(1)) - 1
        raise ValueError("Regexp {} don't match in '{}'".format(regexp, text))

    def getobs(self):
        """Return the list of simulation dicts parsed from the template."""
        return self.simulation_list

    def parse_template(self):
        """Fill ``self.simulation_list``; no-op here, overridden by subclasses."""
        pass

    def _add_simulation(self, simulation):
        """Merge the common metadata into *simulation*, then store it.

        :param simulation: Simulation dictionary to add to the list of
            simulations for this Template
        :type simulation: dict()
        """
        simulation.update(self.metadata)
        self.simulation_list.append(simulation)

    def _extend_simulation_list(self, sim_list):
        """Add several simulations at once (see :meth:`_add_simulation`).

        :param sim_list: list of simulation dictionaries
        :type sim_list: list(dict())
        """
        for sim in sim_list:
            self._add_simulation(sim)
class MiriLRS(Template):
    """Template parser for MiriLRS observations (one simulation per template)."""

    NS = "mlrs"

    def parse_template(self):
        """Build the single simulation dict described by this template."""
        self.simulation_list = []

        def text_of(tag):
            # Text content of the namespaced child element <NS:tag>.
            return self.template.find("{}:{}".format(self.NS, tag), c.ns).text

        subarray = text_of("Subarray")
        exposures = int(text_of("Exposures"))
        integrations = int(text_of("Integrations"))
        groups = int(text_of("Groups"))
        readout_pattern = text_of("ReadoutPattern")
        dither = text_of("DitherType")
        nb_dither_points = 1  # default when no dithers
        if dither != 'NONE':
            try:
                nb_dither_points = c.DITHER_POINTS[dither]
            except KeyError:
                if dither == "MAPPING":
                    # A mapping raster is spatial steps times spectral steps.
                    nb_dither_points = (int(text_of("NumberOfSpatialSteps"))
                                        * int(text_of("NumberOfSpectralSteps")))
                else:
                    LOG.error("Unable to retrieve number of dither points from {}".format(dither))
        # Specific to my needs
        self._add_simulation({
            "exposures": exposures,
            "ima_integrations": integrations,
            "ima_frames": groups,
            "subarray": subarray,
            "NDither": nb_dither_points,
            "readDetect": readout_pattern,
        })
class MiriImaging(Template):
    """Template parser for MiriImaging observations (one simulation per filter)."""

    NS = "mi"

    def parse_template(self):
        """Build one simulation dict for each filter entry of the template."""
        self.simulation_list = []
        ns = self.NS
        # Empty list if no dithers
        dither_specs = self.template.findall(".//{}:DitherSpecification".format(ns), c.ns)
        subarray = self.template.find("{}:Subarray".format(ns), c.ns).text
        for filt in self.template.find("{}:Filters".format(ns), c.ns):

            def text_of(tag):
                return filt.find("{}:{}".format(ns, tag), c.ns).text

            dither = text_of("Dither")
            nb_dither_points = 1  # default when no dithers
            if dither != 'None':
                spec = dither_specs[self.extract_dither_id(dither)]
                nb_dither_points = dithering.parse_dither(spec, ns)
            # Specific to my needs
            self._add_simulation({
                "exposures": int(text_of("Exposures")),
                "ima_integrations": int(text_of("Integrations")),
                "ima_frames": int(text_of("Groups")),
                "NDither": nb_dither_points,
                "subarray": subarray,
                "filter": text_of("Filter"),
                "readDetect": text_of("ReadoutPattern"),
            })
class MiriExternalFlat(Template):
    """Template parser for MiriExternalFlat observations.

    :meth:`parse_template` selects one of the three exposure parsers below
    depending on the ``Detector`` element (IMAGER, MRS or ALL).
    """

    NS = "mef"

    def parse_imager_exposure(self, exposure, common_metadata):
        """Parse one imager exposure element.

        :param exposure: Exposure template
        :type exposure: mef:Exposure XML element
        :param dict common_metadata: metadata declared in parent element that need to be added
        :return: list of sub simulations
        :rtype: list(dict)
        """
        exposures = int(exposure.find("{}:Exposures".format(self.NS), c.ns).text)
        integrations = int(exposure.find("{}:Integrations".format(self.NS), c.ns).text)
        groups = int(exposure.find("{}:Groups".format(self.NS), c.ns).text)
        filter_name = exposure.find("{}:Filter".format(self.NS), c.ns).text
        readout_pattern = exposure.find("{}:ReadoutPattern".format(self.NS), c.ns).text
        nb_dither = common_metadata.get("NDither", 1)  # 1 == no dither
        # Specific to my needs
        simulation = {
            "exposures": exposures,
            "ima_integrations": integrations,
            "ima_frames": groups,
            "NDither": nb_dither,
            "filter": filter_name,
            "readDetect": readout_pattern,
        }
        simulation.update(common_metadata)
        return [simulation]

    def parse_mrs_exposure(self, exposure, common_metadata):
        """Parse one MRS exposure element.

        :param exposure: Exposure template
        :type exposure: mef:Exposure XML element
        :param dict common_metadata: metadata declared in parent element that need to be added
        :return: list of sub simulations
        :rtype: list(dict)
        :raises ValueError: if neither wavelength range is defined, or if both
            are defined but disagree on the disperser
        """
        exposures = int(exposure.find("{}:Exposures".format(self.NS), c.ns).text)
        readout_pattern = exposure.find("{}:ReadoutPatternLong".format(self.NS), c.ns).text
        disperser_long = exposure.find("{}:Wavelength1_4".format(self.NS), c.ns).text
        groups_long = int(exposure.find("{}:GroupsLong".format(self.NS), c.ns).text)
        integrations_long = int(exposure.find("{}:IntegrationsLong".format(self.NS), c.ns).text)
        disperser_short = exposure.find("{}:Wavelength2_3".format(self.NS), c.ns).text
        groups_short = int(exposure.find("{}:GroupsShort".format(self.NS), c.ns).text)
        integrations_short = int(exposure.find("{}:IntegrationsShort".format(self.NS), c.ns).text)
        # Artificially add a subarray to predict memory and time consumption
        if disperser_long is not None and disperser_short is not None:
            detector = "BOTH"
            subarray = "BOTH"
        elif disperser_long is not None:
            detector = "LW"
            subarray = "FULL"
        elif disperser_short is not None:
            detector = "SW"
            subarray = "FULL"
        else:
            # Bug fix: the original only logged here, then crashed with an
            # UnboundLocalError on 'detector' below; fail fast instead.
            message = "Can't determine detector (LW, SW or BOTH). disperser_long={}; disperser_short={}".format(
                disperser_long, disperser_short)
            LOG.error(message)
            raise ValueError(message)
        if detector == "BOTH":
            if disperser_long != disperser_short:
                raise ValueError("[detector=BOTH] Incoherent values (disperser) for SW and LW")
            disperser = c.DISPERSER[disperser_long]
        elif detector == "LW":
            disperser = c.DISPERSER[disperser_long]
        else:  # detector == "SW"
            disperser = c.DISPERSER[disperser_short]
        nb_dither = common_metadata.get("NDither", 1)  # 1 == no dither
        # Specific to my needs
        simulation = {
            "exposures": exposures,
            "LW_integrations": integrations_long,
            "LW_frames": groups_long,
            "SW_integrations": integrations_short,
            "SW_frames": groups_short,
            "subarray": subarray,
            "detector": detector,
            "disperser": disperser,
            "NDither": nb_dither,
            "readDetect": readout_pattern,
        }
        simulation.update(common_metadata)
        return [simulation]

    def parse_all_exposure(self, exposure, common_metadata):
        """Parse one dual-mode (MRS+IMA) exposure element.

        :param exposure: Exposure template
        :type exposure: mef:Exposure XML element
        :param dict common_metadata: metadata declared in parent element that need to be added
        :return: list of sub simulations
        :rtype: list(dict)
        """
        # IMA
        exposures = int(exposure.find("{}:Exposures".format(self.NS), c.ns).text)
        ima_integrations = int(exposure.find("{}:Integrations".format(self.NS), c.ns).text)
        ima_groups = int(exposure.find("{}:Groups".format(self.NS), c.ns).text)
        ima_filter = exposure.find("{}:Filter".format(self.NS), c.ns).text
        readout_pattern = exposure.find("{}:ReadoutPattern".format(self.NS), c.ns).text
        # Since long and short can't be different I read only one
        wavelength1_4 = exposure.find("{}:Wavelength1_4".format(self.NS), c.ns).text
        disperser = c.DISPERSER[wavelength1_4]
        # (the original also read ReadoutPatternLong but never used it; dropped)
        groups_long = int(exposure.find("{}:GroupsLong".format(self.NS), c.ns).text)
        integrations_long = int(exposure.find("{}:IntegrationsLong".format(self.NS), c.ns).text)
        groups_short = int(exposure.find("{}:GroupsShort".format(self.NS), c.ns).text)
        integrations_short = int(exposure.find("{}:IntegrationsShort".format(self.NS), c.ns).text)
        nb_dither = common_metadata.get("NDither", 1)  # 1 == no dither
        # Specific to my needs
        simulation = {
            "exposures": exposures,
            "filter": ima_filter,
            "ima_integrations": ima_integrations,
            "ima_frames": ima_groups,
            "LW_integrations": integrations_long,
            "LW_frames": groups_long,
            "SW_integrations": integrations_short,
            "SW_frames": groups_short,
            "disperser": disperser,
            "NDither": nb_dither,
            "readDetect": readout_pattern,
        }
        simulation.update(common_metadata)
        return [simulation]

    def parse_template(self):
        """Select the exposure parser for the Detector value, then parse all exposures.

        :raises ValueError: for an unknown Detector value
        """
        self.simulation_list = []
        detector = self.template.find("{}:Detector".format(self.NS), c.ns).text
        dither = self.template.find("{}:Dither".format(self.NS), c.ns).text
        exposure_list = self.template.find("{}:ExposureList".format(self.NS), c.ns)
        common_metadata = {
            "detector": detector,
            "dither_type": dither,
        }
        if dither == "true":
            dither_el = self.template.find("{}:DitherSpec".format(self.NS), c.ns)
            common_metadata["NDither"] = dithering.parse_dither(dither_el, self.NS)
        if detector == "IMAGER":
            self.parse_exposure = self.parse_imager_exposure
            common_metadata["subarray"] = self.template.find("{}:Subarray".format(self.NS), c.ns).text
        elif detector == "MRS":
            self.parse_exposure = self.parse_mrs_exposure
        elif detector == "ALL":
            # Dual mode MRS+IMA
            common_metadata["subarray"] = self.template.find("{}:Subarray".format(self.NS), c.ns).text
            self.parse_exposure = self.parse_all_exposure
        else:
            raise ValueError("Unknown detector value ({})".format(detector))
        for exp_el in exposure_list:
            self._extend_simulation_list(self.parse_exposure(exp_el, common_metadata))
class MiriDark(Template):
    """Template parser for MiriDark observations.

    :meth:`parse_template` selects one of the three exposure parsers below
    depending on the ``Detector`` element (IMAGER, MRS or ALL).
    """

    NS = "md"

    def parse_imager_exposure(self, exposure, common_metadata):
        """Parse one imager dark exposure element.

        :param exposure: Exposure template
        :type exposure: mef:Exposure XML element
        :param dict common_metadata: metadata declared in parent element that need to be added
        :return: list of sub simulations
        :rtype: list(dict)
        """
        exposures = int(exposure.find("{}:Exposures".format(self.NS), c.ns).text)
        integrations = int(exposure.find("{}:Integrations".format(self.NS), c.ns).text)
        groups = int(exposure.find("{}:Groups".format(self.NS), c.ns).text)
        readout_pattern = exposure.find("{}:ReadoutPattern".format(self.NS), c.ns).text
        nb_dither = 1  # No dither by default
        # Specific to my needs
        simulation = {
            "exposures": exposures,
            "ima_integrations": integrations,
            "ima_frames": groups,
            "NDither": nb_dither,
            "readDetect": readout_pattern,
        }
        simulation.update(common_metadata)
        return [simulation]

    def parse_mrs_exposure(self, exposure, common_metadata):
        """Parse one MRS dark exposure element.

        :param exposure: Exposure template
        :type exposure: mef:Exposure XML element
        :param dict common_metadata: metadata declared in parent element that need to be added
        :return: list of sub simulations
        :rtype: list(dict)
        :raises ValueError: if the detector cannot be determined
        """
        exposures = int(exposure.find("{}:Exposures".format(self.NS), c.ns).text)
        readout_pattern = exposure.find("{}:ReadoutPattern".format(self.NS), c.ns).text
        groups_long = int(exposure.find("{}:GroupsLong".format(self.NS), c.ns).text)
        integrations_long = int(exposure.find("{}:IntegrationsLong".format(self.NS), c.ns).text)
        groups_short = int(exposure.find("{}:GroupsShort".format(self.NS), c.ns).text)
        integrations_short = int(exposure.find("{}:IntegrationsShort".format(self.NS), c.ns).text)
        # Artificially add a subarray to predict memory and time consumption
        # NOTE(review): groups_long/groups_short come from int(...) and can
        # never be None, so the LW/SW branches below are effectively dead and
        # detector is always "BOTH" — confirm whether that is intended.
        if groups_long is not None and groups_short is not None:
            detector = "BOTH"
            subarray = "BOTH"
        elif groups_long is not None:
            detector = "LW"
            subarray = "FULL"
        elif groups_short is not None:
            detector = "SW"
            subarray = "FULL"
        else:
            # Bug fix: the original only logged here, then crashed with an
            # UnboundLocalError on 'subarray'/'detector' below; fail fast.
            LOG.error("Can't determine detector (LW, SW or BOTH).")
            raise ValueError("Can't determine detector (LW, SW or BOTH).")
        nb_dither = 1  # No dither by default
        # Specific to my needs
        simulation = {
            "exposures": exposures,
            "LW_integrations": integrations_long,
            "LW_frames": groups_long,
            "SW_integrations": integrations_short,
            "SW_frames": groups_short,
            "subarray": subarray,
            "detector": detector,
            "NDither": nb_dither,
            "readDetect": readout_pattern,
        }
        simulation.update(common_metadata)
        return [simulation]

    def parse_all_exposure(self, exposure, common_metadata):
        """Parse one dual-mode (MRS+IMA) dark exposure element.

        :param exposure: Exposure template
        :type exposure: mef:Exposure XML element
        :param dict common_metadata: metadata declared in parent element that need to be added
        :return: list of sub simulations
        :rtype: list(dict)
        """
        # IMA
        exposures = int(exposure.find("{}:Exposures".format(self.NS), c.ns).text)
        ima_integrations = int(exposure.find("{}:Integrations".format(self.NS), c.ns).text)
        ima_groups = int(exposure.find("{}:Groups".format(self.NS), c.ns).text)
        readout_pattern = exposure.find("{}:ReadoutPattern".format(self.NS), c.ns).text
        readout_pattern_long = exposure.find("{}:ReadoutPatternLong".format(self.NS), c.ns).text
        groups_long = int(exposure.find("{}:GroupsLong".format(self.NS), c.ns).text)
        integrations_long = int(exposure.find("{}:IntegrationsLong".format(self.NS), c.ns).text)
        readout_pattern_short = exposure.find("{}:ReadoutPatternShort".format(self.NS), c.ns).text
        groups_short = int(exposure.find("{}:GroupsShort".format(self.NS), c.ns).text)
        integrations_short = int(exposure.find("{}:IntegrationsShort".format(self.NS), c.ns).text)
        nb_dither = common_metadata.get("NDither", 1)  # 1 == no dither
        # Specific to my needs
        simulation = {
            "exposures": exposures,
            "ima_integrations": ima_integrations,
            "ima_frames": ima_groups,
            "LW_integrations": integrations_long,
            "LW_frames": groups_long,
            "SW_integrations": integrations_short,
            "SW_frames": groups_short,
            "NDither": nb_dither,
            "readDetect": readout_pattern,
            "LW_readDetect": readout_pattern_long,
            "SW_readDetect": readout_pattern_short,
        }
        simulation.update(common_metadata)
        return [simulation]

    def parse_template(self):
        """Select the exposure parser for the Detector value, then parse all exposures.

        :raises ValueError: for an unknown Detector value
        """
        self.simulation_list = []
        detector = self.template.find("{}:Detector".format(self.NS), c.ns).text
        # NOTE(review): exposures live under the "Filters" element here (not
        # "ExposureList" as in other templates) — matches the original code.
        exposure_list = self.template.find("{}:Filters".format(self.NS), c.ns)
        common_metadata = {
            "detector": detector,
        }
        if detector == "IMAGER":
            self.parse_exposure = self.parse_imager_exposure
            common_metadata["subarray"] = self.template.find("{}:Subarray".format(self.NS), c.ns).text
        elif detector == "MRS":
            self.parse_exposure = self.parse_mrs_exposure
        elif detector == "ALL":
            # Dual mode MRS+IMA
            common_metadata["subarray"] = self.template.find("{}:Subarray".format(self.NS), c.ns).text
            self.parse_exposure = self.parse_all_exposure
        else:
            raise ValueError("Unknown detector value ({})".format(detector))
        for exp_el in exposure_list:
            self._extend_simulation_list(self.parse_exposure(exp_el, common_metadata))
class MiriMRS(Template):
    """Template parser for MiriMRS observations.

    :meth:`parse_template` selects one of the two exposure parsers below
    depending on the ``Detector`` element (MRS or ALL).
    """

    NS = "mmrs"

    def parse_mrs_exposure(self, exposure, common_metadata, dithers=None):
        """Parse one MRS exposure element.

        :param exposure: Exposure template
        :type exposure: mef:Exposure XML element
        :param dict common_metadata: metadata declared in parent element that need to be added
        :param dithers: [optional] Dithers template, contain a list of dither_type object
        :type dithers: mmrs:Dithers XML element
        :return: list of sub simulations
        :rtype: list(dict)
        :raises ValueError: if the detector cannot be determined
        """
        exposures = int(exposure.find("{}:Exposures".format(self.NS), c.ns).text)
        readout_pattern = exposure.find("{}:ReadoutPatternLong".format(self.NS), c.ns).text
        disperser_band = exposure.find("{}:Wavelength".format(self.NS), c.ns).text
        disperser = c.DISPERSER[disperser_band]
        groups_long = int(exposure.find("{}:GroupsLong".format(self.NS), c.ns).text)
        integrations_long = int(exposure.find("{}:IntegrationsLong".format(self.NS), c.ns).text)
        groups_short = int(exposure.find("{}:GroupsShort".format(self.NS), c.ns).text)
        integrations_short = int(exposure.find("{}:IntegrationsShort".format(self.NS), c.ns).text)
        # Artificially add a subarray to predict memory and time consumption
        # NOTE(review): groups_long/groups_short come from int(...) and can
        # never be None, so the LW/SW branches below are effectively dead and
        # detector is always "BOTH" — confirm whether that is intended.
        if groups_long is not None and groups_short is not None:
            detector = "BOTH"
            subarray = "BOTH"
        elif groups_long is not None:
            detector = "LW"
            subarray = "FULL"
        elif groups_short is not None:
            detector = "SW"
            subarray = "FULL"
        else:
            # Bug fix: the original only logged here, then crashed with an
            # UnboundLocalError on 'subarray'/'detector' below; fail fast.
            LOG.error("Can't determine detector (LW, SW or BOTH).")
            raise ValueError("Can't determine detector (LW, SW or BOTH).")
        dither = exposure.find("{}:Dither".format(self.NS), c.ns).text
        nb_dither_points = 1  # No dither by default
        if dither != 'None':
            dither_el = dithers[self.extract_dither_id(dither)]
            nb_dither_points = dithering.parse_dither(dither_el, self.NS)
        # Specific to my needs
        simulation = {
            "exposures": exposures,
            "LW_integrations": integrations_long,
            "LW_frames": groups_long,
            "SW_integrations": integrations_short,
            "SW_frames": groups_short,
            "subarray": subarray,
            "detector": detector,
            "disperser": disperser,
            "NDither": nb_dither_points,
            "readDetect": readout_pattern,
        }
        simulation.update(common_metadata)
        return [simulation]

    def parse_all_exposure(self, exposure, common_metadata, dithers=None):
        """Parse one dual-mode (MRS+IMA) exposure element.

        :param exposure: Exposure template
        :type exposure: mef:Exposure XML element
        :param dict common_metadata: metadata declared in parent element that need to be added
        :param dithers: [optional] Dithers template, contain a list of dither_type object
        :type dithers: mmrs:Dithers XML element
        :return: list of sub simulations
        :rtype: list(dict)
        """
        # IMA
        exposures = int(exposure.find("{}:Exposures".format(self.NS), c.ns).text)
        ima_integrations = int(exposure.find("{}:Integrations".format(self.NS), c.ns).text)
        ima_groups = int(exposure.find("{}:Groups".format(self.NS), c.ns).text)
        ima_filter = exposure.find("{}:Filter".format(self.NS), c.ns).text
        readout_pattern = exposure.find("{}:ReadoutPattern".format(self.NS), c.ns).text
        # Since long and short can't be different I read only one
        wavelength = exposure.find("{}:Wavelength".format(self.NS), c.ns).text
        disperser = c.DISPERSER[wavelength]
        # (the original also read ReadoutPatternLong but never used it; dropped)
        groups_long = int(exposure.find("{}:GroupsLong".format(self.NS), c.ns).text)
        integrations_long = int(exposure.find("{}:IntegrationsLong".format(self.NS), c.ns).text)
        groups_short = int(exposure.find("{}:GroupsShort".format(self.NS), c.ns).text)
        integrations_short = int(exposure.find("{}:IntegrationsShort".format(self.NS), c.ns).text)
        dither = exposure.find("{}:Dither".format(self.NS), c.ns).text
        nb_dither_points = 1  # No dither by default
        if dither != 'None':
            dither_el = dithers[self.extract_dither_id(dither)]
            nb_dither_points = dithering.parse_dither(dither_el, self.NS)
        # Specific to my needs
        simulation = {
            "exposures": exposures,
            "filter": ima_filter,
            "ima_integrations": ima_integrations,
            "ima_frames": ima_groups,
            "LW_integrations": integrations_long,
            "LW_frames": groups_long,
            "SW_integrations": integrations_short,
            "SW_frames": groups_short,
            "disperser": disperser,
            "NDither": nb_dither_points,
            "readDetect": readout_pattern,
        }
        simulation.update(common_metadata)
        return [simulation]

    def parse_template(self):
        """Select the exposure parser for the Detector value, then parse all exposures.

        :raises ValueError: for an unknown Detector value
        """
        self.simulation_list = []
        detector = self.template.find("{}:Detector".format(self.NS), c.ns).text
        dithers = self.template.find("{}:Dithers".format(self.NS), c.ns)
        simultaneous_imaging = self.template.find("{}:SimultaneousImaging".format(self.NS), c.ns).text
        exposure_list = self.template.find("{}:ExposureList".format(self.NS), c.ns)
        common_metadata = {
            "detector": detector,
            "simultaneous_imaging": simultaneous_imaging,
        }
        if detector == "MRS":
            self.parse_exposure = self.parse_mrs_exposure
        elif detector == "ALL":
            # Dual mode MRS+IMA
            common_metadata["subarray"] = self.template.find("{}:Subarray".format(self.NS), c.ns).text
            common_metadata["primary_channel"] = self.template.find("{}:PrimaryChannel".format(self.NS), c.ns).text
            self.parse_exposure = self.parse_all_exposure
        else:
            raise ValueError("Unknown detector value ({})".format(detector))
        for exp_el in exposure_list:
            self._extend_simulation_list(self.parse_exposure(exp_el, common_metadata, dithers=dithers))
class MiriCoron(Template):
    """Template parser for MiriCoron observations (one simulation per filter)."""

    NS = "mc"

    def parse_template(self):
        """Build one simulation dict for each filter entry of the template."""
        self.simulation_list = []
        dither = self.template.find("{}:Dither".format(self.NS), c.ns).text
        nb_dither_points = 1  # default when no dithers
        if dither != 'NONE':
            if dither in c.DITHER_POINTS:
                nb_dither_points = c.DITHER_POINTS[dither]
            else:
                LOG.error("Unknown Dither type: {}".format(dither))
        for filt in self.template.find("{}:Filters".format(self.NS), c.ns):

            def text_of(tag):
                return filt.find("{}:{}".format(self.NS, tag), c.ns).text

            # Specific to my needs (the coronagraphic mask plays the
            # role of the subarray here, as in the original code)
            self._add_simulation({
                "exposures": int(text_of("Exposures")),
                "ima_integrations": int(text_of("Integrations")),
                "ima_frames": int(text_of("Groups")),
                "NDither": nb_dither_points,
                "subarray": text_of("Mask"),
                "filter": text_of("Filter"),
                "readDetect": text_of("ReadoutPattern"),
            })
class MiriCpc(Template):
    """Template parser for MiriCpc observations (one simulation per filter)."""

    NS = "mcpc"

    def parse_template(self):
        """Build one simulation dict for each filter entry; never dithered."""
        self.simulation_list = []
        subarray = self.template.find("{}:Subarray".format(self.NS), c.ns).text
        for filt in self.template.find("{}:Filters".format(self.NS), c.ns):

            def text_of(tag):
                return filt.find("{}:{}".format(self.NS, tag), c.ns).text

            # Specific to my needs
            self._add_simulation({
                "exposures": int(text_of("Exposures")),
                "ima_integrations": int(text_of("Integrations")),
                "ima_frames": int(text_of("Groups")),
                "NDither": 1,  # no dither for this template
                "subarray": subarray,
                "filter": text_of("Filter"),
                "readDetect": text_of("ReadoutPattern"),
            })
class MiriMRSCrossGratingEngineering(Template):
    """APT template parser for MIRI MRS cross-grating engineering observations."""

    NS = "mmrscge"

    def parse_template(self):
        """Build one simulation dict per exposure element of the template."""
        self.simulation_list = []

        def int_field(node, tag):
            # Read a namespaced child element and convert its text to int.
            return int(node.find("{}:{}".format(self.NS, tag), c.ns).text)

        for exposure in self.template.find("{}:ExposureList".format(self.NS), c.ns):
            self._add_simulation({
                "exposures": int_field(exposure, "Exposures"),
                "LW_integrations": int_field(exposure, "IntegrationsLong"),
                "LW_frames": int_field(exposure, "GroupsLong"),
                "SW_integrations": int_field(exposure, "IntegrationsShort"),
                "SW_frames": int_field(exposure, "GroupsShort"),
                # This engineering template always reads full-frame on both
                # detectors with no dithering.
                "subarray": "FULL",
                "detector": "BOTH",
                "NDither": 1,
            })
# Registry mapping APT template element names to their parser classes; used
# to dispatch each observation template to the matching Template subclass.
templates = {"MiriImaging": MiriImaging, "MiriLRS": MiriLRS, "MiriExternalFlat": MiriExternalFlat, "MiriMRS": MiriMRS,
             "MiriDark": MiriDark, "MiriCoron": MiriCoron,
             "MiriCpc": MiriCpc, "MiriMRSCrossGratingEngineering": MiriMRSCrossGratingEngineering}
| 38.644416
| 118
| 0.608657
| 3,396
| 30,104
| 5.249117
| 0.067138
| 0.0414
| 0.080108
| 0.086054
| 0.838326
| 0.830192
| 0.822506
| 0.810502
| 0.798889
| 0.78537
| 0
| 0.001773
| 0.269134
| 30,104
| 778
| 119
| 38.694087
| 0.808427
| 0.131444
| 0
| 0.749491
| 0
| 0
| 0.143144
| 0.009873
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044807
| false
| 0.002037
| 0.008147
| 0.002037
| 0.107943
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
29954ffe7037c68e3ff52ee53f093b13c4e6a94f
| 49
|
py
|
Python
|
instance/config.py
|
alphonce-otieno-odhiambo/HOTLINE-BLINKNEWS
|
a29fefce2b5925c3c8c0ce86f7f9f7687148b24a
|
[
"MIT"
] | null | null | null |
instance/config.py
|
alphonce-otieno-odhiambo/HOTLINE-BLINKNEWS
|
a29fefce2b5925c3c8c0ce86f7f9f7687148b24a
|
[
"MIT"
] | null | null | null |
instance/config.py
|
alphonce-otieno-odhiambo/HOTLINE-BLINKNEWS
|
a29fefce2b5925c3c8c0ce86f7f9f7687148b24a
|
[
"MIT"
] | null | null | null |
# NOTE(review): hard-coded API key committed to source control — rotate this
# key and load it from an environment variable (e.g. os.environ) instead.
NEWS_API_KEY = 'a0811784c6544548b7bef41c29abb906'
| 49
| 49
| 0.897959
| 4
| 49
| 10.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.468085
| 0.040816
| 49
| 1
| 49
| 49
| 0.425532
| 0
| 0
| 0
| 0
| 0
| 0.64
| 0.64
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
29d81ce7e9955e6715a3c721b480315d889473d2
| 26,270
|
py
|
Python
|
src/evalfitness.py
|
afuentesconwet/pySTEPforPython2.7
|
59c30ac9b663c1bfc90fe592709a1304302b08a7
|
[
"Unlicense"
] | null | null | null |
src/evalfitness.py
|
afuentesconwet/pySTEPforPython2.7
|
59c30ac9b663c1bfc90fe592709a1304302b08a7
|
[
"Unlicense"
] | null | null | null |
src/evalfitness.py
|
afuentesconwet/pySTEPforPython2.7
|
59c30ac9b663c1bfc90fe592709a1304302b08a7
|
[
"Unlicense"
] | null | null | null |
"""
evalfitness
===========
Contains methods to evaluate the fitness of a tree.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@author: by Mehdi Khoury
@version: 1.00
@copyright: (c) 2009 Mehdi Khoury under the mit license
http://www.opensource.org/licenses/mit-license.html
@contact: mehdi.khoury at gmail.com
"""
from treeutil import PostOrder_Search
from collections import deque
#import psyco
import random
import math
import settings
import buildtree
import timeit
import settings
import fitnessutil
#psyco.profile()
def EvalTreeForOneInputSet(myTree, input_set_ref):
"""
Function: EvalTreeForOneInputSet
=================================
Function used to evaluate a tree by pluggin in
one set of values (one learning example)
@param myTree: the nested list representing a tree
@param input_set_ref: the set of values to plug into the tree
@return: the fitness of the tree for this set of values
"""
resultStack=deque()
adfDict={}
# examine every node in the tree in pre-order taversal
# (leaves first and then branches)
for elem in PostOrder_Search(myTree):
# if the node is a leaf (variable, or constant),
# add its value to the result stack
if elem[0]==3:
resultStack.append(settings.terminals[elem[2]][input_set_ref])
elif elem[0]==4:
resultStack.append(settings.terminals[elem[2]])
# if the node is a function with n arguments, pop the result stack
# n times. Get these popped elemnts as arguments for the function,
# and replace the top of the stack by the result of the function
elif elem[0]==1:
nb= elem[1]
name = elem[2]
tempResult=deque()
for i in xrange(0,nb):
try:
tempResult.append(resultStack.pop())
except:
print myTree
print elem
print resultStack
exit
resultStack.extend(map(settings.functions[name],[tempResult]))
# if the node is an ADF branch, add the top of the stack to
# the ADF dictionary
elif elem[0]==2:
adfDict[elem[2]]= resultStack[-1]
# if the node is an ADF terminal, add the corresponding ADF
# branch value available in the dictionary to the result stack
elif elem[0]==5:
resultStack.append(adfDict[elem[2]])
# if the node is a root, apply the root function to all
# direct children and return the result.
elif elem[0]==0:
name = elem[2]
tempResult=[]
while resultStack:
tempResult.append(resultStack.popleft())
resultStack.extend(map(settings.functions[name],[tempResult]))
return resultStack[0]
def EvalTreeForOneListInputSet(myTree):
    """
    Function: EvalTreeForOneListInputSet
    ====================================
    Evaluate a tree by plugging in one list of values (one list of data
    points): every leaf resolves directly to its entry in settings.terminals.
    @param myTree: the nested list representing a tree
    @return: the fitness of the tree for this set of values
    """
    stack = deque()
    adf_results = {}
    # Walk the tree leaves-first so operands are always on the stack before
    # the node that consumes them.
    for node in PostOrder_Search(myTree):
        kind = node[0]
        if kind == 3 or kind == 4:
            # Variable or constant leaf: push its terminal value.
            stack.append(settings.terminals[node[2]])
        elif kind == 1:
            # Function node: pop one operand per argument, apply, push result.
            arity = node[1]
            operands = deque()
            for _ in xrange(arity):
                operands.append(stack.pop())
            stack.extend(map(settings.functions[node[2]], [operands]))
        elif kind == 2:
            # ADF branch: remember the current top of stack under its name.
            adf_results[node[2]] = stack[-1]
        elif kind == 5:
            # ADF terminal: push the previously computed branch value.
            stack.append(adf_results[node[2]])
        elif kind == 0:
            # Root: apply the root function to everything left on the stack.
            collected = []
            while stack:
                collected.append(stack.popleft())
            stack.extend(map(settings.functions[node[2]], [collected]))
    return stack[0]
def EvalTreeForOneListInputSet_tutorial8(myTree):
    """
    Function: EvalTreeForOneListInputSet_tutorial8
    ==============================================
    Evaluate a tree by plugging in one list of values (one list of data
    points). Tutorial-8 entry point.
    @param myTree: the nested list representing a tree
    @return: the fitness of the tree for this set of values
    """
    # This body was a line-for-line duplicate of EvalTreeForOneListInputSet
    # (its only diff, xrange(0, nb) vs xrange(nb), iterates identically), so
    # delegate to it instead of maintaining a second copy of the evaluator.
    return EvalTreeForOneListInputSet(myTree)
def EvalTreeForOneListInputSet_tutorial9(myTree):
    """
    Function: EvalTreeForOneListInputSet_tutorial9
    ==============================================
    Evaluate a tree by plugging in one list of values (one list of data
    points). Tutorial-9 entry point.
    @param myTree: the nested list representing a tree
    @return: the fitness of the tree for this set of values
    """
    # This body was a line-for-line duplicate of EvalTreeForOneListInputSet,
    # so delegate to it instead of maintaining a third copy of the evaluator.
    return EvalTreeForOneListInputSet(myTree)
def EvalTreeForAllInputSets(myTree, input_sets):
    """
    Function: EvalTreeForAllInputSets
    ==================================
    Evaluate a tree once per learning example by plugging in each set of
    values in turn.
    @param myTree: the nested list representing a tree
    @param input_sets: the set of values to plug into the tree
    @return: the fitnesses of the tree over several sets of values
    """
    # The example index doubles as the reference into the terminal table.
    return [EvalTreeForOneInputSet(myTree, example_ref)
            for example_ref in xrange(len(input_sets))]
# compute global fitness of an individual across all different examples
def FinalFitness(intermediate_outputs):
    """
    Function: FinalFitness
    =======================
    Compute global fitness of an individual. Intended when wanting to refine
    the fitness score.

    The score is the sum, over every evaluation case, of the absolute
    differences between the tree's outputs and the ideal outputs stored in
    settings.ideal_results (0 is a perfect match, lower is better).
    @param intermediate_outputs: the fitnesses of the tree over several sets of
    values
    @return: global fitness
    """
    final_output = 0
    # (cleanup: the unused local `goal_function` was removed)
    for nb in xrange(settings.nb_eval):
        ideal_results = settings.ideal_results[nb]
        obtained_results = intermediate_outputs[nb]
        # An infinite (or non-numeric) partial result makes the individual
        # unusable: short-circuit with the worst possible score.
        for el in obtained_results:
            try:
                if math.isinf(el):
                    return el
            except:
                # el was not a number at all (e.g. a sequence): worst case.
                return float('inf')
        # sum the absolute values of the differences over one example
        diff = sum([math.fabs(ideal_results[x] - obtained_results[x]) for x in xrange(len(ideal_results))])
        final_output = final_output + diff
    return final_output
def FinalFitness2(intermediate_outputs):
    """
    Function: FinalFitness2
    ========================
    Compute global fitness of an individual. Intended when wanting to refine
    the fitness score.
    @param intermediate_outputs: the fitnesses of the tree over several sets of
    values
    @return: global fitness
    """
    final_output=0
    # Sum, per evaluation case, the absolute differences between the values
    # the tree produced and the ideal solution stored in settings.
    ideal_results=settings.ideal_results
    obtained_results=intermediate_outputs
    for res in xrange(len(settings.ideal_results)):
        # Guard: an infinite (or non-numeric) partial result short-circuits
        # with the worst possible score.
        for el in obtained_results[res]:
            try:
                if math.isinf(el):
                    return el
            except:
                # el was not a number (e.g. a sequence): worst case.
                return float('inf')
        # sum the absolute values of the differences over one example
        # NOTE(review): the range is settings.nb_ex, not len(ideal_results[res])
        # — presumably they agree; confirm against how settings is populated.
        diff= sum( [math.fabs(ideal_results[res][x]-obtained_results[res][x]) for x in xrange(settings.nb_ex)])
        final_output= final_output+diff
    return final_output
def FinalFitness3(intermediate_outputs):
    """
    Function: FinalFitness3
    ========================
    Compute global fitness of an individual. Intended when wanting to refine
    the fitness score.
    @param intermediate_outputs: the fitnesses of the tree over several sets of
    values
    @return: global fitness
    """
    final_output=0
    # Each element of intermediate_outputs is one evaluation case; each
    # sub-element is a value obtained at the top of a tree for one input.
    goal_function=[]  # NOTE(review): never used — candidate for removal
    for nb in xrange(settings.nb_eval):
        ideal_results=settings.ideal_results[nb]
        obtained_results=intermediate_outputs[nb]
        # Guard: an infinite (or non-numeric) partial result short-circuits
        # with the worst possible score.
        for el in obtained_results:
            try:
                if math.isinf(el):
                    return el
            except:
                # el was not a number (e.g. a sequence): worst case.
                return float('inf')
        # Branch selection via tuple indexing: "(F, T)[bool(C)]" is the old
        # conditional-expression idiom. Here obtained_results[0] is the
        # boolean condition choosing which of obtained_results[1] (False) or
        # obtained_results[2] (True) is compared against each ideal value.
        # Chosen over an if/else loop for speed and compactness.
        diff= sum( [(math.fabs(ideal_results[x]-obtained_results[1]) ,math.fabs(ideal_results[x]-obtained_results[2]) )[obtained_results[0]] for x in xrange(len(ideal_results))])
        final_output= final_output+diff
    return final_output
def FinalFitness4(intermediate_outputs):
    """
    Function: FinalFitness4
    ========================
    Compute global fitness of an individual. Intended when wanting to refine
    the fitness score.
    @param intermediate_outputs: the fitnesses of the tree over several sets of
    values
    @return: global fitness
    """
    final_output=0
    goal_function=[]  # NOTE(review): never used — candidate for removal
    # Scan every nested value first: any infinity aborts with the worst
    # score. Booleans are explicitly skipped before the isinf test —
    # presumably they are condition flags rather than measurements.
    for nb in xrange(len(intermediate_outputs)):
        for el in intermediate_outputs[nb]:
            for el2 in el:
                try:
                    if isinstance(el2, bool):
                        pass
                    elif math.isinf(el2):
                        return el2
                except:
                    # el2 was not a number at all: worst case.
                    return float('inf')
    # "(F, T)[bool(C)]" tuple-indexing selects, per cell, which candidate
    # output is compared against the ideal result: intermediate_outputs[2]
    # when intermediate_outputs[0][x][y] is False, intermediate_outputs[1]
    # when it is True. Written as one comprehension for speed.
    final_output= sum([(math.fabs(settings.ideal_results[x][y]-intermediate_outputs[2][x][y]),math.fabs(settings.ideal_results[x][y]-intermediate_outputs[1][x][y])) [intermediate_outputs[0][x][y]] for x in xrange(len(intermediate_outputs[1])) for y in xrange(len(intermediate_outputs[1][x]))])
    return final_output
def FinalFitness_tutorial8(intermediate_outputs):
    """
    Function: FinalFitness_tutorial8
    ================================
    Compute global fitness of an individual. Intended when wanting to refine
    the fitness score.
    @param intermediate_outputs: the fitnesses of the tree over several sets of
    values
    @return: global fitness
    """
    final_output=0
    goal_function=[]  # NOTE(review): never used — candidate for removal
    try:
        # Combine the three intermediate output streams with the input data
        # through a binary mask (semantics live in fitnessutil).
        result=fitnessutil.ReplaceUsingBinaryMask(settings.inputdata,intermediate_outputs[0],intermediate_outputs[1],intermediate_outputs[2] )
        # expand the compact array
        uncompressed_result= [fitnessutil.UncompressList(result[x][0]) for x in xrange(len(result))]
        uncompressed_ideal_results= [fitnessutil.UncompressList(settings.ideal_results[x]) for x in xrange(len(result))]
        # Fitness = number of cells that differ from the ideal result
        # ((0,1)[cond] is the old conditional-expression idiom).
        final_output= sum([(0,1)[uncompressed_result[x][y]!=uncompressed_ideal_results[x][y]] for x in xrange(len(uncompressed_result)) for y in xrange(len(uncompressed_result[x]))])
    except:
        # Any failure while decoding the outputs yields the worst fitness.
        # NOTE(review): the bare except also hides programming errors —
        # consider narrowing to the exceptions fitnessutil actually raises.
        final_output=float('inf')
    return final_output
def FinalFitness_tutorial9(intermediate_outputs):
    """
    Function: FinalFitness_tutorial9
    ================================
    Compute global fitness of an individual. Intended when wanting to refine
    the fitness score.
    @param intermediate_outputs: the fitnesses of the tree over several sets of
    values
    @return: global fitness
    """
    final_output=0
    goal_function=[]  # NOTE(review): never used — candidate for removal
    try:
        temp_input=settings.inputdata
        # Apply one binary-mask replacement pass per group of three output
        # streams, feeding each pass's result into the next pass.
        for i in xrange(1,(len(intermediate_outputs)/3)+1):
            temp_result=fitnessutil.ReplaceUsingBinaryMask(temp_input,intermediate_outputs[(i*3)-1],intermediate_outputs[(i*3)-2],intermediate_outputs[(i*3)-3] )
            temp_input=temp_result
        # expand the compact array
        uncompressed_result= [fitnessutil.UncompressList(temp_result[x][0]) for x in xrange(len(temp_result))]
        uncompressed_ideal_results= [fitnessutil.UncompressList(settings.ideal_results[x]) for x in xrange(len(temp_result))]
        # Fitness = number of cells that differ from the ideal result.
        final_output= sum([(0,1)[uncompressed_result[x][y]!=uncompressed_ideal_results[x][y]] for x in xrange(len(uncompressed_result)) for y in xrange(len(uncompressed_result[x]))])
    except:
        # Any failure yields the worst fitness. NOTE(review): the bare except
        # also hides programming errors — consider narrowing it.
        final_output=float('inf')
    return final_output
if __name__ == '__main__':
    # Ad-hoc benchmark harness (Python 2): build one full random tree and
    # time the tutorial-9 fitness pipeline on it 1000 times.
    a=buildtree.buildTree().AddFullNode((0,2,'root'),0,8)
    t2 = timeit.Timer('evalfitness.FinalFitness_tutorial9(evalfitness.EvalTreeForOneListInputSet_tutorial9(a))' , 'from __main__ import a ;import evalfitness')
    print t2.timeit(1000)
| 45.528596
| 299
| 0.584126
| 3,136
| 26,270
| 4.828763
| 0.106186
| 0.057716
| 0.018887
| 0.014528
| 0.840454
| 0.826917
| 0.815558
| 0.807568
| 0.786568
| 0.770125
| 0
| 0.016193
| 0.327674
| 26,270
| 576
| 300
| 45.607639
| 0.841184
| 0.347811
| 0
| 0.737089
| 0
| 0
| 0.013322
| 0.007289
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.004695
| 0.046948
| null | null | 0.018779
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d99bfb211c75d8eab0472906468db064b753990d
| 5,719
|
py
|
Python
|
searchBar/basicapp/tests.py
|
rajvijen/Industry-Visit-Planning-and-Booking
|
fe54f9f354eb88cd78b1bc18a61908fe7de03577
|
[
"MIT"
] | null | null | null |
searchBar/basicapp/tests.py
|
rajvijen/Industry-Visit-Planning-and-Booking
|
fe54f9f354eb88cd78b1bc18a61908fe7de03577
|
[
"MIT"
] | 8
|
2019-09-05T04:58:20.000Z
|
2022-01-13T00:58:01.000Z
|
searchBar/basicapp/tests.py
|
rajvijen/ASE-101
|
fe54f9f354eb88cd78b1bc18a61908fe7de03577
|
[
"MIT"
] | 5
|
2018-11-15T19:04:29.000Z
|
2018-11-17T06:10:38.000Z
|
from django.test import TestCase ,Client
from django.http import HttpRequest
from django.urls import reverse
from .models import UserProfileInfo
from django.contrib.auth.models import User
class ListView_Testing(TestCase):
    """Each view should answer 200 and render its expected template."""

    @classmethod
    def setUpTestData(cls):
        # One shared user + profile fixture for every test in the class.
        cls.client = Client()
        cls.user = User.objects.create_user(username = "hemanth",email='hreddy281@gmail.com', password='devilmaycry4')
        cls.profile = UserProfileInfo.objects.create(user = cls.user,name = "hemanth",gender = "M")

    def _check_renders(self, url_name, template, needs_login=False):
        # GET the named route (optionally authenticated first) and assert it
        # succeeds with the given template.
        if needs_login:
            self.client.force_login(self.user)
        response = self.client.get(reverse(url_name))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, template)

    def test_view_uses_correct_template_index(self):
        self._check_renders('index', 'searchBarApp/index.html')

    def test_view_register(self):
        self._check_renders('basicapp:register', 'basicapp/registration.html')

    def test_user_login(self):
        self._check_renders('basicapp:user_login', 'basicapp/login.html')

    def test_user_profile(self):
        self._check_renders('basicapp:profile', 'basicapp/profile.html', needs_login=True)

    def test_user_profile_update(self):
        self._check_renders('basicapp:update', 'basicapp/update_profile.html', needs_login=True)

    def test_visited_industries(self):
        self._check_renders('basicapp:visited_industries', 'basicapp/visited.html', needs_login=True)

    def test_Booked_industries(self):
        self._check_renders('basicapp:booked_to_visit', 'basicapp/booked_to_visit.html', needs_login=True)
# NOTE(review): the block below is a module-level string literal used to
# disable old tests. The disabled test_activate signature
# "(self,**kwargs,**kwargs1)" is invalid Python (two ** parameters), so this
# cannot simply be uncommented — rewrite or delete it before re-enabling.
'''
def test_charge1(self):
    self.client.force_login(self.user)
    response = self.client.get(reverse('basicapp:charge1'))
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'basicapp/booking_charge.html')
def test_charge2(self):
    self.client.force_login(self.user)
    response = self.client.get(reverse('basicapp:charge2'))
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'basicapp/booking_charge.html')
def test_Cancelticket(self):
    self.client.force_login(self.user)
    response = self.client.get(reverse('basicapp:cancel_ticket'))
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'basicapp/booked_to_visit.html')
def test_activate(self,**kwargs,**kwargs1):
    response = self.client.get(reverse('basicapp:activate',**kwargs,**kwargs1))
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'searchBarApp/index.html')
class Forms_Testing(TestCase):
    def test_UserProfileInfoForms(self):
        invalid_data = {
            'username':"Hemanth",
            'email':"hreddy281@gmail.com",
            'password':1254,
            'name':'hemanth',
            'gender':"K"
        }
        form = UserProfileInfo(data = invalid_data)
        form.is_valid()
        self.assertTrue(form.errors)
'''
#URL-Testing
class URL_testing(TestCase):
    """Public routes render with 200; login-protected ones redirect (302)."""

    @classmethod
    def setUpTestData(cls):
        # One shared user + profile fixture for every test in the class.
        cls.client = Client()
        cls.user = User.objects.create_user(username = "hemanth",email='hreddy281@gmail.com', password='devilmaycry4')
        cls.profile = UserProfileInfo.objects.create(user = cls.user,name = "hemanth",gender = "M")

    def _anonymous_get(self, url_name):
        # Issue an unauthenticated GET against the named route.
        return self.client.get(reverse(url_name))

    def test_URL_register(self):
        response = self._anonymous_get('basicapp:register')
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'basicapp/registration.html')

    def test_URL_user_login(self):
        response = self._anonymous_get('basicapp:user_login')
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'basicapp/login.html')

    def test_URL_profile(self):
        # Anonymous access to a protected view: expect a redirect to login.
        self.assertEqual(self._anonymous_get('basicapp:profile').status_code, 302)

    def test_URL_profile_update(self):
        self.assertEqual(self._anonymous_get('basicapp:update').status_code, 302)

    def test_URL_visited_industries(self):
        self.assertEqual(self._anonymous_get('basicapp:visited_industries').status_code, 302)

    def test_URL_bookinglist_update(self):
        self.assertEqual(self._anonymous_get('basicapp:booked_to_visit').status_code, 302)
# NOTE(review): disabled charge-URL tests kept as a string literal; delete
# once the charge views are permanently removed.
'''
def test_URL_charge1(self):
    response = self.client.get(reverse('basicapp:charge1'))
    self.assertEqual(response.status_code, 302)
def test_URL_charge2(self):
    response = self.client.get(reverse('basicapp:charge2'))
    self.assertEqual(response.status_code, 302)
'''
| 42.051471
| 119
| 0.678965
| 634
| 5,719
| 5.976341
| 0.135647
| 0.071259
| 0.090261
| 0.105305
| 0.845342
| 0.837952
| 0.818422
| 0.803378
| 0.77857
| 0.764846
| 0
| 0.01801
| 0.203882
| 5,719
| 136
| 120
| 42.051471
| 0.814188
| 0.01801
| 0
| 0.623188
| 0
| 0
| 0.14698
| 0.074434
| 0
| 0
| 0
| 0
| 0.318841
| 1
| 0.217391
| false
| 0.028986
| 0.072464
| 0
| 0.318841
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9b62fd684a9d894868becf64c722ba0a9f07ed7
| 7,827
|
py
|
Python
|
sdk/python/pulumi_aws/ec2transitgateway/_inputs.py
|
sibuthomasmathew/pulumi-aws
|
6351f2182eb6f693d4e09e4136c385adfa0ab674
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/ec2transitgateway/_inputs.py
|
sibuthomasmathew/pulumi-aws
|
6351f2182eb6f693d4e09e4136c385adfa0ab674
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/ec2transitgateway/_inputs.py
|
sibuthomasmathew/pulumi-aws
|
6351f2182eb6f693d4e09e4136c385adfa0ab674
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
# Public API of this generated module: the filter-argument input types.
__all__ = [
    'GetDirectConnectGatewayAttachmentFilterArgs',
    'GetPeeringAttachmentFilterArgs',
    'GetRouteTableFilterArgs',
    'GetTransitGatewayFilterArgs',
    'GetVpcAttachmentFilterArgs',
    'GetVpnAttachmentFilterArgs',
]
@pulumi.input_type
class GetDirectConnectGatewayAttachmentFilterArgs:
    # NOTE(review): this class is tfgen-generated (see the file header); keep
    # hand edits to comments only so regeneration stays a clean diff.
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: The name of the filter field. Valid values can be found in the [EC2 DescribeTransitGatewayAttachments API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeTransitGatewayAttachments.html).
        :param Sequence[str] values: Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the filter field. Valid values can be found in the [EC2 DescribeTransitGatewayAttachments API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeTransitGatewayAttachments.html).
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
        """
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetPeeringAttachmentFilterArgs:
    """One filter criterion (field name plus accepted values) for the
    EC2 Transit Gateway Peering Attachment data source.

    NOTE: tfgen-generated code — do not restructure by hand.
    """
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: The name of the field to filter by, as defined by
               [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeTransitGatewayPeeringAttachments.html).
        :param Sequence[str] values: Set of values that are accepted for the given field.
               An EC2 Transit Gateway Peering Attachment be selected if any one of the given values matches.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "values", values)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the field to filter by, as defined by
        [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeTransitGatewayPeeringAttachments.html).
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        Set of values that are accepted for the given field.
        An EC2 Transit Gateway Peering Attachment be selected if any one of the given values matches.
        """
        return pulumi.get(self, "values")
    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetRouteTableFilterArgs:
    """One filter criterion (name plus accepted values) for the
    Transit Gateway Route Table data source.

    NOTE: tfgen-generated code — do not restructure by hand.
    """
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: Name of the filter.
        :param Sequence[str] values: List of one or more values for the filter.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "values", values)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Name of the filter.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        List of one or more values for the filter.
        """
        return pulumi.get(self, "values")
    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetTransitGatewayFilterArgs:
    """One filter criterion (name plus accepted values) for the
    Transit Gateway data source.

    NOTE: tfgen-generated code — do not restructure by hand.
    """
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: Name of the filter.
        :param Sequence[str] values: List of one or more values for the filter.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "values", values)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Name of the filter.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        List of one or more values for the filter.
        """
        return pulumi.get(self, "values")
    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetVpcAttachmentFilterArgs:
    """One filter criterion (name plus accepted values) for the
    Transit Gateway VPC Attachment data source.

    NOTE: tfgen-generated code — do not restructure by hand.
    """
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: Name of the filter.
        :param Sequence[str] values: List of one or more values for the filter.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "values", values)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Name of the filter.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        List of one or more values for the filter.
        """
        return pulumi.get(self, "values")
    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetVpnAttachmentFilterArgs:
    """One filter criterion (field name plus accepted values) for the
    Transit Gateway VPN Attachment data source.

    NOTE: tfgen-generated code — do not restructure by hand.
    """
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: The name of the filter field. Valid values can be found in the [EC2 DescribeTransitGatewayAttachments API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeTransitGatewayAttachments.html).
        :param Sequence[str] values: Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "values", values)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the filter field. Valid values can be found in the [EC2 DescribeTransitGatewayAttachments API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeTransitGatewayAttachments.html).
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
        """
        return pulumi.get(self, "values")
    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
| 31.817073
| 241
| 0.624633
| 910
| 7,827
| 5.247253
| 0.117582
| 0.040209
| 0.06534
| 0.042723
| 0.870785
| 0.870785
| 0.870785
| 0.870785
| 0.870785
| 0.870785
| 0
| 0.002265
| 0.266769
| 7,827
| 245
| 242
| 31.946939
| 0.829761
| 0.360802
| 0
| 0.863309
| 1
| 0
| 0.078819
| 0.038854
| 0
| 0
| 0
| 0
| 0
| 1
| 0.215827
| false
| 0
| 0.035971
| 0
| 0.381295
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d9ca0340d823d6a22f59d2483c8291da931963b3
| 2,066
|
py
|
Python
|
PA3/cs231n/datasets/get_assignment3_data.py
|
mcreng/cs231n
|
60b07e6a55b4cb239931daad5e0c8c8eeed46c1f
|
[
"MIT"
] | 13
|
2019-02-13T08:27:59.000Z
|
2021-12-20T07:04:49.000Z
|
PA3/cs231n/datasets/get_assignment3_data.py
|
mcreng/cs231n
|
60b07e6a55b4cb239931daad5e0c8c8eeed46c1f
|
[
"MIT"
] | 1
|
2018-12-23T15:33:01.000Z
|
2019-01-29T15:35:17.000Z
|
PA3/cs231n/datasets/get_assignment3_data.py
|
mcreng/cs231n
|
60b07e6a55b4cb239931daad5e0c8c8eeed46c1f
|
[
"MIT"
] | 4
|
2019-02-13T08:28:02.000Z
|
2022-03-11T08:48:45.000Z
|
from __future__ import print_function
from six.moves.urllib import request
import zipfile
import os
def _download(url, block_size=1048576):
    """Download *url* into the current directory with a progress readout.

    Returns the local file name (last path component of the URL).
    The file handle is managed by ``with`` so it is closed even on error —
    the original copy-pasted version leaked the handle on exceptions.
    """
    file_name = url.split('/')[-1]
    u = request.urlopen(url)
    meta = u.info()
    file_size = int(meta.get("Content-Length"))
    print("Downloading: %s Bytes: %s" % (file_name, file_size))
    downloaded = 0
    with open(file_name, 'wb') as f:
        while True:
            buffer = u.read(block_size)
            if not buffer:
                break
            downloaded += len(buffer)
            f.write(buffer)
            # \r keeps the progress line in place; end='' suppresses newline.
            print("%d [%3.2f%%]\r" % (downloaded, downloaded * 100. / file_size), end='')
    return file_name


def _extract_and_remove(file_name):
    """Unzip *file_name* into the current directory, then delete the archive."""
    print('Extracting: %s' % file_name)
    with zipfile.ZipFile(file_name, "r") as archive:
        archive.extractall('.')
    os.remove(file_name)


# Same three fetches as before, deduplicated. The .npz file is deliberately
# kept on disk as-is (it is not a zip archive).
_extract_and_remove(_download("http://cs231n.stanford.edu/coco_captioning.zip"))
_download("http://cs231n.stanford.edu/imagenet_val_25.npz")
_extract_and_remove(_download("http://cs231n.stanford.edu/squeezenet_tf.zip"))
| 23.747126
| 81
| 0.627299
| 312
| 2,066
| 3.951923
| 0.224359
| 0.136253
| 0.097324
| 0.051095
| 0.900243
| 0.841849
| 0.841849
| 0.841849
| 0.841849
| 0.841849
| 0
| 0.032297
| 0.205712
| 2,066
| 87
| 82
| 23.747126
| 0.719074
| 0
| 0
| 0.897059
| 0
| 0
| 0.171126
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.058824
| 0.132353
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9dff2248b7b72cc764ad8da51dace8f63f7fc75
| 103
|
py
|
Python
|
utils/mixture.py
|
LuckyDC/generalizing-reid-improved
|
ec78ba033e2961ba3d1d19d86ecc18d5f6ac8e39
|
[
"MIT"
] | null | null | null |
utils/mixture.py
|
LuckyDC/generalizing-reid-improved
|
ec78ba033e2961ba3d1d19d86ecc18d5f6ac8e39
|
[
"MIT"
] | null | null | null |
utils/mixture.py
|
LuckyDC/generalizing-reid-improved
|
ec78ba033e2961ba3d1d19d86ecc18d5f6ac8e39
|
[
"MIT"
] | null | null | null |
class MixUp:
    """Stub for the MixUp augmentation strategy; holds no state yet."""

    def __init__(self):
        """Intentionally empty — nothing to configure."""
class CutMix:
    """Stub for the CutMix augmentation strategy; holds no state yet."""

    def __init__(self):
        """Intentionally empty — nothing to configure."""
| 11.444444
| 23
| 0.572816
| 12
| 103
| 4.25
| 0.583333
| 0.27451
| 0.431373
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.349515
| 103
| 8
| 24
| 12.875
| 0.761194
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
d9ea39d50c5ed1bf5e244ced5fab929f3d7c68ac
| 186
|
py
|
Python
|
pytype/tools/merge_pyi/test_data/scope.py
|
Jrryy/pytype
|
2d2855dc97d5ccee22ad233a83524616c17c44c9
|
[
"Apache-2.0"
] | 3,882
|
2015-03-22T12:17:15.000Z
|
2022-03-31T17:13:20.000Z
|
pytype/tools/merge_pyi/test_data/scope.py
|
Jrryy/pytype
|
2d2855dc97d5ccee22ad233a83524616c17c44c9
|
[
"Apache-2.0"
] | 638
|
2015-11-03T06:34:44.000Z
|
2022-03-31T23:41:48.000Z
|
pytype/tools/merge_pyi/test_data/scope.py
|
Jrryy/pytype
|
2d2855dc97d5ccee22ad233a83524616c17c44c9
|
[
"Apache-2.0"
] | 301
|
2015-08-14T10:21:17.000Z
|
2022-03-08T11:03:40.000Z
|
# NOTE(review): this is test-fixture input for pytype's merge_pyi tool — the
# shadowing structure below is deliberate input data; do not "clean it up".
class C:
    def f(self, x):
        pass
    def g(self):
        # Inner `f` shadows the method name only within g's scope.
        def f(x): #gets ignored by pytype but fixer sees it, generates warning (FIXME?)
            return 1
        return f
| 20.666667
| 87
| 0.537634
| 28
| 186
| 3.571429
| 0.75
| 0.08
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008621
| 0.376344
| 186
| 8
| 88
| 23.25
| 0.853448
| 0.365591
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 1
| 0.428571
| false
| 0.142857
| 0
| 0.142857
| 0.857143
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 8
|
8a2bf79b038ac78476e1750555394f4311eaaab3
| 2,543
|
py
|
Python
|
imagemagick/convert.py
|
vmiklos/vmexam
|
ff4ef386d3cfd84b8ed06387cbd87b119dd50448
|
[
"MIT"
] | 1
|
2015-02-09T10:21:51.000Z
|
2015-02-09T10:21:51.000Z
|
imagemagick/convert.py
|
vmiklos/vmexam
|
ff4ef386d3cfd84b8ed06387cbd87b119dd50448
|
[
"MIT"
] | null | null | null |
imagemagick/convert.py
|
vmiklos/vmexam
|
ff4ef386d3cfd84b8ed06387cbd87b119dd50448
|
[
"MIT"
] | null | null | null |
import os, shutil, sys
# Builds a 3x4 photo montage for <who>: big images (8as/) are scaled to
# 2200x1100 and split into two 1100x1100 tiles; small images (4es/) are
# scaled to 1100x1100 directly.
who = sys.argv[1]


def system(cmd):
    """Echo *cmd* and run it through the shell (ImageMagick CLI calls)."""
    # print() form works on both Python 2 and 3; the old `print cmd`
    # statement was a SyntaxError under Python 3.
    print("-> " + cmd)
    os.system(cmd)


def _scaled(path):
    """Map an input image path to its counterpart inside the scaled/ work dir."""
    return os.path.join("scaled", os.path.basename(path))


bigs = []
smalls = []
os.chdir(who)
for root, dirs, files in os.walk("8as"):
    for i in files:
        bigs.append(os.path.join("8as", i))
for root, dirs, files in os.walk("4es"):
    for i in files:
        smalls.append(os.path.join("4es", i))
os.mkdir("scaled")

print("converting big images to std size")
for i in bigs:
    system("convert %s -scale 2200x1100! %s" % (i, _scaled(i)))

# The first two big images are rotated, so they split top/bottom;
# the rest split left/right.
print("rotating big images :2")
for i in bigs[:2]:
    system("convert %s -rotate 90 %s" % (_scaled(i), _scaled(i)))
print("splitting big images :2")
for i in bigs[:2]:
    system("convert %s -crop 1100x1100+0+0 %s.t.jpg" % (_scaled(i), _scaled(i)))
    system("convert %s -crop 1100x1100+0+1100 %s.b.jpg" % (_scaled(i), _scaled(i)))
print("splitting big images 2:")
for i in bigs[2:]:
    system("convert %s -crop 1100x1100+0+0 %s.t.jpg" % (_scaled(i), _scaled(i)))
    system("convert %s -crop 1100x1100+1100+0 %s.b.jpg" % (_scaled(i), _scaled(i)))

print("converting small images to std size")
for i in smalls:
    system("convert %s -scale 1100x1100! %s" % (i, _scaled(i)))

# Fixed 3x4 tile layout: six smalls interleaved with the six big-image halves.
images = [
    _scaled(smalls[0]),
    _scaled(smalls[1]),
    _scaled("%s.t.jpg" % bigs[0]),
    _scaled(smalls[2]),
    _scaled(smalls[3]),
    _scaled("%s.b.jpg" % bigs[0]),
    _scaled(smalls[4]),
    _scaled(smalls[5]),
    _scaled("%s.t.jpg" % bigs[1]),
    _scaled("%s.t.jpg" % bigs[2]),
    _scaled("%s.b.jpg" % bigs[2]),
    _scaled("%s.b.jpg" % bigs[1]),
]
print("montaging")
system("montage %s -density 500 -tile 3x4 -geometry +0+0 ../%s.jpg" % (" ".join(images), who))
shutil.rmtree("scaled")
| 41.016129
| 146
| 0.672827
| 441
| 2,543
| 3.879819
| 0.142857
| 0.175336
| 0.151958
| 0.22443
| 0.769141
| 0.769141
| 0.768556
| 0.715956
| 0.715956
| 0.715956
| 0
| 0.040941
| 0.097129
| 2,543
| 61
| 147
| 41.688525
| 0.704268
| 0
| 0
| 0.125
| 0
| 0
| 0.263862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.020833
| null | null | 0.145833
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a2dea66f1e117cb3b7044cf2cf38a99f097e817
| 32,095
|
py
|
Python
|
unixtimestampfield/tests.py
|
bducraux/django-unixtimestampfield
|
cb8b9f8dcfe43be8110845062406e16b2f25e9d1
|
[
"MIT"
] | 22
|
2015-12-25T06:31:22.000Z
|
2021-07-14T02:46:36.000Z
|
unixtimestampfield/tests.py
|
bducraux/django-unixtimestampfield
|
cb8b9f8dcfe43be8110845062406e16b2f25e9d1
|
[
"MIT"
] | 7
|
2015-12-11T14:49:37.000Z
|
2022-01-05T01:56:24.000Z
|
unixtimestampfield/tests.py
|
bducraux/django-unixtimestampfield
|
cb8b9f8dcfe43be8110845062406e16b2f25e9d1
|
[
"MIT"
] | 6
|
2018-05-14T02:35:30.000Z
|
2021-04-07T03:25:57.000Z
|
import logging
from django.test import TestCase, override_settings
from django.db import models
from django.utils import timezone
from django import forms
from django.core import exceptions
from django.template import Template, Context
from .fields import UnixTimeStampField, OrdinalField, TimestampPatchMixin, OrdinalPatchMixin
# Reference datetimes shared by the test classes below.
unix_0 = timezone.datetime(1970, 1, 1)  # Unix epoch, naive
unix_0_utc = timezone.datetime(1970, 1, 1, tzinfo=timezone.utc)  # Unix epoch, UTC-aware
ordinal_1 = timezone.datetime.fromordinal(1)  # ordinal day 1 (0001-01-01), naive
ordinal_1_utc = timezone.make_aware(timezone.datetime.fromordinal(1), timezone.utc)  # ordinal day 1, UTC-aware
logging.basicConfig()
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.DEBUG)
class MixinTest(TestCase):
    """Tests TimestampPatchMixin's datetime<->timestamp conversions under
    three configurations: USE_TZ with UTC, USE_TZ with a non-UTC zone,
    and USE_TZ=False.
    """
    # Fixture datetimes; trailing comments give the equivalent POSIX timestamp.
    zero_utc = timezone.datetime(1970, 1, 1, 0, 0, tzinfo=timezone.utc)
    oneyear_utc = timezone.datetime(
        1971, 1, 1, 1, 1, 1, 123400, tzinfo=timezone.utc)  # 31539661.123400
    oneyear_utc_i = timezone.datetime(1971, 1, 1, 1, 1, 1, tzinfo=timezone.utc)  # 31539661.0
    zero = timezone.datetime(1970, 1, 1, 0, 0)
    oneyear = timezone.datetime(1971, 1, 1, 1, 1, 1, 123400)
    oneyear_i = timezone.datetime(1971, 1, 1, 1, 1, 1)
    negyear_utc = timezone.datetime(
        1969, 1, 1, 1, 1, 1, 123400, tzinfo=timezone.utc)  # -31532338.8766
    negyear_utc_i = timezone.datetime(1969, 1, 1, 1, 1, 1, tzinfo=timezone.utc)  # -31532339

    # --- datetime -> timestamp ------------------------------------------
    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_to_timestamp_utc(self):
        """Aware datetimes convert to timestamps when TIME_ZONE is UTC."""
        ts = TimestampPatchMixin()
        self.assertEqual(0, ts.to_timestamp(self.zero_utc))
        self.assertEqual(31539661.123400, ts.to_timestamp(self.oneyear_utc))
        self.assertEqual(31539661, ts.to_timestamp(self.oneyear_utc_i))
        self.assertEqual(-31532338.8766, ts.to_timestamp(self.negyear_utc))
        self.assertEqual(-31532339, ts.to_timestamp(self.negyear_utc_i))
    @override_settings(USE_TZ=True, TIME_ZONE='Asia/Taipei')
    def test_to_timestamp_with_tz(self):
        """Localized datetimes produce the same timestamps as their UTC form."""
        ts = TimestampPatchMixin()
        self.assertEqual(0, ts.to_timestamp(timezone.localtime(self.zero_utc)))
        self.assertEqual(31539661.123400, ts.to_timestamp(timezone.localtime(self.oneyear_utc)))
        self.assertEqual(31539661, ts.to_timestamp(timezone.localtime(self.oneyear_utc_i)))
        self.assertEqual(-31532338.8766, ts.to_timestamp(timezone.localtime(self.negyear_utc)))
        self.assertEqual(-31532339, ts.to_timestamp(timezone.localtime(self.negyear_utc_i)))
    @override_settings(USE_TZ=False)
    def test_to_timestamp_without_tz(self):
        """With USE_TZ=False both naive and aware inputs are accepted."""
        ts = TimestampPatchMixin()
        self.assertEqual(0, ts.to_timestamp(self.zero_utc))
        self.assertEqual(0, ts.to_timestamp(self.zero))
        self.assertEqual(0, ts.to_timestamp(timezone.localtime(self.zero_utc)))
        self.assertEqual(31539661.123400, ts.to_timestamp(self.oneyear))
        self.assertEqual(31539661.123400, ts.to_timestamp(self.oneyear_utc))
        self.assertEqual(31539661, ts.to_timestamp(self.oneyear_utc_i))
        self.assertEqual(-31532338.8766, ts.to_timestamp(self.negyear_utc))
        self.assertEqual(-31532339, ts.to_timestamp(self.negyear_utc_i))

    # --- anything -> naive datetime (int, float, numeric string, ISO string)
    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_to_naive_utc(self):
        ts = TimestampPatchMixin()
        self.assertEqual(self.zero, ts.to_naive_datetime(0))
        self.assertEqual(self.zero, ts.to_naive_datetime(0.0))
        self.assertEqual(self.zero, ts.to_naive_datetime('0'))
        self.assertEqual(self.zero, ts.to_naive_datetime('1970-01-01 00:00:00'))
        self.assertEqual(self.oneyear_i, ts.to_naive_datetime(31539661))
        self.assertEqual(self.oneyear, ts.to_naive_datetime(31539661.123400))
        self.assertEqual(self.oneyear, ts.to_naive_datetime('31539661.123400'))
        self.assertEqual(self.oneyear, ts.to_naive_datetime('1971-01-01 01:01:01.123400'))
    @override_settings(USE_TZ=True, TIME_ZONE='Asia/Taipei')
    def test_to_naive_with_tz(self):
        # Naive output is expected to be independent of the active TIME_ZONE.
        ts = TimestampPatchMixin()
        self.assertEqual(self.zero, ts.to_naive_datetime(0))
        self.assertEqual(self.zero, ts.to_naive_datetime(0.0))
        self.assertEqual(self.zero, ts.to_naive_datetime('0'))
        self.assertEqual(self.zero, ts.to_naive_datetime('1970-01-01 00:00:00'))
        self.assertEqual(self.oneyear_i, ts.to_naive_datetime(31539661))
        self.assertEqual(self.oneyear, ts.to_naive_datetime(31539661.123400))
        self.assertEqual(self.oneyear, ts.to_naive_datetime('31539661.123400'))
        self.assertEqual(self.oneyear, ts.to_naive_datetime('1971-01-01 01:01:01.123400'))
    @override_settings(USE_TZ=False)
    def test_to_naive_without_tz(self):
        ts = TimestampPatchMixin()
        self.assertEqual(self.zero, ts.to_naive_datetime(0))
        self.assertEqual(self.zero, ts.to_naive_datetime(0.0))
        self.assertEqual(self.zero, ts.to_naive_datetime('0'))
        self.assertEqual(self.zero, ts.to_naive_datetime('1970-01-01 00:00:00'))
        self.assertEqual(self.oneyear_i, ts.to_naive_datetime(31539661))
        self.assertEqual(self.oneyear, ts.to_naive_datetime(31539661.123400))
        self.assertEqual(self.oneyear, ts.to_naive_datetime('31539661.123400'))
        self.assertEqual(self.oneyear, ts.to_naive_datetime('1971-01-01 01:01:01.123400'))

    # --- anything -> UTC-aware datetime ---------------------------------
    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_to_utc_utc(self):
        ts = TimestampPatchMixin()
        self.assertEqual(self.zero_utc, ts.to_utc_datetime(0))
        self.assertEqual(self.zero_utc, ts.to_utc_datetime(0.0))
        self.assertEqual(self.zero_utc, ts.to_utc_datetime('0'))
        self.assertEqual(self.zero_utc, ts.to_utc_datetime('1970-01-01 00:00:00'))
        self.assertEqual(self.oneyear_utc_i, ts.to_utc_datetime(31539661))
        self.assertEqual(self.oneyear_utc, ts.to_utc_datetime(31539661.123400))
        self.assertEqual(self.oneyear_utc, ts.to_utc_datetime('31539661.123400'))
        self.assertEqual(self.oneyear_utc, ts.to_utc_datetime('1971-01-01 01:01:01.123400'))
    @override_settings(USE_TZ=True, TIME_ZONE='Asia/Taipei')
    def test_to_utc_with_tz(self):
        # UTC output is expected to be independent of the active TIME_ZONE.
        ts = TimestampPatchMixin()
        self.assertEqual(self.zero_utc, ts.to_utc_datetime(0))
        self.assertEqual(self.zero_utc, ts.to_utc_datetime(0.0))
        self.assertEqual(self.zero_utc, ts.to_utc_datetime('0'))
        self.assertEqual(self.zero_utc, ts.to_utc_datetime('1970-01-01 00:00:00'))
        self.assertEqual(self.oneyear_utc_i, ts.to_utc_datetime(31539661))
        self.assertEqual(self.oneyear_utc, ts.to_utc_datetime(31539661.123400))
        self.assertEqual(self.oneyear_utc, ts.to_utc_datetime('31539661.123400'))
        self.assertEqual(self.oneyear_utc, ts.to_utc_datetime('1971-01-01 01:01:01.123400'))
    @override_settings(USE_TZ=False)
    def test_to_utc_without_tz(self):
        ts = TimestampPatchMixin()
        self.assertEqual(self.zero_utc, ts.to_utc_datetime(0))
        self.assertEqual(self.zero_utc, ts.to_utc_datetime(0.0))
        self.assertEqual(self.zero_utc, ts.to_utc_datetime('0'))
        self.assertEqual(self.zero_utc, ts.to_utc_datetime('1970-01-01 00:00:00'))
        self.assertEqual(self.oneyear_utc_i, ts.to_utc_datetime(31539661))
        self.assertEqual(self.oneyear_utc, ts.to_utc_datetime(31539661.123400))
        self.assertEqual(self.oneyear_utc, ts.to_utc_datetime('31539661.123400'))
        self.assertEqual(self.oneyear_utc, ts.to_utc_datetime('1971-01-01 01:01:01.123400'))

    # --- anything -> settings-dependent datetime ------------------------
    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_to_datetime_utc(self):
        ts = TimestampPatchMixin()
        self.assertEqual(self.zero_utc, ts.to_datetime(0))
        self.assertEqual(self.zero_utc, ts.to_datetime(0.0))
        self.assertEqual(self.zero_utc, ts.to_datetime('0'))
        self.assertEqual(self.zero_utc, ts.to_datetime('1970-01-01 00:00:00'))
        self.assertEqual(self.oneyear_utc_i, ts.to_datetime(31539661))
        self.assertEqual(self.oneyear_utc, ts.to_datetime(31539661.123400))
        self.assertEqual(self.oneyear_utc, ts.to_datetime('31539661.123400'))
        self.assertEqual(self.oneyear_utc, ts.to_datetime('1971-01-01 01:01:01.123400'))
    @override_settings(USE_TZ=True, TIME_ZONE='Asia/Taipei')
    def test_to_datetime_with_tz(self):
        # to_datetime localizes into the active TIME_ZONE when USE_TZ is on.
        ts = TimestampPatchMixin()
        zero = timezone.localtime(self.zero_utc)
        oneyear = timezone.localtime(self.oneyear_utc)
        oneyear_i = timezone.localtime(self.oneyear_utc_i)
        self.assertEqual(zero, ts.to_datetime(0))
        self.assertEqual(zero, ts.to_datetime(0.0))
        self.assertEqual(zero, ts.to_datetime('0'))
        self.assertEqual(zero, ts.to_datetime('1970-01-01 00:00:00'))
        self.assertEqual(oneyear_i, ts.to_datetime(31539661))
        self.assertEqual(oneyear, ts.to_datetime(31539661.123400))
        self.assertEqual(oneyear, ts.to_datetime('31539661.123400'))
        self.assertEqual(oneyear, ts.to_datetime('1971-01-01 01:01:01.123400'))
    @override_settings(USE_TZ=False)
    def test_to_datetime_without_tz(self):
        # With USE_TZ=False to_datetime returns naive datetimes.
        ts = TimestampPatchMixin()
        self.assertEqual(self.zero, ts.to_datetime(0))
        self.assertEqual(self.zero, ts.to_datetime(0.0))
        self.assertEqual(self.zero, ts.to_datetime('0'))
        self.assertEqual(self.zero, ts.to_datetime('1970-01-01 00:00:00'))
        self.assertEqual(self.oneyear_i, ts.to_datetime(31539661))
        self.assertEqual(self.oneyear, ts.to_datetime(31539661.123400))
        self.assertEqual(self.oneyear, ts.to_datetime('31539661.123400'))
        self.assertEqual(self.oneyear, ts.to_datetime('1971-01-01 01:01:01.123400'))

    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_over_and_under_flow(self):
        """Timestamps outside the representable datetime range must raise."""
        ts = TimestampPatchMixin()
        self.assertRaises(exceptions.ValidationError, ts.from_number, 253402272000)
        self.assertRaises(exceptions.ValidationError, ts.from_number, -719163)
class ForTestModel(models.Model):
    """Model fixture exercising every UnixTimeStampField configuration."""
    # auto_now_add is set once on INSERT; auto_now refreshes on every save().
    created = UnixTimeStampField(auto_now_add=True)
    modified = UnixTimeStampField(auto_now=True)
    # The same epoch default expressed through each accepted input form.
    str_ini = UnixTimeStampField(default='0.0')
    str_dt_ini = UnixTimeStampField(default='1970-01-01 00:00:00')
    float_ini = UnixTimeStampField(default=0.0)
    int_ini = UnixTimeStampField(default=0.0)  # NOTE(review): named "int" but default is float 0.0 — confirm intentional
    dt_ini = UnixTimeStampField(default=unix_0_utc)
    # use_numeric=True stores/returns the raw number; round_to sets precision
    # (default appears to be 6 given the 3.111112 expectations in the tests).
    use_numeric_field = UnixTimeStampField(use_numeric=True, default=0.0)
    round_3_field = UnixTimeStampField(use_numeric=True, round_to=3, default=0.0)
class TimeStampFieldTest(TestCase):
    """ORM round-trip tests for UnixTimeStampField: defaults, assignment of
    every accepted input form, numeric-mode rounding, and overflow handling,
    each repeated for UTC, a non-UTC zone, and USE_TZ=False.
    """
    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_init_with_use_tz(self):
        """Defaults materialize as the aware epoch; auto fields are 'now'."""
        now = timezone.now()
        expected = timezone.datetime(1970, 1, 1, tzinfo=timezone.utc)
        t = ForTestModel.objects.create()
        self.assertGreater(t.created, now)
        self.assertGreater(t.modified, now)
        self.assertEqual(t.str_ini, expected)
        self.assertEqual(t.str_dt_ini, expected)
        self.assertEqual(t.float_ini, expected)
        self.assertEqual(t.int_ini, expected)
    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_assignment_with_tz(self):
        """All input forms of t=3s round-trip through save/refresh."""
        expected = timezone.datetime(1970, 1, 1, 0, 0, 3, tzinfo=timezone.utc)
        t = ForTestModel.objects.create()
        pre_modified = t.modified
        t.str_ini = '3'
        t.str_dt_ini = '1970-01-01 00:00:03'
        t.float_ini = 3.0
        t.int_ini = 3
        t.dt_ini = timezone.datetime(1970, 1, 1, 0, 0, 3, tzinfo=timezone.utc)
        t.use_numeric_field = 3.1111116
        t.round_3_field = 3.1116
        t.save()
        # refresh_from_db only exists on Django >= 1.8; fall back to re-query.
        if hasattr(t, 'refresh_from_db'):
            t.refresh_from_db()
        else:
            t = ForTestModel.objects.get(id=t.id)
        self.assertGreater(t.modified, pre_modified)
        self.assertEqual(t.str_ini, expected)
        self.assertEqual(t.str_dt_ini, expected)
        self.assertEqual(t.float_ini, expected)
        self.assertEqual(t.int_ini, expected)
        # Numeric-mode fields come back rounded (6 and 3 places respectively).
        self.assertEqual(t.use_numeric_field, 3.111112)
        self.assertEqual(t.round_3_field, 3.112)
    @override_settings(USE_TZ=True, TIME_ZONE='Asia/Taipei')
    def test_init_with_different_tz(self):
        """Defaults localize into the active non-UTC TIME_ZONE."""
        now = timezone.now()
        expected = timezone.localtime(
            timezone.datetime(1970, 1, 1, tzinfo=timezone.utc),
            timezone.pytz.timezone('Asia/Taipei')
        )
        t = ForTestModel.objects.create()
        self.assertGreater(t.created, now)
        self.assertGreater(t.modified, now)
        self.assertEqual(t.str_ini, expected)
        self.assertEqual(t.str_dt_ini, expected)
        self.assertEqual(t.float_ini, expected)
        self.assertEqual(t.int_ini, expected)
    @override_settings(USE_TZ=True, TIME_ZONE='Asia/Taipei')
    def test_assignment_with_different_tz(self):
        """Same round-trip as above but compared in the Asia/Taipei zone."""
        expected = timezone.localtime(
            timezone.datetime(1970, 1, 1, 0, 0, 3, tzinfo=timezone.utc),
            timezone.pytz.timezone('Asia/Taipei')
        )
        t = ForTestModel.objects.create()
        pre_modified = t.modified
        t.str_ini = '3'
        t.str_dt_ini = '1970-01-01 00:00:03'
        t.float_ini = 3.0
        t.int_ini = 3
        t.dt_ini = timezone.datetime.fromtimestamp(3.0, timezone.pytz.timezone('Asia/Taipei'))
        t.use_numeric_field = 3.1111116
        t.round_3_field = 3.1116
        t.save()
        if hasattr(t, 'refresh_from_db'):
            t.refresh_from_db()
        else:
            t = ForTestModel.objects.get(id=t.id)
        self.assertGreater(t.modified, pre_modified)
        self.assertEqual(t.str_ini, expected)
        self.assertEqual(t.str_dt_ini, expected)
        self.assertEqual(t.float_ini, expected)
        self.assertEqual(t.int_ini, expected)
        self.assertEqual(t.use_numeric_field, 3.111112)
        self.assertEqual(t.round_3_field, 3.112)
    @override_settings(USE_TZ=False)
    def test_init_without_tz(self):
        """With USE_TZ=False, defaults are naive epoch datetimes."""
        now = timezone.datetime.utcnow()
        expected = timezone.datetime(1970, 1, 1, 0, 0)
        t = ForTestModel.objects.create()
        self.assertGreater(t.created, now)
        self.assertGreater(t.modified, now)
        self.assertEqual(t.str_ini, expected)
        self.assertEqual(t.str_dt_ini, expected)
        self.assertEqual(t.float_ini, expected)
        self.assertEqual(t.int_ini, expected)
    @override_settings(USE_TZ=False)
    def test_assignment_without_tz(self):
        expected = timezone.datetime(1970, 1, 1, 0, 0, 3)
        t = ForTestModel.objects.create()
        pre_modified = t.modified
        t.str_ini = '3'
        t.str_dt_ini = '1970-01-01 00:00:03'
        t.float_ini = 3.0
        t.int_ini = 3
        t.dt_ini = timezone.datetime.fromtimestamp(3.0)
        t.save()
        if hasattr(t, 'refresh_from_db'):
            t.refresh_from_db()
        else:
            t = ForTestModel.objects.get(id=t.id)
        self.assertGreater(t.modified, pre_modified)
        self.assertEqual(t.str_ini, expected)
        self.assertEqual(t.str_dt_ini, expected)
        self.assertEqual(t.float_ini, expected)
        self.assertEqual(t.int_ini, expected)
    @override_settings(USE_TZ=False)
    def test_assignment_with_big_num(self):
        """Timestamps past year 2038 (beyond 32-bit range) still round-trip."""
        expected = timezone.datetime(1970, 1, 1, 0, 0) + timezone.timedelta(seconds=14248491461)
        t = ForTestModel.objects.create()
        pre_modified = t.modified
        t.str_ini = '14248491461'
        t.float_ini = 14248491461.0
        t.int_ini = 14248491461
        t.dt_ini = timezone.datetime.fromtimestamp(14248491461.0)
        t.save()
        if hasattr(t, 'refresh_from_db'):
            t.refresh_from_db()
        else:
            t = ForTestModel.objects.get(id=t.id)
        self.assertGreater(t.modified, pre_modified)
        self.assertEqual(t.str_ini, expected)
        self.assertEqual(t.float_ini, expected)
        self.assertEqual(t.int_ini, expected)
    @override_settings(USE_TZ=False)
    def test_assignment_overflow(self):
        """A timestamp beyond the datetime range fails validation on save."""
        t = ForTestModel.objects.create()
        t.float_ini = 14248491461222.0
        self.assertRaises(exceptions.ValidationError, t.save)
class ForTestModelForm(forms.ModelForm):
    """ModelForm over ForTestModel exposing every non-auto timestamp field
    (created/modified are auto-managed and therefore excluded)."""
    class Meta:
        model = ForTestModel
        fields = ['str_ini', 'float_ini', 'int_ini', 'dt_ini',
                  'use_numeric_field', 'round_3_field']
class FormFieldTest(TestCase):
    """Form-level validation tests for UnixTimeStampField's form field."""

    def test_normal(self):  # renamed from misspelled `test_noraml`
        """A fully populated form with valid values validates."""
        data = {
            'str_ini': '1999-12-11 10:23:13',
            'float_ini': 3.0,
            'int_ini': 3,
            'dt_ini': 3,
            'use_numeric_field': 0,
            'round_3_field': 0,
        }
        tform = ForTestModelForm(data=data)
        self.assertTrue(tform.is_valid())

    def test_empty_form(self):
        """Every field is required, so an empty form reports six errors."""
        data = {}
        tform = ForTestModelForm(data=data)
        self.assertFalse(tform.is_valid())
        errors = {'dt_ini': [u'This field is required.'],
                  'float_ini': [u'This field is required.'],
                  'int_ini': [u'This field is required.'],
                  'round_3_field': [u'This field is required.'],
                  'str_ini': [u'This field is required.'],
                  'use_numeric_field': [u'This field is required.']}
        self.assertDictEqual(tform.errors, errors)
        self.assertEqual(tform.error_class, forms.utils.ErrorList)

    def test_partial_data(self):
        """Only the omitted fields show up as required-field errors."""
        data = {
            'int_ini': 0,
            'round_3_field': 0,
            'str_ini': '3',
        }
        tform = ForTestModelForm(data=data)
        self.assertFalse(tform.is_valid())
        errors = {'dt_ini': [u'This field is required.'],
                  'float_ini': [u'This field is required.'],
                  'use_numeric_field': [u'This field is required.']}
        self.assertDictEqual(tform.errors, errors)
        self.assertEqual(tform.error_class, forms.utils.ErrorList)

    def test_invalid_data(self):
        """A non-convertible value yields the field's conversion error."""
        data = {
            'str_ini': ['hello'],
            'float_ini': 3.0,
            'int_ini': 3,
            'dt_ini': 3,
            'use_numeric_field': 0,
            'round_3_field': 0,
        }
        tform = ForTestModelForm(data=data)
        self.assertFalse(tform.is_valid())
        errors = {'str_ini': [u"Unable to convert value: '['hello']' to datetime"
                              u", please use 'YYYY-mm-dd HH:MM:SS'"]}
        self.assertDictEqual(tform.errors, errors)
        self.assertEqual(tform.error_class, forms.utils.ErrorList)
class OrdMixinTest(TestCase):
    """Conversions provided by OrdinalPatchMixin between ordinal day
    numbers (1 == 0001-01-01) and naive/aware datetimes."""

    zero_utc = timezone.datetime(1, 1, 1, 0, 0, tzinfo=timezone.utc)
    oneyear_utc = timezone.datetime(1, 12, 31, 0, 0, tzinfo=timezone.utc)  # ordinal 365
    zero = timezone.datetime(1, 1, 1, 0, 0)
    oneyear = timezone.datetime(1, 12, 31, 0, 0)  # ordinal 365

    # Every accepted spelling of ordinal day 1 and ordinal day 365.
    DAY1_INPUTS = (1, 1.0, '1', '0001-01-01 00:00:00')
    DAY365_INPUTS = (365, 365.0, '365', '0001-12-31 00:00:00')

    def _assert_converts(self, convert, day1_expected, day365_expected):
        """Run *convert* over all accepted input spellings, in the same
        order the original assertions used."""
        for value in self.DAY1_INPUTS:
            self.assertEqual(day1_expected, convert(value))
        for value in self.DAY365_INPUTS:
            self.assertEqual(day365_expected, convert(value))

    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_to_timestamp_utc(self):
        mixin = OrdinalPatchMixin()
        self.assertEqual(1, mixin.to_timestamp(self.zero_utc))
        self.assertEqual(365, mixin.to_timestamp(self.oneyear_utc))

    @override_settings(USE_TZ=True, TIME_ZONE='Asia/Taipei')
    def test_to_timestamp_with_tz(self):
        mixin = OrdinalPatchMixin()
        self.assertEqual(1, mixin.to_timestamp(timezone.localtime(self.zero_utc)))
        self.assertEqual(365, mixin.to_timestamp(timezone.localtime(self.oneyear_utc)))

    @override_settings(USE_TZ=False)
    def test_to_timestamp_without_tz(self):
        mixin = OrdinalPatchMixin()
        self.assertEqual(1, mixin.to_timestamp(self.zero_utc))
        self.assertEqual(1, mixin.to_timestamp(self.zero))
        self.assertEqual(365, mixin.to_timestamp(self.oneyear))

    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_to_naive_utc(self):
        self._assert_converts(OrdinalPatchMixin().to_naive_datetime,
                              self.zero, self.oneyear)

    @override_settings(USE_TZ=True, TIME_ZONE='Asia/Taipei')
    def test_to_naive_with_tz(self):
        self._assert_converts(OrdinalPatchMixin().to_naive_datetime,
                              self.zero, self.oneyear)

    @override_settings(USE_TZ=False)
    def test_to_naive_without_tz(self):
        self._assert_converts(OrdinalPatchMixin().to_naive_datetime,
                              self.zero, self.oneyear)

    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_to_utc_utc(self):
        self._assert_converts(OrdinalPatchMixin().to_utc_datetime,
                              self.zero_utc, self.oneyear_utc)

    @override_settings(USE_TZ=True, TIME_ZONE='Asia/Taipei')
    def test_to_utc_with_tz(self):
        self._assert_converts(OrdinalPatchMixin().to_utc_datetime,
                              self.zero_utc, self.oneyear_utc)

    @override_settings(USE_TZ=False)
    def test_to_utc_without_tz(self):
        self._assert_converts(OrdinalPatchMixin().to_utc_datetime,
                              self.zero_utc, self.oneyear_utc)

    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_to_datetime_utc(self):
        self._assert_converts(OrdinalPatchMixin().to_datetime,
                              self.zero_utc, self.oneyear_utc)

    @override_settings(USE_TZ=True, TIME_ZONE='Asia/Taipei')
    def test_to_datetime_with_tz(self):
        # Under a non-UTC zone, to_datetime yields local-time aware datetimes.
        self._assert_converts(OrdinalPatchMixin().to_datetime,
                              timezone.localtime(self.zero_utc),
                              timezone.localtime(self.oneyear_utc))

    @override_settings(USE_TZ=False)
    def test_to_datetime_without_tz(self):
        self._assert_converts(OrdinalPatchMixin().to_datetime,
                              self.zero, self.oneyear)

    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_over_and_under_flow(self):
        mixin = OrdinalPatchMixin()
        # Too large, zero, and negative ordinals must all be rejected.
        for bad_ordinal in (3652060, 0, -1):
            self.assertRaises(exceptions.ValidationError,
                              mixin.from_number, bad_ordinal)
class ForOrdinalTestModel(models.Model):
    """Fixture model exercising OrdinalField defaults and auto options."""
    # Auto-managed day-number timestamps.
    created = OrdinalField(auto_now_add=True)   # set once, on INSERT
    modified = OrdinalField(auto_now=True)      # refreshed on every save()
    # Ordinal day 1 supplied through each accepted default type.
    str_ini = OrdinalField(default='1')
    # NOTE(review): despite the name, the default is the int 1 (not 1.0) —
    # presumably coerced by the field; confirm against OrdinalField.
    float_ini = OrdinalField(default=1)
    int_ini = OrdinalField(default=1)
    dt_ini = OrdinalField(default=ordinal_1)
class OrdinalFieldTest(TestCase):
    """Model-level behaviour of OrdinalField: defaults, assignment,
    refresh round-trips, and overflow rejection."""

    @staticmethod
    def _aware_from_ordinal(ordinal):
        """UTC-aware midnight datetime for the given ordinal day number."""
        return timezone.make_aware(
            timezone.datetime.fromordinal(ordinal), timezone.utc)

    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_init_with_utc(self):
        midnight_today = self._aware_from_ordinal(timezone.now().toordinal())
        day_one = self._aware_from_ordinal(1)
        obj = ForOrdinalTestModel.objects.create()
        self.assertEqual(obj.created, midnight_today)
        self.assertEqual(obj.modified, midnight_today)
        for field in ('str_ini', 'float_ini', 'int_ini', 'dt_ini'):
            self.assertEqual(getattr(obj, field), day_one)

    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_assignment_with_tz(self):
        midnight_today = self._aware_from_ordinal(timezone.now().toordinal())
        day_three = self._aware_from_ordinal(3)
        obj = ForOrdinalTestModel.objects.create()
        obj.str_ini = '3'
        obj.float_ini = 3.0
        obj.int_ini = 3
        obj.dt_ini = self._aware_from_ordinal(3)
        obj.save()
        # refresh_from_db() only exists on Django >= 1.8; re-query otherwise.
        if hasattr(obj, 'refresh_from_db'):
            obj.refresh_from_db()
        else:
            obj = ForOrdinalTestModel.objects.get(id=obj.id)
        self.assertEqual(obj.modified, midnight_today)
        for field in ('str_ini', 'float_ini', 'int_ini'):
            self.assertEqual(getattr(obj, field), day_three)

    @override_settings(USE_TZ=True, TIME_ZONE='Asia/Taipei')
    def test_init_with_different_tz(self):
        midnight_today = self._aware_from_ordinal(timezone.now().toordinal())
        day_one_local = timezone.localtime(
            self._aware_from_ordinal(1),
            timezone.pytz.timezone('Asia/Taipei')
        )
        obj = ForOrdinalTestModel.objects.create()
        self.assertEqual(obj.created, midnight_today)
        self.assertEqual(obj.modified, midnight_today)
        for field in ('str_ini', 'float_ini', 'int_ini'):
            self.assertEqual(getattr(obj, field), day_one_local)

    @override_settings(USE_TZ=False)
    def test_init_without_tz(self):
        midnight_today = timezone.datetime.fromordinal(
            timezone.datetime.utcnow().toordinal())
        day_one = timezone.datetime.fromordinal(1)
        obj = ForOrdinalTestModel.objects.create()
        self.assertEqual(obj.created, midnight_today)
        self.assertEqual(obj.modified, midnight_today)
        for field in ('str_ini', 'float_ini', 'int_ini'):
            self.assertEqual(getattr(obj, field), day_one)

    @override_settings(USE_TZ=False)
    def test_assignment_without_tz(self):
        midnight_today = timezone.datetime.fromordinal(
            timezone.datetime.utcnow().toordinal())
        day_three = timezone.datetime.fromordinal(3)
        obj = ForOrdinalTestModel.objects.create()
        obj.str_ini = '3'
        obj.float_ini = 3.0
        obj.int_ini = 3
        obj.dt_ini = timezone.datetime.fromordinal(3)
        obj.save()
        if hasattr(obj, 'refresh_from_db'):
            obj.refresh_from_db()
        else:
            obj = ForOrdinalTestModel.objects.get(id=obj.id)
        self.assertEqual(obj.modified, midnight_today)
        for field in ('str_ini', 'float_ini', 'int_ini'):
            self.assertEqual(getattr(obj, field), day_three)

    @override_settings(USE_TZ=False)
    def test_assignment_overflow(self):
        obj = ForOrdinalTestModel.objects.create()
        obj.float_ini = 14248491461222.0  # far beyond datetime.max.toordinal()
        self.assertRaises(exceptions.ValidationError, obj.save)
class TemplateTagsTest(TestCase):
    """Render the to_datetime / to_timestamp template filters."""

    TEMPLATE_SRC = (
        "{% load unixtimestampfield %} "
        "{{t.str_ini|to_datetime}} "
        "{{t.str_ini|to_timestamp}}"
    )

    def setUp(self):
        self.template = Template(self.TEMPLATE_SRC)

    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_render(self):
        model = ForTestModel()
        output = self.template.render(Context({'t': model}))
        # str_ini defaults to the Unix epoch, so both filters render it.
        self.assertIn("Jan. 1, 1970", output)
        self.assertIn("0.0", output)
class SubmiddlewareModel(models.Model):
    """Fixture model with one datetime-mode and one numeric-mode field."""
    # Default 0.0 == the Unix epoch.
    datetime = UnixTimeStampField(default=0.0)
    numeric = UnixTimeStampField(use_numeric=True, default=0.0)
class SubmiddlewareTest(TestCase):
    """The USF_FORMAT setting decides whether stored values come back as
    datetimes, raw timestamps, or each field's own default behaviour."""

    @override_settings(USE_TZ=True, TIME_ZONE='UTC')
    def test_default(self):
        t = SubmiddlewareModel.objects.create()
        expected = timezone.datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
        if hasattr(t, 'refresh_from_db'):
            t.refresh_from_db()
        else:
            # Bug fix: previously re-queried ForTestModel here, reloading a
            # row from the wrong model entirely.
            t = SubmiddlewareModel.objects.get(id=t.id)
        self.assertEqual(t.datetime, expected)
        self.assertEqual(t.numeric, 0)

    @override_settings(USE_TZ=True, TIME_ZONE='UTC', USF_FORMAT='usf_datetime')
    def test_datetime(self):
        # usf_datetime forces both fields to present as datetimes.
        t = SubmiddlewareModel.objects.create()
        expected = timezone.datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
        self.assertEqual(t.datetime, expected)
        self.assertEqual(t.numeric, expected)

    @override_settings(USE_TZ=True, TIME_ZONE='UTC', USF_FORMAT='usf_timestamp')
    def test_timestamp(self):
        # usf_timestamp forces both fields to present as numbers.
        t = SubmiddlewareModel.objects.create()
        self.assertEqual(t.datetime, 0)
        self.assertEqual(t.numeric, 0)

    @override_settings(USE_TZ=True, TIME_ZONE='UTC', USF_FORMAT='invalid')
    def test_invalid_option(self):
        # An unknown USF_FORMAT must fall back to the default behaviour.
        t = SubmiddlewareModel.objects.create()
        expected = timezone.datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
        if hasattr(t, 'refresh_from_db'):
            t.refresh_from_db()
        else:
            # Bug fix: was ForTestModel.objects.get(...) — wrong model.
            t = SubmiddlewareModel.objects.get(id=t.id)
        self.assertEqual(t.datetime, expected)
        self.assertEqual(t.numeric, 0)
| 40.068664
| 96
| 0.676367
| 4,336
| 32,095
| 4.798662
| 0.043819
| 0.169414
| 0.116884
| 0.070745
| 0.903638
| 0.884606
| 0.878214
| 0.851877
| 0.825924
| 0.800067
| 0
| 0.069526
| 0.197383
| 32,095
| 800
| 97
| 40.11875
| 0.738199
| 0.001838
| 0
| 0.685484
| 0
| 0
| 0.065909
| 0.001592
| 0
| 0
| 0
| 0
| 0.420968
| 1
| 0.080645
| false
| 0
| 0.012903
| 0
| 0.159677
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a419fe36b3b7a41d40f50c6c12fa3a96dea92cc
| 15,611
|
py
|
Python
|
django_comments_xtd/tests/test_moderation.py
|
mcguia/critical-design
|
dbe1c2d24d373dafed7b6214d2259dda76bb633c
|
[
"BSD-2-Clause"
] | null | null | null |
django_comments_xtd/tests/test_moderation.py
|
mcguia/critical-design
|
dbe1c2d24d373dafed7b6214d2259dda76bb633c
|
[
"BSD-2-Clause"
] | null | null | null |
django_comments_xtd/tests/test_moderation.py
|
mcguia/critical-design
|
dbe1c2d24d373dafed7b6214d2259dda76bb633c
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import unicode_literals
import re
try:
from unittest.mock import patch
except ImportError:
from mock import patch
from datetime import datetime, timedelta
import django
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test import TestCase
try:
from django_comments.models import CommentFlag
except ImportError:
from django.contrib.comments.models import CommentFlag
from django_comments_xtd import django_comments
from django_comments_xtd.models import LIKEDIT_FLAG, DISLIKEDIT_FLAG
from django_comments_xtd.tests.models import Diary
# Dotted path of the send_mail function to patch in the moderation tests.
# Prefer the standalone django_comments app; fall back to the legacy
# django.contrib.comments location when the app is not installed.
# (The original used the `imp` module, deprecated since Python 3.4 and
# removed in 3.12 — importlib.util.find_spec is the supported replacement.)
import importlib.util

if importlib.util.find_spec('django_comments') is not None:
    send_mail = 'django_comments.moderation.send_mail'
else:
    send_mail = 'django.contrib.comments.moderation.send_mail'
class ModeratorApprovesComment(TestCase):
    """Comments on a fresh Diary entry pass moderation and become public."""

    def setUp(self):
        # Patch both mail senders so no real email leaves the tests.
        patcher_app1 = patch(send_mail)
        patcher_app2 = patch('django_comments_xtd.views.send_mail')
        self.mailer_app1 = patcher_app1.start()
        self.mailer_app2 = patcher_app2.start()
        # Fix: patches were started but never stopped, leaking into
        # every test that runs after this class.
        self.addCleanup(patcher_app1.stop)
        self.addCleanup(patcher_app2.stop)
        diary_entry = Diary.objects.create(
            body="What I did on October...",
            allow_comments=True,
            publish=datetime.now())
        self.form = django_comments.get_form()(diary_entry)

    def post_valid_data(self):
        """POST a well-formed comment for the diary entry."""
        data = {"name": "Bob", "email": "bob@example.com", "followup": True,
                "reply_to": 0, "level": 1, "order": 1,
                "comment": "Es war einmal eine kleine..."}
        data.update(self.form.initial)
        self.response = self.client.post(reverse("comments-post-comment"),
                                         data=data, follow=True)

    def get_confirm_comment_url(self, key):
        """GET the confirmation URL extracted from the confirmation email."""
        self.response = self.client.get(reverse("comments-xtd-confirm",
                                                kwargs={'key': key}),
                                        follow=True)

    def test_moderation_with_registered_user(self):
        User.objects.create_user("bob", "bob@example.com", "pwd")
        self.client.login(username="bob", password="pwd")
        # Fix: self.assert_ is a deprecated unittest alias (removed in
        # Python 3.12); use the modern assertion methods throughout.
        self.assertEqual(self.mailer_app1.call_count, 0)
        self.post_valid_data()
        # Moderation class:
        # django_comments_xtd.tests.models.DiaryCommentModerator
        # must trigger an email once comment has passed moderation.
        self.assertEqual(self.mailer_app1.call_count, 1)
        comment = django_comments.get_model()\
            .objects.for_app_models('tests.diary')[0]
        self.assertIs(comment.is_public, True)

    def test_moderation_with_unregistered_user(self):
        self.post_valid_data()
        self.assertEqual(self.mailer_app1.call_count, 0)
        self.assertEqual(self.mailer_app2.call_count, 1)
        mail_msg = self.mailer_app2.call_args[0][1]
        key = str(re.search(r'http://.+/confirm/(?P<key>[\S]+)',
                            mail_msg).group("key"))
        self.get_confirm_comment_url(key)
        self.assertEqual(self.mailer_app1.call_count, 1)
        self.assertEqual(self.mailer_app2.call_count, 1)
        comment = django_comments.get_model()\
            .objects.for_app_models('tests.diary')[0]
        self.assertIs(comment.is_public, True)
class ModeratorHoldsComment(TestCase):
    """Comments on a stale Diary entry (published 5 days ago) are held
    for moderation and stay non-public."""

    def setUp(self):
        # Patch both mail senders so no real email leaves the tests.
        patcher_app1 = patch(send_mail)
        patcher_app2 = patch('django_comments_xtd.views.send_mail')
        self.mailer_app1 = patcher_app1.start()
        self.mailer_app2 = patcher_app2.start()
        # Fix: patches were started but never stopped, leaking into
        # every test that runs after this class.
        self.addCleanup(patcher_app1.stop)
        self.addCleanup(patcher_app2.stop)
        diary_entry = Diary.objects.create(
            body="What I did Yesterday...",
            allow_comments=True,
            publish=datetime.now() - timedelta(days=5))
        self.form = django_comments.get_form()(diary_entry)

    def post_valid_data(self):
        """POST a well-formed comment for the diary entry."""
        data = {"name": "Bob", "email": "bob@example.com", "followup": True,
                "reply_to": 0, "level": 1, "order": 1,
                "comment": "Es war einmal eine kleine..."}
        data.update(self.form.initial)
        self.response = self.client.post(reverse("comments-post-comment"),
                                         data=data, follow=True)

    def get_confirm_comment_url(self, key):
        """GET the confirmation URL extracted from the confirmation email."""
        self.response = self.client.get(reverse("comments-xtd-confirm",
                                                kwargs={'key': key}),
                                        follow=True)

    def test_moderation_with_registered_user(self):
        User.objects.create_user("bob", "bob@example.com", "pwd")
        self.client.login(username="bob", password="pwd")
        self.post_valid_data()
        # Moderation class:
        # django_comments_xtd.tests.models.DiaryCommentModerator
        # must trigger an email once comment has passed moderation.
        # (Fix: deprecated self.assert_ replaced with modern assertions.)
        self.assertEqual(self.mailer_app1.call_count, 1)
        comment = django_comments.get_model()\
            .objects.for_app_models('tests.diary')[0]
        self.assertIs(comment.is_public, False)

    def test_moderation_with_unregistered_user(self):
        self.post_valid_data()
        self.assertEqual(self.mailer_app1.call_count, 0)
        self.assertEqual(self.mailer_app2.call_count, 1)
        mail_msg = self.mailer_app2.call_args[0][1]
        key = str(re.search(r'http://.+/confirm/(?P<key>[\S]+)',
                            mail_msg).group("key"))
        self.get_confirm_comment_url(key)
        self.assertEqual(self.mailer_app1.call_count, 1)
        self.assertEqual(self.mailer_app2.call_count, 1)
        comment = django_comments.get_model()\
            .objects.for_app_models('tests.diary')[0]
        self.assertIs(comment.is_public, False)
class FlaggingRemovalSuggestion(TestCase):
    """Scenario to test the flag removal_suggestion_notification"""

    def setUp(self):
        patcher = patch('django_comments_xtd.moderation.send_mail')
        self.mailer = patcher.start()
        # Fix: the patch was started but never stopped, leaking into
        # every test that runs after this class.
        self.addCleanup(patcher.stop)
        diary_entry = Diary.objects.create(
            body="What I did on October...",
            allow_comments=True,
            publish=datetime.now())
        self.form = django_comments.get_form()(diary_entry)
        User.objects.create_user("bob", "bob@example.com", "pwd")
        self.client.login(username="bob", password="pwd")
        data = {"name": "Bob", "email": "bob@example.com", "followup": True,
                "reply_to": 0, "level": 1, "order": 1,
                "comment": "Es war einmal eine kleine..."}
        data.update(self.form.initial)
        self.response = self.client.post(reverse("comments-post-comment"),
                                         data=data, follow=True)

    def test_anonymous_user_redirected_when_flagging(self):
        self.client.logout()
        comment = django_comments.get_model()\
            .objects.for_app_models('tests.diary')[0]
        flag_url = reverse("comments-flag", args=[comment.id])
        response = self.client.get(flag_url, follow=True)
        dest_url = '/accounts/login/?next=/comments/flag/1/'
        self.assertRedirects(response, dest_url)

    def test_loggedin_user_can_flag_comment(self):
        comment = django_comments.get_model()\
            .objects.for_app_models('tests.diary')[0]
        flag_url = reverse("comments-flag", args=[comment.id])
        response = self.client.get(flag_url)
        self.assertTemplateUsed(response, 'comments/flag.html')
        response = self.client.post(flag_url)
        self.assertRedirects(response, reverse("comments-flag-done") + "?c=1")
        user = User.objects.get(username='bob')
        flags = CommentFlag.objects.filter(comment=comment,
                                           user=user,
                                           flag=CommentFlag.SUGGEST_REMOVAL)
        # Fix: deprecated self.assert_ replaced with assertEqual.
        self.assertEqual(flags.count(), 1)

    def test_email_is_triggered(self):
        flag_url = reverse("comments-flag", args=[1])
        self.assertEqual(self.mailer.call_count, 0)
        self.client.post(flag_url)
        self.assertEqual(self.mailer.call_count, 1)
class FlaggingLikedItAndDislikedit(TestCase):
    """Scenario to test the 'I liked it' / 'I disliked it' flags and
    their mutual exclusion.

    (Fix: the class docstring was a copy-paste of the removal-suggestion
    scenario; deprecated self.assert_ calls replaced with modern
    assertions; repeated lookup/filter boilerplate factored into helpers.)
    """

    def setUp(self):
        diary_entry = Diary.objects.create(
            body="What I did on October...",
            allow_comments=True,
            publish=datetime.now())
        self.form = django_comments.get_form()(diary_entry)
        User.objects.create_user("bob", "bob@example.com", "pwd")
        self.client.login(username="bob", password="pwd")
        data = {"name": "Bob", "email": "bob@example.com", "followup": True,
                "reply_to": 0, "level": 1, "order": 1,
                "comment": "Es war einmal eine kleine..."}
        data.update(self.form.initial)
        self.response = self.client.post(reverse("comments-post-comment"),
                                         data=data, follow=True)

    def _get_comment(self):
        """Return the single comment posted during setUp."""
        return django_comments.get_model()\
            .objects.for_app_models('tests.diary')[0]

    def _flag_count(self, comment, flag):
        """How many times user 'bob' has set *flag* on *comment*."""
        user = User.objects.get(username='bob')
        return CommentFlag.objects.filter(comment=comment,
                                          user=user,
                                          flag=flag).count()

    def test_anonymous_user_is_redirected(self):
        self.client.logout()
        comment = self._get_comment()
        like_url = reverse("comments-xtd-like", args=[comment.id])
        response = self.client.get(like_url, follow=True)
        dest_url = '/accounts/login/?next=/comments/like/1/'
        self.assertRedirects(response, dest_url)
        dislike_url = reverse("comments-xtd-dislike", args=[comment.id])
        response = self.client.get(dislike_url, follow=True)
        dest_url = '/accounts/login/?next=/comments/dislike/1/'
        self.assertRedirects(response, dest_url)

    def test_loggedin_user_can_like(self):
        if django.VERSION < (1, 5):
            return  # like/dislike views require Django >= 1.5
        comment = self._get_comment()
        like_url = reverse("comments-xtd-like", args=[comment.id])
        response = self.client.get(like_url)
        self.assertTemplateUsed(response, 'django_comments_xtd/like.html')
        response = self.client.post(like_url)
        self.assertRedirects(response,
                             reverse("comments-xtd-like-done") + "?c=1")
        self.assertEqual(self._flag_count(comment, LIKEDIT_FLAG), 1)

    def test_loggedin_user_can_dislike(self):
        if django.VERSION < (1, 5):
            return
        comment = self._get_comment()
        dislike_url = reverse("comments-xtd-dislike", args=[comment.id])
        response = self.client.get(dislike_url)
        self.assertTemplateUsed(response, 'django_comments_xtd/dislike.html')
        response = self.client.post(dislike_url)
        self.assertRedirects(response,
                             reverse("comments-xtd-dislike-done") + "?c=1")
        self.assertEqual(self._flag_count(comment, DISLIKEDIT_FLAG), 1)

    def test_likedit_can_be_cancelled(self):
        if django.VERSION < (1, 5):
            return
        comment = self._get_comment()
        like_url = reverse("comments-xtd-like", args=[comment.id])
        self.client.post(like_url)
        self.assertEqual(self._flag_count(comment, LIKEDIT_FLAG), 1)
        # Liking the same comment again cancels the flag.
        self.client.post(like_url)
        self.assertEqual(self._flag_count(comment, LIKEDIT_FLAG), 0)

    def test_dislikedit_can_be_cancelled(self):
        if django.VERSION < (1, 5):
            return
        comment = self._get_comment()
        dislike_url = reverse("comments-xtd-dislike", args=[comment.id])
        self.client.post(dislike_url, follow=True)
        self.assertEqual(self._flag_count(comment, DISLIKEDIT_FLAG), 1)
        # Disliking the same comment again cancels the flag.
        self.client.post(dislike_url)
        self.assertEqual(self._flag_count(comment, DISLIKEDIT_FLAG), 0)

    def test_likedit_cancels_dislikedit(self):
        if django.VERSION < (1, 5):
            return
        comment = self._get_comment()
        dislike_url = reverse("comments-xtd-dislike", args=[comment.id])
        self.client.post(dislike_url)
        self.assertEqual(self._flag_count(comment, DISLIKEDIT_FLAG), 1)
        # Liking the comment replaces the dislike flag with a like flag.
        like_url = reverse("comments-xtd-like", args=[comment.id])
        self.client.post(like_url)
        self.assertEqual(self._flag_count(comment, DISLIKEDIT_FLAG), 0)
        self.assertEqual(self._flag_count(comment, LIKEDIT_FLAG), 1)

    def test_dislikedit_cancels_likedit(self):
        if django.VERSION < (1, 5):
            return
        comment = self._get_comment()
        like_url = reverse("comments-xtd-like", args=[comment.id])
        self.client.post(like_url)
        self.assertEqual(self._flag_count(comment, LIKEDIT_FLAG), 1)
        # Disliking the comment replaces the like flag with a dislike flag.
        dislike_url = reverse("comments-xtd-dislike", args=[comment.id])
        self.client.post(dislike_url)
        self.assertEqual(self._flag_count(comment, LIKEDIT_FLAG), 0)
        self.assertEqual(self._flag_count(comment, DISLIKEDIT_FLAG), 1)
| 45.914706
| 78
| 0.581449
| 1,735
| 15,611
| 5.039193
| 0.098559
| 0.04964
| 0.033055
| 0.029738
| 0.870639
| 0.857028
| 0.829349
| 0.804415
| 0.802013
| 0.78417
| 0
| 0.00921
| 0.304465
| 15,611
| 339
| 79
| 46.050147
| 0.796003
| 0.039716
| 0
| 0.791096
| 0
| 0
| 0.108484
| 0.033534
| 0
| 0
| 0
| 0
| 0.133562
| 1
| 0.075342
| false
| 0.013699
| 0.061644
| 0
| 0.171233
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a5e5d6ef4e44de1f8564b05f1ba64f19dba228e
| 3,884
|
py
|
Python
|
DataStructures/BinarySearchTree/tests/binary_search_tree_test.py
|
Nalhin/AlgorithmsAndDataStructures
|
2d2c87d0572e107c993c3c8866b8beefd4d22082
|
[
"MIT"
] | 1
|
2021-11-16T13:02:25.000Z
|
2021-11-16T13:02:25.000Z
|
DataStructures/BinarySearchTree/tests/binary_search_tree_test.py
|
Nalhin/AlgorithmsAndDataStructures
|
2d2c87d0572e107c993c3c8866b8beefd4d22082
|
[
"MIT"
] | null | null | null |
DataStructures/BinarySearchTree/tests/binary_search_tree_test.py
|
Nalhin/AlgorithmsAndDataStructures
|
2d2c87d0572e107c993c3c8866b8beefd4d22082
|
[
"MIT"
] | null | null | null |
from random import random
import pytest
from DataStructures.BinarySearchTree.binary_search_tree import BinarySearchTree
@pytest.fixture()
def binary_search_tree():
    """Provide a fresh, empty BinarySearchTree for each test."""
    tree = BinarySearchTree()
    return tree
def populate_tree(binary_search_tree, items):
    """Insert every element of *items* into the tree, in order."""
    for item in items:
        binary_search_tree.insert(item)
class TestBinarySearchTree:
    """Behavioural tests for BinarySearchTree via the shared fixture."""

    # Fixed data set; includes a duplicate 2 on purpose.
    FIXED_VALUES = (3, 2, 12, 4, 5, 6, 2)

    @staticmethod
    def _random_values(count=100):
        """A list of *count* uniform floats in [0, 1)."""
        return [random() for _ in range(count)]

    def test_inorder_tree_traversal(self, binary_search_tree):
        values = list(self.FIXED_VALUES)
        populate_tree(binary_search_tree, values)
        assert binary_search_tree.inorder_tree_traversal() == sorted(values)

    def test_inorder_tree_traversal_random_values(self, binary_search_tree):
        values = self._random_values()
        populate_tree(binary_search_tree, values)
        assert binary_search_tree.inorder_tree_traversal() == sorted(values)

    def test_minimum(self, binary_search_tree):
        values = list(self.FIXED_VALUES)
        populate_tree(binary_search_tree, values)
        assert binary_search_tree.minimum() == min(values)

    def test_minimum_random_values(self, binary_search_tree):
        values = self._random_values()
        populate_tree(binary_search_tree, values)
        assert binary_search_tree.minimum() == min(values)

    def test_maximum(self, binary_search_tree):
        values = list(self.FIXED_VALUES)
        populate_tree(binary_search_tree, values)
        assert binary_search_tree.maximum() == max(values)

    def test_maximum_random_values(self, binary_search_tree):
        values = self._random_values()
        populate_tree(binary_search_tree, values)
        assert binary_search_tree.maximum() == max(values)

    def test_tree_successor(self, binary_search_tree):
        target, successor = 4, 5
        # Insertion order fixes the tree shape; 5 becomes 4's successor.
        populate_tree(binary_search_tree, [3, successor, 2, 12, 6, 2, target])
        node = binary_search_tree.search(target)
        assert binary_search_tree.tree_successor(node).val == successor

    def test_tree_predecessor(self, binary_search_tree):
        target, predecessor = 5, 4
        populate_tree(binary_search_tree, [3, predecessor, 2, 12, 6, 2, target])
        node = binary_search_tree.search(target)
        assert binary_search_tree.tree_predecessor(node).val == predecessor

    def test_search(self, binary_search_tree):
        needle = 12
        populate_tree(binary_search_tree, [3, 2, needle, 4, 5, 6, 2])
        assert binary_search_tree.search(needle).val == needle

    def test_search_random_values(self, binary_search_tree):
        values = self._random_values()
        needle = values[20]
        populate_tree(binary_search_tree, values)
        assert binary_search_tree.search(needle).val == needle

    def test_insert(self, binary_search_tree):
        values = list(self.FIXED_VALUES)
        populate_tree(binary_search_tree, values)
        assert binary_search_tree.inorder_tree_traversal() == sorted(values)

    def test_insert_random_values(self, binary_search_tree):
        values = self._random_values()
        populate_tree(binary_search_tree, values)
        assert binary_search_tree.inorder_tree_traversal() == sorted(values)

    def test_delete(self, binary_search_tree):
        values = list(self.FIXED_VALUES)
        populate_tree(binary_search_tree, values)
        # Delete the last four inserted values (one is a duplicate 2).
        for value in values[3:]:
            binary_search_tree.delete(value)
        assert binary_search_tree.inorder_tree_traversal() == sorted(
            values[:-4]
        )
| 29.203008
| 79
| 0.668898
| 493
| 3,884
| 4.947262
| 0.099391
| 0.226322
| 0.301763
| 0.207462
| 0.801148
| 0.751538
| 0.736777
| 0.708897
| 0.708897
| 0.708897
| 0
| 0.028212
| 0.242533
| 3,884
| 132
| 80
| 29.424242
| 0.800816
| 0
| 0
| 0.569767
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151163
| 1
| 0.174419
| false
| 0
| 0.034884
| 0.011628
| 0.232558
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
8a77179b02819d4afffd9754c1de104ccbf039bb
| 92,716
|
py
|
Python
|
app/api/tests/test_api.py
|
edwardly1002/doccano
|
d7b56eedc50132b18dee5f9bd5ef4f9a9ff27441
|
[
"MIT"
] | 1
|
2021-07-25T02:37:01.000Z
|
2021-07-25T02:37:01.000Z
|
app/api/tests/test_api.py
|
edwardly1002/doccano
|
d7b56eedc50132b18dee5f9bd5ef4f9a9ff27441
|
[
"MIT"
] | 11
|
2021-04-06T18:42:53.000Z
|
2022-02-10T10:52:57.000Z
|
app/api/tests/test_api.py
|
munivinayk/Doccano-master
|
9e7aab7b4422579df4e8b67bebd0a9215edf558a
|
[
"MIT"
] | null | null | null |
import os
from django.conf import settings
from django.test import override_settings
from rest_framework import status
from rest_framework.reverse import reverse
from rest_framework.test import APITestCase
from model_mommy import mommy
from ..models import User, SequenceAnnotation, Document, Role, RoleMapping
from ..models import DOCUMENT_CLASSIFICATION, SEQUENCE_LABELING, SEQ2SEQ, SPEECH2TEXT
from ..utils import PlainTextParser, CoNLLParser, JSONParser, CSVParser, FastTextParser
from ..exceptions import FileParseException
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
def create_default_roles():
    """Ensure the three built-in project roles exist (idempotent)."""
    for role_name in (settings.ROLE_PROJECT_ADMIN,
                      settings.ROLE_ANNOTATOR,
                      settings.ROLE_ANNOTATION_APPROVER):
        Role.objects.get_or_create(name=role_name)
def assign_user_to_role(project_member, project, role_name):
    """Attach *project_member* to *project* under the role *role_name*,
    creating the role itself if necessary (idempotent)."""
    role, _created = Role.objects.get_or_create(name=role_name)
    RoleMapping.objects.get_or_create(
        role_id=role.id,
        user_id=project_member.id,
        project_id=project.id,
    )
def remove_all_role_mappings():
    """Delete every RoleMapping row (test-suite cleanup helper)."""
    mappings = RoleMapping.objects.all()
    mappings.delete()
class TestHealthEndpoint(APITestCase):
    """Smoke-test the unauthenticated health-check endpoint."""

    @classmethod
    def setUpTestData(cls):
        cls.url = reverse(viewname='health')

    def test_returns_green_status_on_health_endpoint(self):
        response = self.client.get(self.url, format='json')
        self.assertEqual('green', response.data['status'])
class TestUtilsMixin:
    """Mixed into TestCase subclasses to provide fixture helpers."""

    def _patch_project(self, project, attribute, value):
        """Temporarily set *attribute* on *project*, restoring the prior
        value (persisted) when the test finishes."""
        previous = getattr(project, attribute, None)
        setattr(project, attribute, value)
        project.save()

        def restore():
            setattr(project, attribute, previous)
            project.save()

        self.addCleanup(restore)
@override_settings(STATICFILES_STORAGE='django.contrib.staticfiles.storage.StaticFilesStorage')
class TestProjectListAPI(APITestCase):
    @classmethod
    def setUpTestData(cls):
        """Create two single-member projects, an approver on the main
        project, and a superuser, plus the URL/payload used by the tests."""
        cls.main_project_member_name = 'project_member_name'
        cls.main_project_member_pass = 'project_member_pass'
        cls.sub_project_member_name = 'sub_project_member_name'
        cls.sub_project_member_pass = 'sub_project_member_pass'
        cls.approver_name = 'approver_name_name'
        cls.approver_pass = 'approver_pass'
        cls.super_user_name = 'super_user_name'
        cls.super_user_pass = 'super_user_pass'
        create_default_roles()
        # One ordinary user per project, plus a global superuser.
        main_project_member = User.objects.create_user(username=cls.main_project_member_name,
                                                       password=cls.main_project_member_pass)
        sub_project_member = User.objects.create_user(username=cls.sub_project_member_name,
                                                      password=cls.sub_project_member_pass)
        approver = User.objects.create_user(username=cls.approver_name,
                                            password=cls.approver_pass)
        User.objects.create_superuser(username=cls.super_user_name,
                                      password=cls.super_user_pass,
                                      email='fizz@buzz.com')
        cls.main_project = mommy.make('TextClassificationProject', users=[main_project_member])
        cls.sub_project = mommy.make('TextClassificationProject', users=[sub_project_member])
        # Annotators on their own projects; the approver only on main.
        assign_user_to_role(project_member=main_project_member, project=cls.main_project,
                            role_name=settings.ROLE_ANNOTATOR)
        assign_user_to_role(project_member=sub_project_member, project=cls.sub_project,
                            role_name=settings.ROLE_ANNOTATOR)
        assign_user_to_role(project_member=approver, project=cls.main_project,
                            role_name=settings.ROLE_ANNOTATION_APPROVER)
        cls.url = reverse(viewname='project_list')
        # Payload for the project-creation POST tests.
        cls.data = {'name': 'example', 'project_type': 'DocumentClassification',
                    'description': 'example', 'guideline': 'example',
                    'resourcetype': 'TextClassificationProject'}
        # Baseline: number of projects visible to the main-project member.
        cls.num_project = main_project_member.projects.count()
def test_returns_main_project_to_approver(self):
self.client.login(username=self.approver_name,
password=self.approver_pass)
response = self.client.get(self.url, format='json')
project = response.data[0]
num_project = len(response.data)
self.assertEqual(num_project, self.num_project)
self.assertEqual(project['id'], self.main_project.id)
def test_returns_main_project_to_main_project_member(self):
self.client.login(username=self.main_project_member_name,
password=self.main_project_member_pass)
response = self.client.get(self.url, format='json')
project = response.data[0]
num_project = len(response.data)
self.assertEqual(num_project, self.num_project)
self.assertEqual(project['id'], self.main_project.id)
def test_do_not_return_main_project_to_sub_project_member(self):
self.client.login(username=self.sub_project_member_name,
password=self.sub_project_member_pass)
response = self.client.get(self.url, format='json')
project = response.data[0]
num_project = len(response.data)
self.assertEqual(num_project, self.num_project)
self.assertNotEqual(project['id'], self.main_project.id)
def test_allows_superuser_to_create_project(self):
self.client.login(username=self.super_user_name,
password=self.super_user_pass)
response = self.client.post(self.url, format='json', data=self.data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertFalse(response.json().get('collaborative_annotation'))
self.assertFalse(response.json().get('randomize_document_order'))
def test_allows_superuser_to_create_project_with_flags(self):
self.client.login(username=self.super_user_name,
password=self.super_user_pass)
data = dict(self.data)
data['collaborative_annotation'] = True
data['randomize_document_order'] = True
response = self.client.post(self.url, format='json', data=data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertTrue(response.json().get('collaborative_annotation'))
self.assertTrue(response.json().get('randomize_document_order'))
def test_disallows_project_member_to_create_project(self):
self.client.login(username=self.main_project_member_name,
password=self.main_project_member_pass)
response = self.client.post(self.url, format='json', data=self.data)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
@classmethod
def doCleanups(cls):
remove_all_role_mappings()
@override_settings(STATICFILES_STORAGE='django.contrib.staticfiles.storage.StaticFilesStorage')
class TestProjectDetailAPI(APITestCase):
    """Project detail endpoint: member read access, admin update/delete,
    and 403 responses for users outside the project."""

    @classmethod
    def setUpTestData(cls):
        cls.project_member_name = 'project_member_name'
        cls.project_member_pass = 'project_member_pass'
        cls.non_project_member_name = 'non_project_member_name'
        cls.non_project_member_pass = 'non_project_member_pass'
        cls.admin_user_name = 'admin_user_name'
        cls.admin_user_pass = 'admin_user_pass'
        create_default_roles()
        cls.project_member = User.objects.create_user(
            username=cls.project_member_name, password=cls.project_member_pass)
        outsider = User.objects.create_user(
            username=cls.non_project_member_name, password=cls.non_project_member_pass)
        admin = User.objects.create_superuser(
            username=cls.admin_user_name, password=cls.admin_user_pass,
            email='fizz@buzz.com')
        cls.main_project = mommy.make(
            'TextClassificationProject', users=[cls.project_member, admin])
        # A second project the outsider belongs to, so they are a valid user
        # who simply lacks membership in the project under test.
        mommy.make('TextClassificationProject', users=[outsider])
        cls.url = reverse(viewname='project_detail', args=[cls.main_project.id])
        cls.data = {'description': 'lorem'}
        assign_user_to_role(project_member=cls.project_member,
                            project=cls.main_project,
                            role_name=settings.ROLE_ANNOTATOR)
        assign_user_to_role(project_member=admin,
                            project=cls.main_project,
                            role_name=settings.ROLE_PROJECT_ADMIN)

    def _login(self, username, password):
        # Thin wrapper so each test states only its request and assertion.
        self.client.login(username=username, password=password)

    def test_returns_main_project_detail_to_main_project_member(self):
        self._login(self.project_member_name, self.project_member_pass)
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.data['id'], self.main_project.id)

    def test_do_not_return_main_project_to_sub_project_member(self):
        self._login(self.non_project_member_name, self.non_project_member_pass)
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_allows_admin_to_update_project(self):
        self._login(self.admin_user_name, self.admin_user_pass)
        response = self.client.patch(self.url, format='json', data=self.data)
        self.assertEqual(response.data['description'], self.data['description'])

    def test_disallows_non_project_member_to_update_project(self):
        self._login(self.non_project_member_name, self.non_project_member_pass)
        response = self.client.patch(self.url, format='json', data=self.data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_allows_admin_to_delete_project(self):
        self._login(self.admin_user_name, self.admin_user_pass)
        response = self.client.delete(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_disallows_non_project_member_to_delete_project(self):
        self._login(self.non_project_member_name, self.non_project_member_pass)
        response = self.client.delete(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    @classmethod
    def doCleanups(cls):
        remove_all_role_mappings()
class TestLabelListAPI(APITestCase):
    """Label list endpoint: visibility, creation rights, and shortcut-key
    uniqueness rules (prefix+suffix unique per project, reusable across
    projects)."""

    @classmethod
    def setUpTestData(cls):
        cls.project_member_name = 'project_member_name'
        cls.project_member_pass = 'project_member_pass'
        cls.non_project_member_name = 'non_project_member_name'
        cls.non_project_member_pass = 'non_project_member_pass'
        cls.admin_user_name = 'admin_user_name'
        cls.admin_user_pass = 'admin_user_pass'
        create_default_roles()
        cls.project_member = User.objects.create_user(username=cls.project_member_name,
                                                      password=cls.project_member_pass)
        non_project_member = User.objects.create_user(username=cls.non_project_member_name,
                                                      password=cls.non_project_member_pass)
        project_admin = User.objects.create_superuser(username=cls.admin_user_name,
                                                      password=cls.admin_user_pass,
                                                      email='fizz@buzz.com')
        cls.main_project = mommy.make('Project', users=[cls.project_member, project_admin])
        cls.main_project_label = mommy.make('Label', project=cls.main_project)
        # sub_project holds a label that must never leak into main_project's list;
        # other_project is used to show shortcut keys may repeat across projects.
        sub_project = mommy.make('Project', users=[non_project_member])
        other_project = mommy.make('Project', users=[project_admin])
        mommy.make('Label', project=sub_project)
        cls.url = reverse(viewname='label_list', args=[cls.main_project.id])
        cls.other_url = reverse(viewname='label_list', args=[other_project.id])
        cls.data = {'text': 'example'}
        assign_user_to_role(project_member=cls.project_member, project=cls.main_project,
                            role_name=settings.ROLE_ANNOTATOR)

    def test_returns_labels_to_project_member(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_do_not_return_labels_to_non_project_member(self):
        self.client.login(username=self.non_project_member_name,
                          password=self.non_project_member_pass)
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_do_not_return_labels_of_other_projects(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.get(self.url, format='json')
        label = response.data[0]
        num_labels = len(response.data)
        self.assertEqual(num_labels, len(self.main_project.labels.all()))
        self.assertEqual(label['id'], self.main_project_label.id)

    def test_allows_admin_to_create_label(self):
        self.client.login(username=self.admin_user_name,
                          password=self.admin_user_pass)
        response = self.client.post(self.url, format='json', data=self.data)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_can_create_multiple_labels_without_shortcut_key(self):
        # Null shortcut keys must not trigger the uniqueness constraint.
        self.client.login(username=self.admin_user_name,
                          password=self.admin_user_pass)
        labels = [
            {'text': 'Ruby', 'prefix_key': None, 'suffix_key': None},
            {'text': 'PHP', 'prefix_key': None, 'suffix_key': None}
        ]
        for label in labels:
            response = self.client.post(self.url, format='json', data=label)
            self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_can_create_same_label_in_multiple_projects(self):
        # Shortcut uniqueness is scoped per project, not globally.
        self.client.login(username=self.admin_user_name,
                          password=self.admin_user_pass)
        label = {'text': 'LOC', 'prefix_key': None, 'suffix_key': 'l'}
        response = self.client.post(self.url, format='json', data=label)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        response = self.client.post(self.other_url, format='json', data=label)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_can_create_same_suffix_with_different_prefix(self):
        # Same suffix is fine when the (prefix, suffix) pair differs.
        self.client.login(username=self.admin_user_name,
                          password=self.admin_user_pass)
        label = {'text': 'Person', 'prefix_key': None, 'suffix_key': 'p'}
        response = self.client.post(self.url, format='json', data=label)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        label = {'text': 'Percentage', 'prefix_key': 'ctrl', 'suffix_key': 'p'}
        response = self.client.post(self.url, format='json', data=label)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_cannot_create_same_shortcut_key(self):
        self.client.login(username=self.admin_user_name,
                          password=self.admin_user_pass)
        label = {'text': 'Person', 'prefix_key': None, 'suffix_key': 'p'}
        response = self.client.post(self.url, format='json', data=label)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        label = {'text': 'Percentage', 'prefix_key': None, 'suffix_key': 'p'}
        response = self.client.post(self.url, format='json', data=label)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_disallows_project_member_to_create_label(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.post(self.url, format='json', data=self.data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    @classmethod
    def doCleanups(cls):
        remove_all_role_mappings()
class TestLabelDetailAPI(APITestCase):
    """Label detail endpoint: member read access, superuser update (including
    shortcut-key changes) and deletion.

    Fix: ``create_default_roles()`` was called twice in ``setUpTestData``;
    the redundant second call has been removed — roles only need to exist
    before ``assign_user_to_role``.
    """

    @classmethod
    def setUpTestData(cls):
        cls.project_member_name = 'project_member_name'
        cls.project_member_pass = 'project_member_pass'
        cls.non_project_member_name = 'non_project_member_name'
        cls.non_project_member_pass = 'non_project_member_pass'
        cls.super_user_name = 'super_user_name'
        cls.super_user_pass = 'super_user_pass'
        create_default_roles()
        project_member = User.objects.create_user(username=cls.project_member_name,
                                                  password=cls.project_member_pass)
        User.objects.create_user(username=cls.non_project_member_name, password=cls.non_project_member_pass)
        # Todo: change super_user to project_admin.
        super_user = User.objects.create_superuser(username=cls.super_user_name,
                                                   password=cls.super_user_pass,
                                                   email='fizz@buzz.com')
        project = mommy.make('Project', users=[project_member, super_user])
        cls.label = mommy.make('Label', project=project)
        # A second label with a shortcut, so updating the shortcut can be tested.
        cls.label_with_shortcut = mommy.make('Label', suffix_key='l', project=project)
        cls.url = reverse(viewname='label_detail', args=[project.id, cls.label.id])
        cls.url_with_shortcut = reverse(viewname='label_detail', args=[project.id, cls.label_with_shortcut.id])
        cls.data = {'text': 'example'}
        assign_user_to_role(project_member=project_member, project=project,
                            role_name=settings.ROLE_ANNOTATOR)

    def test_returns_label_to_project_member(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.data['id'], self.label.id)

    def test_do_not_return_label_to_non_project_member(self):
        self.client.login(username=self.non_project_member_name,
                          password=self.non_project_member_pass)
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_allows_superuser_to_update_label(self):
        self.client.login(username=self.super_user_name,
                          password=self.super_user_pass)
        response = self.client.patch(self.url, format='json', data=self.data)
        self.assertEqual(response.data['text'], self.data['text'])

    def test_allows_superuser_to_update_label_with_shortcut(self):
        self.client.login(username=self.super_user_name,
                          password=self.super_user_pass)
        response = self.client.patch(self.url_with_shortcut, format='json', data={'suffix_key': 's'})
        self.assertEqual(response.data['suffix_key'], 's')

    def test_disallows_project_member_to_update_label(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.patch(self.url, format='json', data=self.data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_allows_superuser_to_delete_label(self):
        self.client.login(username=self.super_user_name,
                          password=self.super_user_pass)
        response = self.client.delete(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_disallows_project_member_to_delete_label(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.delete(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    @classmethod
    def doCleanups(cls):
        remove_all_role_mappings()
class TestLabelUploadAPI(APITestCase):
    """Label file-upload endpoint: project admins may upload, annotators may
    not, and invalid files are rejected with 400.

    Fix: ``create_default_roles()`` was called twice in ``setUpTestData``;
    the redundant second call has been removed.
    """

    @classmethod
    def setUpTestData(cls):
        cls.project_member_name = 'project_member_name'
        cls.project_member_pass = 'project_member_pass'
        cls.non_project_member_name = 'non_project_member_name'
        cls.non_project_member_pass = 'non_project_member_pass'
        # NOTE(review): despite the name, this user is a plain user assigned
        # the project-admin role (not a Django superuser) — consider renaming.
        cls.super_user_name = 'super_user_name'
        cls.super_user_pass = 'super_user_pass'
        create_default_roles()
        project_member = User.objects.create_user(username=cls.project_member_name,
                                                  password=cls.project_member_pass)
        User.objects.create_user(username=cls.non_project_member_name, password=cls.non_project_member_pass)
        project_admin = User.objects.create_user(username=cls.super_user_name,
                                                 password=cls.super_user_pass)
        project = mommy.make('Project', users=[project_member, project_admin])
        cls.url = reverse(viewname='label_upload', args=[project.id])
        assign_user_to_role(project_member=project_admin, project=project, role_name=settings.ROLE_PROJECT_ADMIN)
        assign_user_to_role(project_member=project_member, project=project, role_name=settings.ROLE_ANNOTATOR)

    def help_to_upload_file(self, filename, expected_status):
        """POST a fixture file from DATA_DIR and assert the response status."""
        with open(os.path.join(DATA_DIR, filename), 'rb') as f:
            response = self.client.post(self.url, data={'file': f})
        self.assertEqual(response.status_code, expected_status)

    def test_allows_project_admin_to_upload_label(self):
        self.client.login(username=self.super_user_name,
                          password=self.super_user_pass)
        self.help_to_upload_file('valid_labels.json', status.HTTP_201_CREATED)

    def test_disallows_project_member_to_upload_label(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        self.help_to_upload_file('valid_labels.json', status.HTTP_403_FORBIDDEN)

    def test_try_to_upload_invalid_file(self):
        self.client.login(username=self.super_user_name,
                          password=self.super_user_pass)
        self.help_to_upload_file('invalid_labels.json', status.HTTP_400_BAD_REQUEST)

    @classmethod
    def doCleanups(cls):
        remove_all_role_mappings()
class TestDocumentListAPI(APITestCase, TestUtilsMixin):
    """Document list endpoint: membership filtering, annotation-status
    filtering (per-user vs. collaborative), document ordering (fixed and
    per-user randomized), and creation permissions.

    Fixture shape: main_project has 3 documents, 2 of which the project
    member has annotated; random_order_project has 100 documents and
    randomize_document_order=True.
    """

    @classmethod
    def setUpTestData(cls):
        cls.project_member_name = 'project_member_name'
        cls.project_member_pass = 'project_member_pass'
        cls.non_project_member_name = 'non_project_member_name'
        cls.non_project_member_pass = 'non_project_member_pass'
        cls.super_user_name = 'super_user_name'
        cls.super_user_pass = 'super_user_pass'
        create_default_roles()
        project_member = User.objects.create_user(username=cls.project_member_name,
                                                  password=cls.project_member_pass)
        non_project_member = User.objects.create_user(username=cls.non_project_member_name,
                                                      password=cls.non_project_member_pass)
        super_user = User.objects.create_superuser(username=cls.super_user_name,
                                                   password=cls.super_user_pass,
                                                   email='fizz@buzz.com')
        cls.main_project = mommy.make('TextClassificationProject', users=[project_member, super_user])
        # doc1/doc2 get annotations below; the third document stays unannotated.
        doc1 = mommy.make('Document', project=cls.main_project)
        doc2 = mommy.make('Document', project=cls.main_project)
        mommy.make('Document', project=cls.main_project)
        cls.random_order_project = mommy.make('TextClassificationProject', users=[project_member, super_user],
                                              randomize_document_order=True)
        mommy.make('Document', 100, project=cls.random_order_project)
        sub_project = mommy.make('TextClassificationProject', users=[non_project_member])
        mommy.make('Document', project=sub_project)
        cls.url = reverse(viewname='doc_list', args=[cls.main_project.id])
        cls.random_order_project_url = reverse(viewname='doc_list', args=[cls.random_order_project.id])
        cls.data = {'text': 'example'}
        assign_user_to_role(project_member=project_member, project=cls.main_project,
                            role_name=settings.ROLE_ANNOTATOR)
        assign_user_to_role(project_member=project_member, project=cls.random_order_project,
                            role_name=settings.ROLE_ANNOTATOR)
        # Annotations belong to project_member only — the superuser has none.
        mommy.make('DocumentAnnotation', document=doc1, user=project_member)
        mommy.make('DocumentAnnotation', document=doc2, user=project_member)

    def _test_list(self, url, username, password, expected_num_results):
        """Log in, GET the list, and assert 200 plus the paginated result count."""
        self.client.login(username=username, password=password)
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.json().get('results')), expected_num_results)

    def test_returns_docs_to_project_member(self):
        self._test_list(self.url,
                        username=self.project_member_name,
                        password=self.project_member_pass,
                        expected_num_results=3)

    def test_returns_docs_to_project_member_filtered_to_active(self):
        # "Active" = documents without an annotation by the requesting user.
        self._test_list('{}?doc_annotations__isnull=true'.format(self.url),
                        username=self.project_member_name,
                        password=self.project_member_pass,
                        expected_num_results=1)

    def test_returns_docs_to_project_member_filtered_to_completed(self):
        self._test_list('{}?doc_annotations__isnull=false'.format(self.url),
                        username=self.project_member_name,
                        password=self.project_member_pass,
                        expected_num_results=2)

    def test_returns_docs_to_project_member_filtered_to_active_with_collaborative_annotation(self):
        # Before enabling collaboration the superuser (with no annotations of
        # their own) sees all 3 docs as active; afterwards anyone's annotation
        # marks a document as completed, leaving 1 active.
        self._test_list('{}?doc_annotations__isnull=true'.format(self.url),
                        username=self.super_user_name,
                        password=self.super_user_pass,
                        expected_num_results=3)
        self._patch_project(self.main_project, 'collaborative_annotation', True)
        self._test_list('{}?doc_annotations__isnull=true'.format(self.url),
                        username=self.super_user_name,
                        password=self.super_user_pass,
                        expected_num_results=1)

    def test_returns_docs_to_project_member_filtered_to_completed_with_collaborative_annotation(self):
        self._test_list('{}?doc_annotations__isnull=false'.format(self.url),
                        username=self.super_user_name,
                        password=self.super_user_pass,
                        expected_num_results=0)
        self._patch_project(self.main_project, 'collaborative_annotation', True)
        self._test_list('{}?doc_annotations__isnull=false'.format(self.url),
                        username=self.super_user_name,
                        password=self.super_user_pass,
                        expected_num_results=2)

    def test_returns_docs_in_consistent_order_for_all_users(self):
        # Without randomize_document_order, every user sees the same ordering.
        self.client.login(username=self.project_member_name, password=self.project_member_pass)
        user1_documents = self.client.get(self.url, format='json').json().get('results')
        self.client.logout()
        self.client.login(username=self.super_user_name, password=self.super_user_pass)
        user2_documents = self.client.get(self.url, format='json').json().get('results')
        self.client.logout()
        self.assertEqual([doc['id'] for doc in user1_documents], [doc['id'] for doc in user2_documents])

    def test_can_return_docs_in_consistent_random_order(self):
        # Randomized ordering must be stable per user but differ between users.
        self.client.login(username=self.project_member_name, password=self.project_member_pass)
        user1_documents1 = self.client.get(self.random_order_project_url, format='json').json().get('results')
        user1_documents2 = self.client.get(self.random_order_project_url, format='json').json().get('results')
        self.client.logout()
        self.assertEqual(user1_documents1, user1_documents2)
        self.client.login(username=self.super_user_name, password=self.super_user_pass)
        user2_documents1 = self.client.get(self.random_order_project_url, format='json').json().get('results')
        user2_documents2 = self.client.get(self.random_order_project_url, format='json').json().get('results')
        self.client.logout()
        self.assertEqual(user2_documents1, user2_documents2)
        self.assertNotEqual(user1_documents1, user2_documents1)
        self.assertNotEqual(user1_documents2, user2_documents2)

    def test_do_not_return_docs_to_non_project_member(self):
        self.client.login(username=self.non_project_member_name,
                          password=self.non_project_member_pass)
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_do_not_return_docs_of_other_projects(self):
        self._test_list(self.url,
                        username=self.project_member_name,
                        password=self.project_member_pass,
                        expected_num_results=self.main_project.documents.count())

    def test_allows_superuser_to_create_doc(self):
        self.client.login(username=self.super_user_name,
                          password=self.super_user_pass)
        response = self.client.post(self.url, format='json', data=self.data)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_disallows_project_member_to_create_doc(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.post(self.url, format='json', data=self.data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    @classmethod
    def doCleanups(cls):
        remove_all_role_mappings()
class TestDocumentDetailAPI(APITestCase):
    """Document detail endpoint: members may read, only the superuser may
    update or delete, and all other combinations yield 403."""

    @classmethod
    def setUpTestData(cls):
        cls.project_member_name = 'project_member_name'
        cls.project_member_pass = 'project_member_pass'
        cls.non_project_member_name = 'non_project_member_name'
        cls.non_project_member_pass = 'non_project_member_pass'
        cls.super_user_name = 'super_user_name'
        cls.super_user_pass = 'super_user_pass'
        create_default_roles()
        member = User.objects.create_user(
            username=cls.project_member_name, password=cls.project_member_pass)
        # Valid account with no membership in the project under test.
        User.objects.create_user(
            username=cls.non_project_member_name, password=cls.non_project_member_pass)
        # TODO: change super_user to project_admin.
        root = User.objects.create_superuser(
            username=cls.super_user_name, password=cls.super_user_pass,
            email='fizz@buzz.com')
        project = mommy.make('TextClassificationProject', users=[member, root])
        cls.doc = mommy.make('Document', project=project)
        cls.url = reverse(viewname='doc_detail', args=[project.id, cls.doc.id])
        cls.data = {'text': 'example'}
        assign_user_to_role(project_member=member, project=project,
                            role_name=settings.ROLE_ANNOTATOR)

    def test_returns_doc_to_project_member(self):
        self.client.login(username=self.project_member_name, password=self.project_member_pass)
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.data['id'], self.doc.id)

    def test_do_not_return_doc_to_non_project_member(self):
        self.client.login(username=self.non_project_member_name, password=self.non_project_member_pass)
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_allows_superuser_to_update_doc(self):
        self.client.login(username=self.super_user_name, password=self.super_user_pass)
        response = self.client.patch(self.url, format='json', data=self.data)
        self.assertEqual(response.data['text'], self.data['text'])

    def test_disallows_project_member_to_update_doc(self):
        self.client.login(username=self.project_member_name, password=self.project_member_pass)
        response = self.client.patch(self.url, format='json', data=self.data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_allows_superuser_to_delete_doc(self):
        self.client.login(username=self.super_user_name, password=self.super_user_pass)
        response = self.client.delete(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_disallows_project_member_to_delete_doc(self):
        self.client.login(username=self.project_member_name, password=self.project_member_pass)
        response = self.client.delete(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    @classmethod
    def doCleanups(cls):
        remove_all_role_mappings()
class TestApproveLabelsAPI(APITestCase):
    """Label-approval endpoint: project admins and approvers may toggle a
    document's approval; plain annotators receive 403."""

    @classmethod
    def setUpTestData(cls):
        cls.annotator_name = 'annotator_name'
        cls.annotator_pass = 'annotator_pass'
        cls.approver_name = 'approver_name_name'
        cls.approver_pass = 'approver_pass'
        cls.project_admin_name = 'project_admin_name'
        cls.project_admin_pass = 'project_admin_pass'
        # (username, password, role) spec for the three personas.
        specs = [
            (cls.annotator_name, cls.annotator_pass, settings.ROLE_ANNOTATOR),
            (cls.approver_name, cls.approver_pass, settings.ROLE_ANNOTATION_APPROVER),
            (cls.project_admin_name, cls.project_admin_pass, settings.ROLE_PROJECT_ADMIN),
        ]
        members = [User.objects.create_user(username=name, password=password)
                   for name, password, _ in specs]
        project = mommy.make('TextClassificationProject', users=members)
        cls.doc = mommy.make('Document', project=project)
        cls.url = reverse(viewname='approve_labels', args=[project.id, cls.doc.id])
        create_default_roles()
        for member, (_, _, role) in zip(members, specs):
            assign_user_to_role(project_member=member, project=project,
                                role_name=role)

    def _toggle_approval(self, approved):
        # POST the approval flag and return the raw response.
        return self.client.post(self.url, format='json', data={'approved': approved})

    def test_allow_project_admin_to_approve_and_disapprove_labels(self):
        self.client.login(username=self.project_admin_name, password=self.project_admin_pass)
        self.assertEqual(self._toggle_approval(True).data['annotation_approver'],
                         self.project_admin_name)
        self.assertIsNone(self._toggle_approval(False).data['annotation_approver'])

    def test_allow_approver_to_approve_and_disapprove_labels(self):
        self.client.login(username=self.approver_name, password=self.approver_pass)
        self.assertEqual(self._toggle_approval(True).data['annotation_approver'],
                         self.approver_name)
        self.assertIsNone(self._toggle_approval(False).data['annotation_approver'])

    def test_disallows_non_annotation_approver_to_approve_and_disapprove_labels(self):
        self.client.login(username=self.annotator_name, password=self.annotator_pass)
        self.assertEqual(self._toggle_approval(True).status_code,
                         status.HTTP_403_FORBIDDEN)

    @classmethod
    def doCleanups(cls):
        remove_all_role_mappings()
class TestAnnotationListAPI(APITestCase, TestUtilsMixin):
@classmethod
def setUpTestData(cls):
cls.project_member_name = 'project_member_name'
cls.project_member_pass = 'project_member_pass'
cls.another_project_member_name = 'another_project_member_name'
cls.another_project_member_pass = 'another_project_member_pass'
cls.non_project_member_name = 'non_project_member_name'
cls.non_project_member_pass = 'non_project_member_pass'
create_default_roles()
project_member = User.objects.create_user(username=cls.project_member_name,
password=cls.project_member_pass)
another_project_member = User.objects.create_user(username=cls.another_project_member_name,
password=cls.another_project_member_pass)
non_project_member = User.objects.create_user(username=cls.non_project_member_name,
password=cls.non_project_member_pass)
main_project = mommy.make('SequenceLabelingProject', users=[project_member, another_project_member])
main_project_label = mommy.make('Label', project=main_project)
main_project_doc = mommy.make('Document', project=main_project)
mommy.make('SequenceAnnotation', document=main_project_doc, user=project_member)
mommy.make('SequenceAnnotation', document=main_project_doc, user=another_project_member)
sub_project = mommy.make('SequenceLabelingProject', users=[non_project_member])
sub_project_doc = mommy.make('Document', project=sub_project)
mommy.make('SequenceAnnotation', document=sub_project_doc)
cls.classification_project = mommy.make('TextClassificationProject',
users=[project_member, another_project_member])
cls.classification_project_label_1 = mommy.make('Label', project=cls.classification_project)
cls.classification_project_label_2 = mommy.make('Label', project=cls.classification_project)
cls.classification_project_document = mommy.make('Document', project=cls.classification_project)
cls.classification_project_url = reverse(
viewname='annotation_list', args=[cls.classification_project.id, cls.classification_project_document.id])
assign_user_to_role(project_member=project_member, project=cls.classification_project,
role_name=settings.ROLE_ANNOTATOR)
assign_user_to_role(project_member=another_project_member, project=cls.classification_project,
role_name=settings.ROLE_ANNOTATOR)
cls.url = reverse(viewname='annotation_list', args=[main_project.id, main_project_doc.id])
cls.post_data = {'start_offset': 0, 'end_offset': 1, 'label': main_project_label.id}
cls.num_entity_of_project_member = SequenceAnnotation.objects.filter(document=main_project_doc,
user=project_member).count()
cls.num_entity_of_another_project_member = SequenceAnnotation.objects.filter(
document=main_project_doc,
user=another_project_member).count()
cls.main_project = main_project
assign_user_to_role(project_member=project_member, project=main_project,
role_name=settings.ROLE_ANNOTATOR)
def test_returns_annotations_to_project_member(self):
self.client.login(username=self.project_member_name,
password=self.project_member_pass)
response = self.client.get(self.url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_do_not_return_annotations_to_non_project_member(self):
self.client.login(username=self.non_project_member_name,
password=self.non_project_member_pass)
response = self.client.get(self.url, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_do_not_return_annotations_of_another_project_member(self):
self.client.login(username=self.project_member_name,
password=self.project_member_pass)
response = self.client.get(self.url, format='json')
self.assertEqual(len(response.data), self.num_entity_of_project_member)
def test_returns_annotations_of_another_project_member_if_collaborative_project(self):
self._patch_project(self.main_project, 'collaborative_annotation', True)
self.client.login(username=self.project_member_name,
password=self.project_member_pass)
response = self.client.get(self.url, format='json')
self.assertEqual(len(response.data),
self.num_entity_of_project_member + self.num_entity_of_another_project_member)
def test_allows_project_member_to_create_annotation(self):
self.client.login(username=self.project_member_name,
password=self.project_member_pass)
response = self.client.post(self.url, format='json', data=self.post_data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_disallows_non_project_member_to_create_annotation(self):
self.client.login(username=self.non_project_member_name,
password=self.non_project_member_pass)
response = self.client.post(self.url, format='json', data=self.post_data)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_disallows_second_annotation_for_single_class_project(self):
self._patch_project(self.classification_project, 'single_class_classification', True)
self.client.login(username=self.project_member_name, password=self.project_member_pass)
response = self.client.post(self.classification_project_url, format='json',
data={'label': self.classification_project_label_1.id})
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
response = self.client.post(self.classification_project_url, format='json',
data={'label': self.classification_project_label_2.id})
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_disallows_second_annotation_for_single_class_shared_project(self):
self._patch_project(self.classification_project, 'single_class_classification', True)
self._patch_project(self.classification_project, 'collaborative_annotation', True)
self.client.login(username=self.project_member_name, password=self.project_member_pass)
response = self.client.post(self.classification_project_url, format='json',
data={'label': self.classification_project_label_1.id})
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.client.login(username=self.another_project_member_name, password=self.another_project_member_pass)
response = self.client.post(self.classification_project_url, format='json',
data={'label': self.classification_project_label_2.id})
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def _patch_project(self, project, attribute, value):
old_value = getattr(project, attribute, None)
setattr(project, attribute, value)
project.save()
def cleanup_project():
setattr(project, attribute, old_value)
project.save()
self.addCleanup(cleanup_project)
    @classmethod
    def doCleanups(cls):
        # Role mappings are created in class-level fixtures, so tear them all
        # down here rather than per test.
        remove_all_role_mappings()
class TestAnnotationDetailAPI(APITestCase):
    """Retrieve/update/delete permission checks for a single annotation.

    Covers a normal project (an annotation is visible/editable only by its
    author, plus superusers for deletion) and a collaborative project (any
    member may update or delete any member's annotation).
    """

    @classmethod
    def setUpTestData(cls):
        cls.super_user_name = 'super_user_name'
        cls.super_user_pass = 'super_user_pass'
        cls.project_member_name = 'project_member_name'
        cls.project_member_pass = 'project_member_pass'
        cls.another_project_member_name = 'another_project_member_name'
        cls.another_project_member_pass = 'another_project_member_pass'
        cls.non_project_member_name = 'non_project_member_name'
        cls.non_project_member_pass = 'non_project_member_pass'
        # Todo: change super_user to project_admin.
        super_user = User.objects.create_superuser(username=cls.super_user_name,
                                                   password=cls.super_user_pass,
                                                   email='fizz@buzz.com')
        create_default_roles()
        project_member = User.objects.create_user(username=cls.project_member_name,
                                                  password=cls.project_member_pass)
        another_project_member = User.objects.create_user(username=cls.another_project_member_name,
                                                          password=cls.another_project_member_pass)
        non_project_member = User.objects.create_user(username=cls.non_project_member_name,
                                                      password=cls.non_project_member_pass)
        main_project = mommy.make('SequenceLabelingProject',
                                  users=[super_user, project_member, another_project_member])
        main_project_doc = mommy.make('Document', project=main_project)
        # One annotation per member so ownership rules can be exercised.
        main_project_entity = mommy.make('SequenceAnnotation',
                                         document=main_project_doc, user=project_member)
        another_entity = mommy.make('SequenceAnnotation',
                                    document=main_project_doc, user=another_project_member)
        # Collaborative project: members may touch each other's annotations.
        shared_project = mommy.make('SequenceLabelingProject',
                                    collaborative_annotation=True,
                                    users=[project_member, another_project_member])
        shared_project_doc = mommy.make('Document', project=shared_project)
        shared_entity = mommy.make('SequenceAnnotation', document=shared_project_doc,
                                   user=another_project_member)
        # url         -> project_member's own annotation
        # another_url -> another member's annotation in the same project
        # shared_url  -> another member's annotation in the collaborative project
        cls.url = reverse(viewname='annotation_detail', args=[main_project.id,
                                                              main_project_doc.id,
                                                              main_project_entity.id])
        cls.another_url = reverse(viewname='annotation_detail', args=[main_project.id,
                                                                     main_project_doc.id,
                                                                     another_entity.id])
        cls.shared_url = reverse(viewname='annotation_detail', args=[shared_project.id,
                                                                    shared_project_doc.id,
                                                                    shared_entity.id])
        cls.post_data = {'start_offset': 0, 'end_offset': 10}
        assign_user_to_role(project_member=project_member, project=main_project,
                            role_name=settings.ROLE_ANNOTATOR)
        assign_user_to_role(project_member=project_member, project=shared_project,
                            role_name=settings.ROLE_ANNOTATOR)

    def test_returns_annotation_to_project_member(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_do_not_return_annotation_to_non_project_member(self):
        self.client.login(username=self.non_project_member_name,
                          password=self.non_project_member_pass)
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_do_not_return_annotation_by_another_project_member(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.get(self.another_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_allows_project_member_to_update_annotation(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.patch(self.url, format='json', data=self.post_data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_disallows_non_project_member_to_update_annotation(self):
        self.client.login(username=self.non_project_member_name,
                          password=self.non_project_member_pass)
        response = self.client.patch(self.url, format='json', data=self.post_data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_disallows_project_member_to_update_annotation_of_another_member(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.patch(self.another_url, format='json', data=self.post_data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_allows_superuser_to_delete_annotation_of_another_member(self):
        self.client.login(username=self.super_user_name,
                          password=self.super_user_pass)
        response = self.client.delete(self.another_url, format='json', data=self.post_data)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_allows_project_member_to_delete_annotation(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.delete(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_disallows_non_project_member_to_delete_annotation(self):
        # Renamed from test_disallows_project_member_to_delete_annotation:
        # it logs in as the NON-member, mirroring the update test above.
        self.client.login(username=self.non_project_member_name,
                          password=self.non_project_member_pass)
        response = self.client.delete(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_disallows_project_member_to_delete_annotation_of_another_member(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.delete(self.another_url, format='json', data=self.post_data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_allow_member_to_update_others_annotation_in_shared_project(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.patch(self.shared_url, format='json', data=self.post_data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_allow_member_to_delete_others_annotation_in_shared_project(self):
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.delete(self.shared_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    @classmethod
    def doCleanups(cls):
        # Role mappings are created in class-level fixtures; drop them all here.
        remove_all_role_mappings()
class TestSearch(APITestCase):
    """Full-text search ('q') and ordering on the document list endpoint."""

    @classmethod
    def setUpTestData(cls):
        cls.project_member_name = 'project_member_name'
        cls.project_member_pass = 'project_member_pass'
        cls.non_project_member_name = 'non_project_member_name'
        cls.non_project_member_pass = 'non_project_member_pass'
        create_default_roles()
        project_member = User.objects.create_user(username=cls.project_member_name,
                                                  password=cls.project_member_pass)
        non_project_member = User.objects.create_user(username=cls.non_project_member_name,
                                                      password=cls.non_project_member_pass)
        cls.main_project = mommy.make('TextClassificationProject', users=[project_member])
        cls.search_term = 'example'
        # Creation order matters: the created_at ordering tests below compare
        # the response against querysets ordered by creation timestamp.
        doc1 = mommy.make('Document', text=cls.search_term, project=cls.main_project)
        doc2 = mommy.make('Document', text='Lorem', project=cls.main_project)
        label1 = mommy.make('Label', project=cls.main_project)
        label2 = mommy.make('Label', project=cls.main_project)
        # NOTE(review): SequenceAnnotation on a TextClassificationProject looks
        # odd, but the ordering tests below key on seq_annotations__updated_at
        # -- confirm this pairing is intentional.
        mommy.make('SequenceAnnotation', document=doc1, user=project_member, label=label1)
        mommy.make('SequenceAnnotation', document=doc2, user=project_member, label=label2)
        # A second project also containing the search term, to show results
        # are scoped to the requested project only.
        sub_project = mommy.make('TextClassificationProject', users=[non_project_member])
        mommy.make('Document', text=cls.search_term, project=sub_project)
        cls.url = reverse(viewname='doc_list', args=[cls.main_project.id])
        cls.data = {'q': cls.search_term}
        assign_user_to_role(project_member=project_member, project=cls.main_project,
                            role_name=settings.ROLE_ANNOTATOR)

    def test_can_filter_doc_by_term(self):
        """'q' returns only matching documents of the requested project."""
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.get(self.url, format='json', data=self.data)
        count = Document.objects.filter(text__contains=self.search_term,
                                        project=self.main_project).count()
        self.assertEqual(response.data['count'], count)

    def test_can_order_doc_by_created_at_ascending(self):
        params = {'ordering': 'created_at'}
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.get(self.url, format='json', data=params)
        docs = Document.objects.filter(project=self.main_project).order_by('created_at').values()
        for d1, d2 in zip(response.data['results'], docs):
            self.assertEqual(d1['id'], d2['id'])

    def test_can_order_doc_by_created_at_descending(self):
        params = {'ordering': '-created_at'}
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.get(self.url, format='json', data=params)
        docs = Document.objects.filter(project=self.main_project).order_by('-created_at').values()
        for d1, d2 in zip(response.data['results'], docs):
            self.assertEqual(d1['id'], d2['id'])

    def test_can_order_doc_by_annotation_updated_at_ascending(self):
        params = {'ordering': 'seq_annotations__updated_at'}
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.get(self.url, format='json', data=params)
        docs = Document.objects.filter(project=self.main_project).order_by('seq_annotations__updated_at').values()
        for d1, d2 in zip(response.data['results'], docs):
            self.assertEqual(d1['id'], d2['id'])

    def test_can_order_doc_by_annotation_updated_at_descending(self):
        params = {'ordering': '-seq_annotations__updated_at'}
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.get(self.url, format='json', data=params)
        docs = Document.objects.filter(project=self.main_project).order_by('-seq_annotations__updated_at').values()
        for d1, d2 in zip(response.data['results'], docs):
            self.assertEqual(d1['id'], d2['id'])

    @classmethod
    def doCleanups(cls):
        # Role mappings are created in class-level fixtures; drop them all here.
        remove_all_role_mappings()
class TestFilter(APITestCase):
    """Filtering the document list by label and by annotated/unannotated state."""

    @classmethod
    def setUpTestData(cls):
        cls.project_member_name = 'project_member_name'
        cls.project_member_pass = 'project_member_pass'
        create_default_roles()
        project_member = User.objects.create_user(username=cls.project_member_name,
                                                  password=cls.project_member_pass)
        cls.main_project = mommy.make('SequenceLabelingProject', users=[project_member])
        cls.label1 = mommy.make('Label', project=cls.main_project)
        cls.label2 = mommy.make('Label', project=cls.main_project)
        # Two annotated documents plus one deliberately left unannotated so
        # the isnull filters below have data on both sides.
        doc1 = mommy.make('Document', project=cls.main_project)
        doc2 = mommy.make('Document', project=cls.main_project)
        mommy.make('Document', project=cls.main_project)
        mommy.make('SequenceAnnotation', document=doc1, user=project_member, label=cls.label1)
        mommy.make('SequenceAnnotation', document=doc2, user=project_member, label=cls.label2)
        cls.url = reverse(viewname='doc_list', args=[cls.main_project.id])
        cls.params = {'seq_annotations__label__id': cls.label1.id}
        assign_user_to_role(project_member=project_member, project=cls.main_project,
                            role_name=settings.ROLE_ANNOTATOR)

    def test_can_filter_by_label(self):
        """Only documents carrying label1 come back when filtering on its id."""
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.get(self.url, format='json', data=self.params)
        docs = Document.objects.filter(project=self.main_project,
                                       seq_annotations__label__id=self.label1.id).values()
        for d1, d2 in zip(response.data['results'], docs):
            self.assertEqual(d1['id'], d2['id'])

    def test_can_filter_doc_with_annotation(self):
        params = {'seq_annotations__isnull': False}
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.get(self.url, format='json', data=params)
        docs = Document.objects.filter(project=self.main_project, seq_annotations__isnull=False).values()
        self.assertEqual(response.data['count'], docs.count())
        for d1, d2 in zip(response.data['results'], docs):
            self.assertEqual(d1['id'], d2['id'])

    def test_can_filter_doc_without_anotation(self):
        params = {'seq_annotations__isnull': True}
        self.client.login(username=self.project_member_name,
                          password=self.project_member_pass)
        response = self.client.get(self.url, format='json', data=params)
        docs = Document.objects.filter(project=self.main_project, seq_annotations__isnull=True).values()
        self.assertEqual(response.data['count'], docs.count())
        for d1, d2 in zip(response.data['results'], docs):
            self.assertEqual(d1['id'], d2['id'])

    @classmethod
    def doCleanups(cls):
        # Role mappings are created in class-level fixtures; drop them all here.
        remove_all_role_mappings()
class TestUploader(APITestCase):
    """Uploads fixture files from DATA_DIR to each project type and checks the
    response status, plus (for some formats) the labels created as a side
    effect. Subclassed by TestCloudUploader, which re-runs every test through
    the cloud upload endpoint by overriding upload_test_helper."""

    @classmethod
    def setUpTestData(cls):
        cls.super_user_name = 'super_user_name'
        cls.super_user_pass = 'super_user_pass'
        # Todo: change super_user to project_admin.
        create_default_roles()
        super_user = User.objects.create_superuser(username=cls.super_user_name,
                                                   password=cls.super_user_pass,
                                                   email='fizz@buzz.com')
        cls.classification_project = mommy.make('TextClassificationProject',
                                                users=[super_user], project_type=DOCUMENT_CLASSIFICATION)
        cls.labeling_project = mommy.make('SequenceLabelingProject',
                                          users=[super_user], project_type=SEQUENCE_LABELING)
        cls.seq2seq_project = mommy.make('Seq2seqProject', users=[super_user], project_type=SEQ2SEQ)
        assign_user_to_role(project_member=super_user, project=cls.classification_project,
                            role_name=settings.ROLE_PROJECT_ADMIN)
        assign_user_to_role(project_member=super_user, project=cls.labeling_project,
                            role_name=settings.ROLE_PROJECT_ADMIN)
        assign_user_to_role(project_member=super_user, project=cls.seq2seq_project,
                            role_name=settings.ROLE_PROJECT_ADMIN)

    def setUp(self):
        # Every test runs as the superuser/project admin.
        self.client.login(username=self.super_user_name,
                          password=self.super_user_pass)

    def upload_test_helper(self, project_id, filename, file_format, expected_status, **kwargs):
        """POST the fixture file to the doc_uploader view and assert the status.

        ``**kwargs`` is unused here; it exists so TestCloudUploader's override
        can accept extra query parameters while inherited tests still run.
        """
        url = reverse(viewname='doc_uploader', args=[project_id])
        with open(os.path.join(DATA_DIR, filename), 'rb') as f:
            response = self.client.post(url, data={'file': f, 'format': file_format})
        self.assertEqual(response.status_code, expected_status)

    def label_test_helper(self, project_id, expected_labels, expected_label_keys):
        """Assert the project's labels match ``expected_labels`` (compared only
        on the keys those dicts contain) and that every label also carries each
        key in ``expected_label_keys`` with a non-None value."""
        url = reverse(viewname='label_list', args=[project_id])
        expected_keys = {key for label in expected_labels for key in label}
        response = self.client.get(url).json()
        actual_labels = [{key: value for (key, value) in label.items() if key in expected_keys}
                         for label in response]
        self.assertCountEqual(actual_labels, expected_labels)
        for label in response:
            for expected_label_key in expected_label_keys:
                self.assertIsNotNone(label.get(expected_label_key))

    def test_can_upload_conll_format_file(self):
        self.upload_test_helper(project_id=self.labeling_project.id,
                                filename='labeling.conll',
                                file_format='conll',
                                expected_status=status.HTTP_201_CREATED)

    def test_cannot_upload_wrong_conll_format_file(self):
        self.upload_test_helper(project_id=self.labeling_project.id,
                                filename='labeling.invalid.conll',
                                file_format='conll',
                                expected_status=status.HTTP_400_BAD_REQUEST)

    def test_can_upload_classification_csv(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='example.csv',
                                file_format='csv',
                                expected_status=status.HTTP_201_CREATED)

    def test_can_upload_classification_csv_with_out_of_order_columns(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='example_out_of_order_columns.csv',
                                file_format='csv',
                                expected_status=status.HTTP_201_CREATED)
        self.label_test_helper(
            project_id=self.classification_project.id,
            expected_labels=[
                {'text': 'Positive'},
                {'text': 'Negative'},
            ],
            expected_label_keys=[],
        )

    def test_can_upload_csv_with_non_utf8_encoding(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='example.utf16.csv',
                                file_format='csv',
                                expected_status=status.HTTP_201_CREATED)

    def test_can_upload_seq2seq_csv(self):
        self.upload_test_helper(project_id=self.seq2seq_project.id,
                                filename='example.csv',
                                file_format='csv',
                                expected_status=status.HTTP_201_CREATED)

    def test_can_upload_single_column_csv(self):
        self.upload_test_helper(project_id=self.seq2seq_project.id,
                                filename='example_one_column.csv',
                                file_format='csv',
                                expected_status=status.HTTP_201_CREATED)

    def test_can_upload_csv_file_does_not_match_column_and_row(self):
        # Mismatched rows are tolerated, not rejected.
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='example_column_and_row_not_matching.csv',
                                file_format='csv',
                                expected_status=status.HTTP_201_CREATED)

    def test_cannot_upload_csv_file_has_too_many_columns(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='example.invalid.2.csv',
                                file_format='csv',
                                expected_status=status.HTTP_400_BAD_REQUEST)

    def test_can_upload_classification_excel(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='example.xlsx',
                                file_format='excel',
                                expected_status=status.HTTP_201_CREATED)

    def test_can_upload_seq2seq_excel(self):
        self.upload_test_helper(project_id=self.seq2seq_project.id,
                                filename='example.xlsx',
                                file_format='excel',
                                expected_status=status.HTTP_201_CREATED)

    def test_can_upload_single_column_excel(self):
        self.upload_test_helper(project_id=self.seq2seq_project.id,
                                filename='example_one_column.xlsx',
                                file_format='excel',
                                expected_status=status.HTTP_201_CREATED)

    def test_can_upload_excel_file_does_not_match_column_and_row(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='example_column_and_row_not_matching.xlsx',
                                file_format='excel',
                                expected_status=status.HTTP_201_CREATED)

    def test_cannot_upload_excel_file_has_too_many_columns(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='example.invalid.2.xlsx',
                                file_format='excel',
                                expected_status=status.HTTP_400_BAD_REQUEST)

    @override_settings(IMPORT_BATCH_SIZE=1)
    def test_can_upload_small_batch_size(self):
        # Forces one record per batch to exercise the batching path.
        self.upload_test_helper(project_id=self.seq2seq_project.id,
                                filename='example_one_column_no_header.xlsx',
                                file_format='excel',
                                expected_status=status.HTTP_201_CREATED)

    def test_can_upload_classification_jsonl(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='classification.jsonl',
                                file_format='json',
                                expected_status=status.HTTP_201_CREATED)
        self.label_test_helper(
            project_id=self.classification_project.id,
            expected_labels=[
                {'text': 'positive', 'suffix_key': 'p', 'prefix_key': None},
                {'text': 'negative', 'suffix_key': 'n', 'prefix_key': None},
                {'text': 'neutral', 'suffix_key': 'n', 'prefix_key': 'ctrl'},
            ],
            expected_label_keys=[
                'background_color',
                'text_color',
            ])

    def test_can_upload_labeling_jsonl(self):
        self.upload_test_helper(project_id=self.labeling_project.id,
                                filename='labeling.jsonl',
                                file_format='json',
                                expected_status=status.HTTP_201_CREATED)
        self.label_test_helper(
            project_id=self.labeling_project.id,
            expected_labels=[
                {'text': 'LOC', 'suffix_key': 'l', 'prefix_key': None},
                {'text': 'ORG', 'suffix_key': 'o', 'prefix_key': None},
                {'text': 'PER', 'suffix_key': 'p', 'prefix_key': None},
            ],
            expected_label_keys=[
                'background_color',
                'text_color',
            ])

    def test_can_upload_seq2seq_jsonl(self):
        self.upload_test_helper(project_id=self.seq2seq_project.id,
                                filename='seq2seq.jsonl',
                                file_format='json',
                                expected_status=status.HTTP_201_CREATED)

    def test_can_upload_plain_text(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='example.txt',
                                file_format='plain',
                                expected_status=status.HTTP_201_CREATED)

    def test_can_upload_data_without_label(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='example.jsonl',
                                file_format='json',
                                expected_status=status.HTTP_201_CREATED)

    @classmethod
    def doCleanups(cls):
        # Role mappings are created in class-level fixtures; drop them all here.
        remove_all_role_mappings()
@override_settings(CLOUD_BROWSER_APACHE_LIBCLOUD_PROVIDER='LOCAL')
@override_settings(CLOUD_BROWSER_APACHE_LIBCLOUD_ACCOUNT=os.path.dirname(DATA_DIR))
@override_settings(CLOUD_BROWSER_APACHE_LIBCLOUD_SECRET_KEY='not-used')
class TestCloudUploader(TestUploader):
    """Re-runs every inherited TestUploader case through the cloud_uploader
    view, backed by a LOCAL libcloud provider rooted at the test-data
    directory, by overriding upload_test_helper only."""

    def upload_test_helper(self, project_id, filename, file_format, expected_status, **kwargs):
        # Extra kwargs (e.g. 'container', 'next') pass straight through as
        # query parameters; 'container' defaults to the DATA_DIR basename.
        query_params = {
            'project_id': project_id,
            'upload_format': file_format,
            'container': kwargs.pop('container', os.path.basename(DATA_DIR)),
            'object': filename,
        }
        query_params.update(kwargs)
        response = self.client.get(reverse('cloud_uploader'), query_params)
        self.assertEqual(response.status_code, expected_status)

    def test_cannot_upload_with_missing_file(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='does-not-exist',
                                file_format='json',
                                expected_status=status.HTTP_400_BAD_REQUEST)

    def test_cannot_upload_with_missing_container(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='example.jsonl',
                                container='does-not-exist',
                                file_format='json',
                                expected_status=status.HTTP_400_BAD_REQUEST)

    def test_cannot_upload_with_missing_query_parameters(self):
        self.client.get(reverse('cloud_uploader'), {'project_id': self.classification_project.id})
        response = self.client.get(reverse('cloud_uploader'), {'project_id': self.classification_project.id})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_can_upload_with_redirect(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='example.jsonl',
                                next='http://somewhere',
                                file_format='json',
                                expected_status=status.HTTP_302_FOUND)

    def test_can_upload_with_redirect_to_blank(self):
        self.upload_test_helper(project_id=self.classification_project.id,
                                filename='example.jsonl',
                                next='about:blank',
                                file_format='json',
                                expected_status=status.HTTP_201_CREATED)
class TestFeatures(APITestCase):
    """Checks the feature-flag endpoint's response."""

    @classmethod
    def setUpTestData(cls):
        cls.user_name = 'user_name'
        cls.user_pass = 'user_pass'
        create_default_roles()
        cls.user = User.objects.create_user(
            username=cls.user_name, password=cls.user_pass, email='fizz@buzz.com')

    def setUp(self):
        self.client.login(username=self.user_name, password=self.user_pass)

    @override_settings(CLOUD_BROWSER_APACHE_LIBCLOUD_PROVIDER=None)
    def test_no_cloud_upload(self):
        """cloud_upload is reported falsy when no libcloud provider is configured."""
        payload = self.client.get(reverse('features')).json()
        self.assertFalse(payload.get('cloud_upload'))
@override_settings(IMPORT_BATCH_SIZE=2)
class TestParser(APITestCase):
    """Feeds fixture files to each upload parser and validates the records."""

    def parser_helper(self, filename, parser, include_label=True):
        """Parse the DATA_DIR fixture with ``parser``; assert every record has
        'text' (and 'labels' when ``include_label``). Returns the batches."""
        path = os.path.join(DATA_DIR, filename)
        with open(path, mode='rb') as f:
            batches = list(parser.parse(f))
        for batch in batches:
            for record in batch:
                self.assertIn('text', record)
                if include_label:
                    self.assertIn('labels', record)
        return batches

    def test_give_valid_data_to_conll_parser(self):
        self.parser_helper(filename='labeling.conll', parser=CoNLLParser())

    def test_give_valid_data_to_conll_parser_with_trailing_newlines(self):
        batches = self.parser_helper(filename='labeling.trailing.conll', parser=CoNLLParser())
        self.assertEqual(len(batches), 1)
        self.assertEqual(len(batches[0]), 1)

    def test_plain_parser(self):
        self.parser_helper(filename='example.txt', parser=PlainTextParser(), include_label=False)

    def test_give_invalid_data_to_conll_parser(self):
        with self.assertRaises(FileParseException):
            self.parser_helper(filename='labeling.invalid.conll', parser=CoNLLParser())

    def test_give_classification_data_to_csv_parser(self):
        self.parser_helper(filename='example.csv', parser=CSVParser(), include_label=False)

    def test_give_seq2seq_data_to_csv_parser(self):
        self.parser_helper(filename='example.csv', parser=CSVParser(), include_label=False)

    def test_give_classification_data_to_json_parser(self):
        self.parser_helper(filename='classification.jsonl', parser=JSONParser())

    def test_give_labeling_data_to_json_parser(self):
        self.parser_helper(filename='labeling.jsonl', parser=JSONParser())

    def test_give_seq2seq_data_to_json_parser(self):
        self.parser_helper(filename='seq2seq.jsonl', parser=JSONParser())

    def test_give_data_without_label_to_json_parser(self):
        self.parser_helper(filename='example.jsonl', parser=JSONParser(), include_label=False)

    def test_give_labeling_data_to_fasttext_parser(self):
        self.parser_helper(filename='example_fasttext.txt', parser=FastTextParser())

    def test_give_data_without_label_name_to_fasttext_parser(self):
        with self.assertRaises(FileParseException):
            self.parser_helper(filename='example_fasttext_label_tag_without_name.txt',
                               parser=FastTextParser())

    def test_give_data_without_text_to_fasttext_parser(self):
        with self.assertRaises(FileParseException):
            self.parser_helper(filename='example_fasttext_without_text.txt',
                               parser=FastTextParser())
class TestDownloader(APITestCase):
    """Checks which export formats each project type can be downloaded in."""

    @classmethod
    def setUpTestData(cls):
        cls.super_user_name = 'super_user_name'
        cls.super_user_pass = 'super_user_pass'
        # Todo: change super_user to project_admin.
        create_default_roles()
        super_user = User.objects.create_superuser(username=cls.super_user_name,
                                                   password=cls.super_user_pass,
                                                   email='fizz@buzz.com')
        cls.classification_project = mommy.make('TextClassificationProject',
                                                users=[super_user], project_type=DOCUMENT_CLASSIFICATION)
        cls.labeling_project = mommy.make('SequenceLabelingProject',
                                          users=[super_user], project_type=SEQUENCE_LABELING)
        cls.seq2seq_project = mommy.make('Seq2seqProject', users=[super_user], project_type=SEQ2SEQ)
        cls.speech2text_project = mommy.make('Speech2textProject', users=[super_user], project_type=SPEECH2TEXT)
        cls.classification_url = reverse(viewname='doc_downloader', args=[cls.classification_project.id])
        cls.labeling_url = reverse(viewname='doc_downloader', args=[cls.labeling_project.id])
        cls.seq2seq_url = reverse(viewname='doc_downloader', args=[cls.seq2seq_project.id])
        cls.speech2text_url = reverse(viewname='doc_downloader', args=[cls.speech2text_project.id])

    def setUp(self):
        self.client.login(username=self.super_user_name,
                          password=self.super_user_pass)

    def download_test_helper(self, url, file_format, expected_status):
        """GET ``url`` requesting ``file_format`` (sent as the 'q' query param)
        and assert the response status.

        The parameter was renamed from ``format`` to avoid shadowing the
        builtin of that name.
        """
        response = self.client.get(url, data={'q': file_format})
        self.assertEqual(response.status_code, expected_status)

    def test_cannot_download_conll_format_file(self):
        self.download_test_helper(url=self.labeling_url,
                                  file_format='conll',
                                  expected_status=status.HTTP_400_BAD_REQUEST)

    def test_can_download_classification_csv(self):
        self.download_test_helper(url=self.classification_url,
                                  file_format='csv',
                                  expected_status=status.HTTP_200_OK)

    def test_can_download_labeling_csv(self):
        self.download_test_helper(url=self.labeling_url,
                                  file_format='csv',
                                  expected_status=status.HTTP_200_OK)

    def test_can_download_seq2seq_csv(self):
        self.download_test_helper(url=self.seq2seq_url,
                                  file_format='csv',
                                  expected_status=status.HTTP_200_OK)

    def test_can_download_classification_jsonl(self):
        self.download_test_helper(url=self.classification_url,
                                  file_format='json',
                                  expected_status=status.HTTP_200_OK)

    def test_can_download_labeling_jsonl(self):
        self.download_test_helper(url=self.labeling_url,
                                  file_format='json',
                                  expected_status=status.HTTP_200_OK)

    def test_can_download_seq2seq_jsonl(self):
        self.download_test_helper(url=self.seq2seq_url,
                                  file_format='json',
                                  expected_status=status.HTTP_200_OK)

    def test_can_download_speech2text_jsonl(self):
        self.download_test_helper(url=self.speech2text_url,
                                  file_format='json',
                                  expected_status=status.HTTP_200_OK)

    def test_can_download_labelling_jsonl(self):
        self.download_test_helper(url=self.labeling_url,
                                  file_format='jsonl',
                                  expected_status=status.HTTP_200_OK)

    def test_can_download_plain_text(self):
        # Plain-text export is not supported for download.
        self.download_test_helper(url=self.classification_url,
                                  file_format='plain',
                                  expected_status=status.HTTP_400_BAD_REQUEST)
class TestStatisticsAPI(APITestCase, TestUtilsMixin):
    """Tests the project statistics endpoint (progress, label and user counts).

    Uses ``_patch_project`` from TestUtilsMixin -- presumably the same
    temporary-attribute helper defined earlier in this file; verify.
    """

    @classmethod
    def setUpTestData(cls):
        cls.super_user_name = 'super_user_name'
        cls.super_user_pass = 'super_user_pass'
        cls.other_user_name = 'other_user_name'
        cls.other_user_pass = 'other_user_pass'
        create_default_roles()
        # Todo: change super_user to project_admin.
        super_user = User.objects.create_superuser(username=cls.super_user_name,
                                                   password=cls.super_user_pass,
                                                   email='fizz@buzz.com')
        other_user = User.objects.create_user(username=cls.other_user_name,
                                              password=cls.other_user_pass,
                                              email='bar@buzz.com')
        cls.project = mommy.make('TextClassificationProject', users=[super_user, other_user])
        # Each user annotates a different doc: per-user progress leaves one
        # doc "remaining", collaborative progress leaves none.
        doc1 = mommy.make('Document', project=cls.project)
        doc2 = mommy.make('Document', project=cls.project)
        mommy.make('DocumentAnnotation', document=doc1, user=super_user)
        mommy.make('DocumentAnnotation', document=doc2, user=other_user)
        cls.url = reverse(viewname='statistics', args=[cls.project.id])
        cls.doc = Document.objects.filter(project=cls.project)
        assign_user_to_role(project_member=other_user, project=cls.project,
                            role_name=settings.ROLE_ANNOTATOR)

    @classmethod
    def doCleanups(cls):
        # Role mappings are created in class-level fixtures; drop them all here.
        remove_all_role_mappings()

    def test_returns_exact_progress(self):
        self.client.login(username=self.super_user_name,
                          password=self.super_user_pass)
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.data['total'], 2)
        self.assertEqual(response.data['remaining'], 1)

    def test_returns_exact_progress_with_collaborative_annotation(self):
        self._patch_project(self.project, 'collaborative_annotation', True)
        self.client.login(username=self.other_user_name,
                          password=self.other_user_pass)
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.data['total'], 2)
        self.assertEqual(response.data['remaining'], 0)

    def test_returns_user_count(self):
        self.client.login(username=self.super_user_name,
                          password=self.super_user_pass)
        response = self.client.get(self.url, format='json')
        self.assertIn('label', response.data)
        self.assertIsInstance(response.data['label'], dict)

    def test_returns_label_count(self):
        self.client.login(username=self.super_user_name,
                          password=self.super_user_pass)
        response = self.client.get(self.url, format='json')
        self.assertIn('user', response.data)
        self.assertIsInstance(response.data['user'], dict)

    def test_returns_partial_response(self):
        # ?include=user restricts the payload to only the requested section.
        self.client.login(username=self.super_user_name,
                          password=self.super_user_pass)
        response = self.client.get(f'{self.url}?include=user', format='json')
        self.assertEqual(list(response.data.keys()), ['user'])
class TestUserAPI(APITestCase):
    """Smoke test for the user list endpoint."""

    @classmethod
    def setUpTestData(cls):
        cls.super_user_name = 'super_user_name'
        cls.super_user_pass = 'super_user_pass'
        create_default_roles()
        User.objects.create_superuser(
            username=cls.super_user_name,
            password=cls.super_user_pass,
            email='fizz@buzz.com',
        )
        cls.url = reverse(viewname='user_list')

    def test_returns_user_count(self):
        """The single superuser created above is the only user listed."""
        self.client.login(username=self.super_user_name, password=self.super_user_pass)
        res = self.client.get(self.url, format='json')
        self.assertEqual(len(res.data), 1)
class TestRoleAPI(APITestCase):
    """Role creation/listing permissions: only admins may manage roles."""

    @classmethod
    def setUpTestData(cls):
        cls.user_name = 'user_name'
        cls.user_pass = 'user_pass'
        cls.project_admin_name = 'project_admin_name'
        cls.project_admin_pass = 'project_admin_pass'
        create_default_roles()
        cls.user = User.objects.create_user(username=cls.user_name, password=cls.user_pass)
        User.objects.create_superuser(username=cls.project_admin_name,
                                      password=cls.project_admin_pass,
                                      email='fizz@buzz.com')
        cls.url = reverse(viewname='roles')

    def _login_admin(self):
        # Shared login shorthand for the admin-only cases below.
        self.client.login(username=self.project_admin_name, password=self.project_admin_pass)

    def test_cannot_create_multiple_roles_with_same_name(self):
        """A second role with an already-used name is rejected (400)."""
        self._login_admin()
        payload = {'name': 'examplerole', 'description': 'example'}
        self.client.post(self.url, format='json', data=payload)
        duplicate = self.client.post(self.url, format='json', data=dict(payload))
        self.assertEqual(duplicate.status_code, status.HTTP_400_BAD_REQUEST)

    def test_nonadmin_cannot_create_role(self):
        self.client.login(username=self.user_name, password=self.user_pass)
        res = self.client.post(self.url, format='json',
                               data={'name': 'testrole', 'description': 'example'})
        self.assertEqual(res.status_code, status.HTTP_403_FORBIDDEN)

    def test_admin_can_create_role(self):
        self._login_admin()
        res = self.client.post(self.url, format='json',
                               data={'name': 'testrole', 'description': 'example'})
        self.assertEqual(res.status_code, status.HTTP_201_CREATED)

    def test_admin_can_get_roles(self):
        self._login_admin()
        res = self.client.get(self.url, format='json')
        self.assertEqual(res.status_code, status.HTTP_200_OK)
class TestRoleMappingListAPI(APITestCase):
    """Tests listing and creating role mappings on a project."""

    @classmethod
    def setUpTestData(cls):
        cls.project_member_name = 'project_member_name'
        cls.project_member_pass = 'project_member_pass'
        cls.second_project_member_name = 'second_project_member_name'
        cls.second_project_member_pass = 'second_project_member_pass'
        cls.project_admin_name = 'project_admin_name'
        cls.project_admin_pass = 'project_admin_pass'
        create_default_roles()
        member = User.objects.create_user(
            username=cls.project_member_name, password=cls.project_member_pass
        )
        cls.second_project_member = User.objects.create_user(
            username=cls.second_project_member_name,
            password=cls.second_project_member_pass,
        )
        admin = User.objects.create_user(
            username=cls.project_admin_name, password=cls.project_admin_pass
        )
        cls.main_project = mommy.make(
            'Project', users=[member, admin, cls.second_project_member]
        )
        cls.other_project = mommy.make(
            'Project', users=[cls.second_project_member, admin]
        )
        cls.admin_role = Role.objects.get(name=settings.ROLE_PROJECT_ADMIN)
        cls.role = mommy.make('Role', name='otherrole')
        mommy.make(
            'RoleMapping', role=cls.admin_role, project=cls.main_project, user=admin
        )
        cls.data = {
            'user': member.id,
            'role': cls.admin_role.id,
            'project': cls.main_project.id,
        }
        cls.other_url = reverse(
            viewname='rolemapping_list', args=[cls.other_project.id]
        )
        cls.url = reverse(viewname='rolemapping_list', args=[cls.main_project.id])

    def test_returns_mappings_to_project_admin(self):
        """A project admin should be able to list the project's mappings."""
        self.client.login(
            username=self.project_admin_name, password=self.project_admin_pass
        )
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_allows_superuser_to_create_mapping(self):
        """A project admin should be able to create a mapping."""
        self.client.login(
            username=self.project_admin_name, password=self.project_admin_pass
        )
        response = self.client.post(self.url, format='json', data=self.data)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_do_not_allow_nonadmin_to_create_mapping(self):
        """A regular project member must not be able to create a mapping."""
        self.client.login(
            username=self.project_member_name, password=self.project_member_pass
        )
        response = self.client.post(self.url, format='json', data=self.data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_do_not_return_mappings_to_nonadmin(self):
        """A regular project member must not see the mapping list."""
        self.client.login(
            username=self.project_member_name, password=self.project_member_pass
        )
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
class TestRoleMappingDetailAPI(APITestCase):
    """Tests retrieving, updating and deleting a single role mapping."""

    @classmethod
    def setUpTestData(cls):
        cls.project_admin_name = 'project_admin_name'
        cls.project_admin_pass = 'project_admin_pass'
        cls.project_member_name = 'project_member_name'
        cls.project_member_pass = 'project_member_pass'
        cls.non_project_member_name = 'non_project_member_name'
        cls.non_project_member_pass = 'non_project_member_pass'
        create_default_roles()
        admin = User.objects.create_user(
            username=cls.project_admin_name, password=cls.project_admin_pass
        )
        member = User.objects.create_user(
            username=cls.project_member_name, password=cls.project_member_pass
        )
        User.objects.create_user(
            username=cls.non_project_member_name,
            password=cls.non_project_member_pass,
        )
        project = mommy.make('Project', users=[admin, member])
        admin_role = Role.objects.get(name=settings.ROLE_PROJECT_ADMIN)
        annotator_role = Role.objects.get(name=settings.ROLE_ANNOTATOR)
        cls.rolemapping = mommy.make(
            'RoleMapping', role=admin_role, project=project, user=admin
        )
        cls.url = reverse(
            viewname='rolemapping_detail', args=[project.id, cls.rolemapping.id]
        )
        cls.data = {'role': annotator_role.id}

    def test_returns_rolemapping_to_project_member(self):
        """A project admin should be able to fetch the mapping's detail."""
        self.client.login(
            username=self.project_admin_name, password=self.project_admin_pass
        )
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.data['id'], self.rolemapping.id)

    def test_do_not_return_mapping_to_non_project_member(self):
        """A user outside the project must not see the mapping."""
        self.client.login(
            username=self.non_project_member_name,
            password=self.non_project_member_pass,
        )
        response = self.client.get(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_allows_admin_to_update_mapping(self):
        """A project admin should be able to change the mapped role."""
        self.client.login(
            username=self.project_admin_name, password=self.project_admin_pass
        )
        response = self.client.patch(self.url, format='json', data=self.data)
        self.assertEqual(response.data['role'], self.data['role'])

    def test_disallows_project_member_to_update_mapping(self):
        """A regular member must not be able to change the mapping."""
        self.client.login(
            username=self.project_member_name, password=self.project_member_pass
        )
        response = self.client.patch(self.url, format='json', data=self.data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_allows_admin_to_delete_mapping(self):
        """A project admin should be able to delete the mapping."""
        self.client.login(
            username=self.project_admin_name, password=self.project_admin_pass
        )
        response = self.client.delete(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_disallows_project_member_to_delete_mapping(self):
        """A regular member must not be able to delete the mapping."""
        self.client.login(
            username=self.project_member_name, password=self.project_member_pass
        )
        response = self.client.delete(self.url, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
| 52.146232
| 117
| 0.66173
| 10,710
| 92,716
| 5.381419
| 0.035481
| 0.100373
| 0.041884
| 0.039906
| 0.887308
| 0.855851
| 0.81369
| 0.779509
| 0.754351
| 0.728238
| 0
| 0.006599
| 0.244931
| 92,716
| 1,777
| 118
| 52.175577
| 0.816675
| 0.002707
| 0
| 0.619891
| 0
| 0
| 0.077927
| 0.026412
| 0
| 0
| 0
| 0.000563
| 0.089918
| 1
| 0.136921
| false
| 0.141008
| 0.008856
| 0
| 0.162807
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
0a03953c605fb4587c8f0deea8227a8b1c2f9ad9
| 3,822
|
py
|
Python
|
robotframework-ls/src/robotframework_ls/__init__.py
|
DetachHead/robotframework-lsp
|
a82438f45b75f3afbe3f80a970b75ed9065f96a7
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
robotframework-ls/src/robotframework_ls/__init__.py
|
DetachHead/robotframework-lsp
|
a82438f45b75f3afbe3f80a970b75ed9065f96a7
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
robotframework-ls/src/robotframework_ls/__init__.py
|
DetachHead/robotframework-lsp
|
a82438f45b75f3afbe3f80a970b75ed9065f96a7
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
__version__ = "0.28.0"
# Numeric version components parsed from the string above, e.g. [0, 28, 0].
version_info = list(map(int, __version__.split(".")))

import os.path
import sys

# Normalize __file__ to an absolute path and make it point at the .py source
# file rather than a compiled .pyc/.pyo artifact.
__file__ = os.path.abspath(__file__)
if __file__.endswith((".pyc", ".pyo")):
    __file__ = __file__[:-1]
def import_robocorp_ls_core():
    """
    Helper function to make sure that robocorp_ls_core is imported properly
    (either in dev or in release mode).

    If the module is not importable as-is, a source checkout sibling folder or
    the vendored copy is added to sys.path and the import is retried; failing
    that, an ImportError carrying a diagnostic log is raised.
    """
    try:
        import robocorp_ls_core

        return  # Already importable: nothing to do.
    except ImportError:
        pass

    log_contents = []
    use_folder = None
    try:
        # Folder containing this package (the "src" directory).
        src_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        log_contents.append(f"Source folder: {src_folder}")

        # Dev mode: the robocorp-python-ls-core checkout sits next to ours.
        dev_core_src = os.path.abspath(
            os.path.join(src_folder, "..", "..", "robocorp-python-ls-core", "src")
        )
        if os.path.isdir(dev_core_src):
            log_contents.append(f"Dev mode detected. Found: {dev_core_src}")
            use_folder = dev_core_src
        else:
            # Release mode: use the copy vendored inside robotframework_ls.
            vendored = os.path.join(src_folder, "robotframework_ls", "vendored")
            log_contents.append(f"Using vendored mode. Found: {vendored}")
            use_folder = vendored

        assert os.path.isdir(
            use_folder
        ), "Expected: %s to exist and be a directory." % (use_folder,)
        sys.path.append(use_folder)
        import robocorp_ls_core
    except:
        # Best effort: include the chosen folder's contents in the log.
        try:
            if use_folder:
                log_contents.append(
                    "%s contents:\n%s" % (use_folder, os.listdir(use_folder))
                )
        except:
            log_contents.append("Error in os.listdir('%s')." % (use_folder,))
        raise ImportError(
            "Error importing robocorp_ls_core. Log: %s" % "\n".join(log_contents)
        )
def import_rf_interactive():
    """
    Helper function to make sure that robotframework_interactive is imported
    properly (either in dev or in release mode).

    If the module is not importable as-is, a source checkout sibling folder or
    the vendored copy is added to sys.path and the import is retried.

    :raises ImportError:
        If robotframework_interactive cannot be found; the message contains a
        log of the locations that were tried.
    """
    try:
        import robotframework_interactive
    except ImportError:
        log_contents = []
        use_folder = None
        try:
            # Folder containing this package (the "src" directory).
            src_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
            log_contents.append("Source folder: %s" % (src_folder,))
            # Dev mode: the robotframework-interactive checkout sits next to ours.
            src_core_folder = os.path.abspath(
                os.path.join(
                    src_folder, "..", "..", "robotframework-interactive", "src"
                )
            )
            if os.path.isdir(src_core_folder):
                log_contents.append("Dev mode detected. Found: %s" % (src_core_folder,))
                use_folder = src_core_folder
            else:
                # Release mode: use the copy vendored inside robotframework_ls.
                vendored_folder = os.path.join(
                    src_folder, "robotframework_ls", "vendored"
                )
                log_contents.append(
                    "Using vendored mode. Found: %s" % (vendored_folder,)
                )
                use_folder = vendored_folder
            assert os.path.isdir(
                use_folder
            ), "Expected: %s to exist and be a directory." % (use_folder,)
            sys.path.append(use_folder)
            import robotframework_interactive
        except:
            # Best effort: include the chosen folder's contents in the log.
            try:
                if use_folder:
                    log_contents.append(
                        "%s contents:\n%s" % (use_folder, os.listdir(use_folder))
                    )
            except:
                log_contents.append("Error in os.listdir('%s')." % (use_folder,))
            # Fixed copy/paste bug: the message previously named
            # robocorp_ls_core although this function imports
            # robotframework_interactive.
            raise ImportError(
                "Error importing robotframework_interactive. Log: %s"
                % "\n".join(log_contents)
            )
| 34.745455
| 88
| 0.525641
| 401
| 3,822
| 4.708229
| 0.184539
| 0.095339
| 0.090042
| 0.03178
| 0.851695
| 0.836864
| 0.799788
| 0.799788
| 0.799788
| 0.799788
| 0
| 0.002074
| 0.369178
| 3,822
| 109
| 89
| 35.06422
| 0.781004
| 0.05887
| 0
| 0.735632
| 0
| 0
| 0.14896
| 0.013772
| 0
| 0
| 0
| 0
| 0.022989
| 1
| 0.022989
| false
| 0
| 0.16092
| 0
| 0.183908
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a1d2e6a3972255183efb35b7ba92cc9c171b8f3
| 182,158
|
py
|
Python
|
server/www/packages/packages-windows/x86/ldap3/protocol/schemas/edir914.py
|
tinygg/teleport
|
5ac759c707d355767a209e29becaadf250b0e366
|
[
"Apache-2.0"
] | 640
|
2018-09-12T03:14:13.000Z
|
2022-03-30T04:38:09.000Z
|
server/www/packages/packages-windows/x86/ldap3/protocol/schemas/edir914.py
|
tinygg/teleport
|
5ac759c707d355767a209e29becaadf250b0e366
|
[
"Apache-2.0"
] | 175
|
2018-09-10T19:52:20.000Z
|
2022-03-30T04:37:30.000Z
|
server/www/packages/packages-windows/x86/ldap3/protocol/schemas/edir914.py
|
tinygg/teleport
|
5ac759c707d355767a209e29becaadf250b0e366
|
[
"Apache-2.0"
] | 230
|
2018-09-13T02:40:49.000Z
|
2022-03-29T11:53:58.000Z
|
"""
"""
# Created on 2019.08.31
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
edir_9_1_4_schema = """
{
"raw": {
"attributeTypes": [
"( 2.5.4.35 NAME 'userPassword' DESC 'Internal NDS policy forces this to be single-valued' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{128} USAGE directoryOperation )",
"( 2.5.18.1 NAME 'createTimestamp' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.5.18.2 NAME 'modifyTimestamp' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.5.18.10 NAME 'subschemaSubentry' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 USAGE directoryOperation )",
"( 2.5.21.9 NAME 'structuralObjectClass' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.16.840.1.113719.1.27.4.49 NAME 'subordinateCount' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.16.840.1.113719.1.27.4.48 NAME 'entryFlags' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.16.840.1.113719.1.27.4.51 NAME 'federationBoundary' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.5.21.5 NAME 'attributeTypes' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.3 USAGE directoryOperation )",
"( 2.5.21.6 NAME 'objectClasses' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.37 USAGE directoryOperation )",
"( 1.3.6.1.1.20 NAME 'entryDN' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.16.840.1.113719.1.1.4.1.2 NAME 'ACL' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
"( 2.5.4.1 NAME 'aliasedObjectName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Aliased Object Name' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
"( 2.16.840.1.113719.1.1.4.1.6 NAME 'backLink' SYNTAX 2.16.840.1.113719.1.1.5.1.23 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Back Link' X-NDS_SERVER_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.8 NAME 'binderyProperty' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Bindery Property' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.7 NAME 'binderyObjectRestriction' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Bindery Object Restriction' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.9 NAME 'binderyType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Bindery Type' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.11 NAME 'cAPrivateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'CA Private Key' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.12 NAME 'cAPublicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'CA Public Key' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.10 NAME 'Cartridge' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.3 NAME ( 'cn' 'commonName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'CN' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.78 NAME 'printerConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Printer Configuration' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.15 NAME 'Convergence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{1} SINGLE-VALUE X-NDS_UPPER_BOUND '1' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.6 NAME ( 'c' 'countryName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{2} SINGLE-VALUE X-NDS_NAME 'C' X-NDS_LOWER_BOUND '2' X-NDS_UPPER_BOUND '2' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.18 NAME 'defaultQueue' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Default Queue' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.13 NAME ( 'description' 'multiLineDescription' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{1024} X-NDS_NAME 'Description' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '1024' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.64 NAME 'partitionCreationTime' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Partition Creation Time' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.5.4.23 NAME 'facsimileTelephoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.22{64512} X-NDS_NAME 'Facsimile Telephone Number' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.117 NAME 'highConvergenceSyncInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'High Convergence Sync Interval' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.25 NAME 'groupMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Group Membership' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.26 NAME 'ndsHomeDirectory' SYNTAX 2.16.840.1.113719.1.1.5.1.15{255} SINGLE-VALUE X-NDS_NAME 'Home Directory' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '255' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.27 NAME 'hostDevice' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Host Device' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.28 NAME 'hostResourceName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'Host Resource Name' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.29 NAME 'hostServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Host Server' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.30 NAME 'inheritedACL' SYNTAX 2.16.840.1.113719.1.1.5.1.17 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Inherited ACL' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.5.4.7 NAME ( 'l' 'localityname' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'L' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.39 NAME 'loginAllowedTimeMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{42} SINGLE-VALUE X-NDS_NAME 'Login Allowed Time Map' X-NDS_LOWER_BOUND '42' X-NDS_UPPER_BOUND '42' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.40 NAME 'loginDisabled' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Login Disabled' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.41 NAME 'loginExpirationTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Login Expiration Time' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.42 NAME 'loginGraceLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Login Grace Limit' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.43 NAME 'loginGraceRemaining' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_NAME 'Login Grace Remaining' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.44 NAME 'loginIntruderAddress' SYNTAX 2.16.840.1.113719.1.1.5.1.12 SINGLE-VALUE X-NDS_NAME 'Login Intruder Address' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.45 NAME 'loginIntruderAttempts' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_NAME 'Login Intruder Attempts' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.46 NAME 'loginIntruderLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Login Intruder Limit' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.31 NAME 'intruderAttemptResetInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Intruder Attempt Reset Interval' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.47 NAME 'loginIntruderResetTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Login Intruder Reset Time' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.48 NAME 'loginMaximumSimultaneous' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Login Maximum Simultaneous' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.49 NAME 'loginScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Login Script' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.50 NAME 'loginTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Login Time' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.31 NAME ( 'member' 'uniqueMember' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Member' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.52 NAME 'Memory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.22 NAME 'eMailAddress' SYNTAX 2.16.840.1.113719.1.1.5.1.14{64512} X-NDS_NAME 'EMail Address' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.55 NAME 'networkAddress' SYNTAX 2.16.840.1.113719.1.1.5.1.12 X-NDS_NAME 'Network Address' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.56 NAME 'networkAddressRestriction' SYNTAX 2.16.840.1.113719.1.1.5.1.12 X-NDS_NAME 'Network Address Restriction' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.57 NAME 'notify' SYNTAX 2.16.840.1.113719.1.1.5.1.25 X-NDS_NAME 'Notify' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.114 NAME 'Obituary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
"( 2.5.4.0 NAME 'objectClass' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 X-NDS_NAME 'Object Class' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
"( 2.16.840.1.113719.1.1.4.1.59 NAME 'operator' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Operator' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.11 NAME ( 'ou' 'organizationalUnitName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'OU' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.10 NAME ( 'o' 'organizationname' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'O' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.32 NAME 'owner' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Owner' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.63 NAME 'pageDescriptionLanguage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64} X-NDS_NAME 'Page Description Language' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.65 NAME 'passwordsUsed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'Passwords Used' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.66 NAME 'passwordAllowChange' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Password Allow Change' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.67 NAME 'passwordExpirationInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Password Expiration Interval' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.68 NAME 'passwordExpirationTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Password Expiration Time' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.69 NAME 'passwordMinimumLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Password Minimum Length' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.70 NAME 'passwordRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Password Required' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.71 NAME 'passwordUniqueRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Password Unique Required' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.72 NAME 'path' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'Path' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.19 NAME 'physicalDeliveryOfficeName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'Physical Delivery Office Name' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.16 NAME 'postalAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.41{64512} X-NDS_NAME 'Postal Address' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.17 NAME 'postalCode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{40} X-NDS_NAME 'Postal Code' X-NDS_UPPER_BOUND '40' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.18 NAME 'postOfficeBox' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{40} X-NDS_NAME 'Postal Office Box' X-NDS_UPPER_BOUND '40' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.80 NAME 'printJobConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Print Job Configuration' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.79 NAME 'printerControl' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Printer Control' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.82 NAME 'privateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Private Key' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.83 NAME 'Profile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.84 NAME 'publicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Public Key' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_OPERATIONAL '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.85 NAME 'queue' SYNTAX 2.16.840.1.113719.1.1.5.1.25 X-NDS_NAME 'Queue' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.86 NAME 'queueDirectory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{255} SINGLE-VALUE X-NDS_NAME 'Queue Directory' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '255' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.115 NAME 'Reference' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.88 NAME 'Replica' SYNTAX 2.16.840.1.113719.1.1.5.1.16{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.89 NAME 'Resource' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.33 NAME 'roleOccupant' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Role Occupant' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.116 NAME 'higherPrivileges' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Higher Privileges' X-NDS_SERVER_READ '1' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.92 NAME 'securityEquals' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Security Equals' X-NDS_SERVER_READ '1' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
"( 2.5.4.34 NAME 'seeAlso' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'See Also' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.5 NAME 'serialNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64} X-NDS_NAME 'Serial Number' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.95 NAME 'server' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Server' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.8 NAME ( 'st' 'stateOrProvinceName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'S' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.98 NAME 'status' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Status' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_OPERATIONAL '1' )",
"( 2.5.4.9 NAME 'street' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'SA' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.102 NAME 'supportedTypefaces' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Supported Typefaces' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.101 NAME 'supportedServices' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Supported Services' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.4 NAME ( 'sn' 'surname' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Surname' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.20 NAME 'telephoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} X-NDS_NAME 'Telephone Number' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.12 NAME 'title' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Title' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.111 NAME 'User' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.112 NAME 'Version' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} SINGLE-VALUE X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.1 NAME 'accountBalance' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_NAME 'Account Balance' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.4 NAME 'allowUnlimitedCredit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Allow Unlimited Credit' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.118 NAME 'lowConvergenceResetTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Low Convergence Reset Time' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.54 NAME 'minimumAccountBalance' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Minimum Account Balance' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.104 NAME 'lowConvergenceSyncInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Low Convergence Sync Interval' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.21 NAME 'Device' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.53 NAME 'messageServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Message Server' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.34 NAME 'Language' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.100 NAME 'supportedConnections' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Supported Connections' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.107 NAME 'typeCreatorMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Type Creator Map' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.108 NAME 'ndsUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'UID' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.24 NAME 'groupID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'GID' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.110 NAME 'unknownBaseClass' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Unknown Base Class' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.87 NAME 'receivedUpTo' SYNTAX 2.16.840.1.113719.1.1.5.1.19 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Received Up To' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.33 NAME 'synchronizedUpTo' SYNTAX 2.16.840.1.113719.1.1.5.1.19 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Synchronized Up To' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.5 NAME 'authorityRevocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Authority Revocation' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.13 NAME 'certificateRevocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Certificate Revocation' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.17 NAME 'ndsCrossCertificatePair' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'Cross Certificate Pair' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.37 NAME 'lockedByIntruder' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Locked By Intruder' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.77 NAME 'printer' SYNTAX 2.16.840.1.113719.1.1.5.1.25 X-NDS_NAME 'Printer' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.20 NAME 'detectIntruder' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Detect Intruder' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.38 NAME 'lockoutAfterDetection' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Lockout After Detection' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.32 NAME 'intruderLockoutResetInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Intruder Lockout Reset Interval' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.96 NAME 'serverHolds' SYNTAX 2.16.840.1.113719.1.1.5.1.26 X-NDS_NAME 'Server Holds' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.91 NAME 'sAPName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{47} SINGLE-VALUE X-NDS_NAME 'SAP Name' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '47' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.113 NAME 'Volume' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.35 NAME 'lastLoginTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Last Login Time' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.81 NAME 'printServer' SYNTAX 2.16.840.1.113719.1.1.5.1.25 SINGLE-VALUE X-NDS_NAME 'Print Server' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.119 NAME 'nNSDomain' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'NNS Domain' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.120 NAME 'fullName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{127} X-NDS_NAME 'Full Name' X-NDS_UPPER_BOUND '127' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.121 NAME 'partitionControl' SYNTAX 2.16.840.1.113719.1.1.5.1.25 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Partition Control' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.122 NAME 'revision' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Revision' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_SCHED_SYNC_NEVER '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.123 NAME 'certificateValidityInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'Certificate Validity Interval' X-NDS_LOWER_BOUND '60' X-NDS_UPPER_BOUND '-1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.124 NAME 'externalSynchronizer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'External Synchronizer' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.125 NAME 'messagingDatabaseLocation' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NAME 'Messaging Database Location' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.126 NAME 'messageRoutingGroup' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Message Routing Group' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.127 NAME 'messagingServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Messaging Server' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.128 NAME 'Postmaster' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.162 NAME 'mailboxLocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Mailbox Location' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.163 NAME 'mailboxID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{8} SINGLE-VALUE X-NDS_NAME 'Mailbox ID' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '8' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.164 NAME 'externalName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'External Name' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.165 NAME 'securityFlags' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Security Flags' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.166 NAME 'messagingServerType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} SINGLE-VALUE X-NDS_NAME 'Messaging Server Type' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.167 NAME 'lastReferencedTime' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Last Referenced Time' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.5.4.42 NAME 'givenName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} X-NDS_NAME 'Given Name' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.43 NAME 'initials' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{8} X-NDS_NAME 'Initials' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '8' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.5.4.44 NAME 'generationQualifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{8} SINGLE-VALUE X-NDS_NAME 'Generational Qualifier' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '8' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.171 NAME 'profileMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Profile Membership' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.172 NAME 'dsRevision' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'DS Revision' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_OPERATIONAL '1' )",
"( 2.16.840.1.113719.1.1.4.1.173 NAME 'supportedGateway' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{4096} X-NDS_NAME 'Supported Gateway' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '4096' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.174 NAME 'equivalentToMe' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Equivalent To Me' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
"( 2.16.840.1.113719.1.1.4.1.175 NAME 'replicaUpTo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Replica Up To' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.176 NAME 'partitionStatus' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Partition Status' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.177 NAME 'permanentConfigParms' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'Permanent Config Parms' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.178 NAME 'Timezone' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.179 NAME 'binderyRestrictionLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Bindery Restriction Level' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.180 NAME 'transitiveVector' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Transitive Vector' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_SCHED_SYNC_NEVER '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.181 NAME 'T' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.183 NAME 'purgeVector' SYNTAX 2.16.840.1.113719.1.1.5.1.19 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Purge Vector' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_SCHED_SYNC_NEVER '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.184 NAME 'synchronizationTolerance' SYNTAX 2.16.840.1.113719.1.1.5.1.19 USAGE directoryOperation X-NDS_NAME 'Synchronization Tolerance' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.185 NAME 'passwordManagement' SYNTAX 2.16.840.1.113719.1.1.5.1.0 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Password Management' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.186 NAME 'usedBy' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Used By' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.187 NAME 'Uses' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.500 NAME 'obituaryNotify' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Obituary Notify' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.501 NAME 'GUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{16} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_LOWER_BOUND '16' X-NDS_UPPER_BOUND '16' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.502 NAME 'otherGUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{16} USAGE directoryOperation X-NDS_NAME 'Other GUID' X-NDS_LOWER_BOUND '16' X-NDS_UPPER_BOUND '16' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.503 NAME 'auxiliaryClassFlag' SYNTAX 2.16.840.1.113719.1.1.5.1.0 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Auxiliary Class Flag' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.504 NAME 'unknownAuxiliaryClass' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} USAGE directoryOperation X-NDS_NAME 'Unknown Auxiliary Class' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 0.9.2342.19200300.100.1.1 NAME ( 'uid' 'userId' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'uniqueID' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 0.9.2342.19200300.100.1.25 NAME 'dc' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64} X-NDS_NAME 'dc' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.507 NAME 'auxClassObjectClassBackup' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'AuxClass Object Class Backup' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.508 NAME 'localReceivedUpTo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Local Received Up To' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.141.4.4 NAME 'federationControl' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.141.4.2 NAME 'federationSearchPath' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.141.4.3 NAME 'federationDNSName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.141.4.1 NAME 'federationBoundaryType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.14.4.1.4 NAME 'DirXML-Associations' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
"( 2.5.18.3 NAME 'creatorsName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
"( 2.5.18.4 NAME 'modifiersName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.300 NAME 'languageId' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.27.4.35 NAME 'ndsPredicate' SYNTAX 2.16.840.1.113719.1.1.5.1.12 X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.27.4.36 NAME 'ndsPredicateState' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.27.4.37 NAME 'ndsPredicateFlush' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.27.4.38 NAME 'ndsPredicateTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_UPPER_BOUND '2147483647' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.27.4.40 NAME 'ndsPredicateStatsDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.27.4.39 NAME 'ndsPredicateUseValues' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.601 NAME 'syncPanePoint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.600 NAME 'syncWindowVector' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.602 NAME 'objectVersion' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.27.4.52 NAME 'memberQueryURL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'memberQuery' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.302 NAME 'excludedMember' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.1.525 NAME 'auxClassCompatibility' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.518 NAME 'ndsAgentPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.519 NAME 'ndsOperationCheckpoint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.520 NAME 'localReferral' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.521 NAME 'treeReferral' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.522 NAME 'schemaResetLock' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.523 NAME 'modifiedACLEntry' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.524 NAME 'monitoredConnection' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.526 NAME 'localFederationBoundary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.527 NAME 'replicationFilter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.721 NAME 'ServerEBAEnabled' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.716 NAME 'EBATreeConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.722 NAME 'EBAPartitionConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.723 NAME 'EBAServerConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' )",
"( 2.16.840.1.113719.1.1.4.1.296 NAME 'loginActivationTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.687 NAME 'UpdateInProgress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.720 NAME 'dsContainerReadyAttrs' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.4.400.1 NAME 'edirSchemaFlagVersion' SYNTAX 2.16.840.1.113719.1.1.5.1.0 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
"( 2.16.840.1.113719.1.1.4.1.512 NAME 'indexDefinition' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.513 NAME 'ndsStatusRepair' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.514 NAME 'ndsStatusExternalReference' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.515 NAME 'ndsStatusObituary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.516 NAME 'ndsStatusSchema' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.517 NAME 'ndsStatusLimber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.511 NAME 'authoritative' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113730.3.1.34 NAME 'ref' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.546 NAME 'CachedAttrsOnExtRefs' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.1.4.1.547 NAME 'ExtRefLastUpdatedTime' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.688 NAME 'NCPKeyMaterialName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.1.4.713 NAME 'UTF8LoginScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.1.4.714 NAME 'loginScriptCharset' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE )",
"( 2.16.840.1.113719.1.1.4.721 NAME 'NDSRightsToMonitor' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.1.192 NAME 'lDAPLogLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_NAME 'LDAP Log Level' X-NDS_UPPER_BOUND '32768' )",
"( 2.16.840.1.113719.1.27.4.12 NAME 'lDAPUDPPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{65535} SINGLE-VALUE X-NDS_NAME 'LDAP UDP Port' X-NDS_UPPER_BOUND '65535' )",
"( 2.16.840.1.113719.1.1.4.1.204 NAME 'lDAPLogFilename' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Log Filename' )",
"( 2.16.840.1.113719.1.1.4.1.205 NAME 'lDAPBackupLogFilename' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Backup Log Filename' )",
"( 2.16.840.1.113719.1.1.4.1.206 NAME 'lDAPLogSizeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'LDAP Log Size Limit' X-NDS_LOWER_BOUND '2048' X-NDS_UPPER_BOUND '-1' )",
"( 2.16.840.1.113719.1.1.4.1.194 NAME 'lDAPSearchSizeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_NAME 'LDAP Search Size Limit' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '2147483647' )",
"( 2.16.840.1.113719.1.1.4.1.195 NAME 'lDAPSearchTimeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_NAME 'LDAP Search Time Limit' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '2147483647' )",
"( 2.16.840.1.113719.1.1.4.1.207 NAME 'lDAPSuffix' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'LDAP Suffix' )",
"( 2.16.840.1.113719.1.27.4.70 NAME 'ldapConfigVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.14 NAME 'ldapReferral' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Referral' )",
"( 2.16.840.1.113719.1.27.4.73 NAME 'ldapDefaultReferralBehavior' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.23 NAME 'ldapSearchReferralUsage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'LDAP:searchReferralUsage' )",
"( 2.16.840.1.113719.1.27.4.24 NAME 'lDAPOtherReferralUsage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'LDAP:otherReferralUsage' )",
"( 2.16.840.1.113719.1.27.4.1 NAME 'ldapHostServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'LDAP Host Server' )",
"( 2.16.840.1.113719.1.27.4.2 NAME 'ldapGroupDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'LDAP Group' )",
"( 2.16.840.1.113719.1.27.4.3 NAME 'ldapTraceLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_NAME 'LDAP Screen Level' X-NDS_UPPER_BOUND '32768' )",
"( 2.16.840.1.113719.1.27.4.4 NAME 'searchSizeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_UPPER_BOUND '2147483647' )",
"( 2.16.840.1.113719.1.27.4.5 NAME 'searchTimeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_UPPER_BOUND '2147483647' )",
"( 2.16.840.1.113719.1.27.4.6 NAME 'ldapServerBindLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'LDAP Server Bind Limit' X-NDS_UPPER_BOUND '-1' )",
"( 2.16.840.1.113719.1.27.4.7 NAME 'ldapServerIdleTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'LDAP Server Idle Timeout' X-NDS_UPPER_BOUND '-1' )",
"( 2.16.840.1.113719.1.27.4.8 NAME 'ldapEnableTCP' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'LDAP Enable TCP' )",
"( 2.16.840.1.113719.1.27.4.10 NAME 'ldapEnableSSL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'LDAP Enable SSL' )",
"( 2.16.840.1.113719.1.27.4.11 NAME 'ldapTCPPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{65535} SINGLE-VALUE X-NDS_NAME 'LDAP TCP Port' X-NDS_UPPER_BOUND '65535' )",
"( 2.16.840.1.113719.1.27.4.13 NAME 'ldapSSLPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{65535} SINGLE-VALUE X-NDS_NAME 'LDAP SSL Port' X-NDS_UPPER_BOUND '65535' )",
"( 2.16.840.1.113719.1.27.4.21 NAME 'filteredReplicaUsage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.22 NAME 'ldapKeyMaterialName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP:keyMaterialName' )",
"( 2.16.840.1.113719.1.27.4.42 NAME 'extensionInfo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.27.4.45 NAME 'nonStdClientSchemaCompatMode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.46 NAME 'sslEnableMutualAuthentication' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.62 NAME 'ldapEnablePSearch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.63 NAME 'ldapMaximumPSearchOperations' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.64 NAME 'ldapIgnorePSearchLimitsForEvents' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.65 NAME 'ldapTLSTrustedRootContainer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 2.16.840.1.113719.1.27.4.66 NAME 'ldapEnableMonitorEvents' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.67 NAME 'ldapMaximumMonitorEventsLoad' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.68 NAME 'ldapTLSRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.69 NAME 'ldapTLSVerifyClientCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.71 NAME 'ldapDerefAlias' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.72 NAME 'ldapNonStdAllUserAttrsMode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.75 NAME 'ldapBindRestrictions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.79 NAME 'ldapInterfaces' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.27.4.80 NAME 'ldapChainSecureRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.82 NAME 'ldapStdCompliance' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.83 NAME 'ldapDerefAliasOnAuth' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.84 NAME 'ldapGeneralizedTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.85 NAME 'ldapPermissiveModify' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.86 NAME 'ldapSSLConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.27.4.15 NAME 'ldapServerList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'LDAP Server List' )",
"( 2.16.840.1.113719.1.27.4.16 NAME 'ldapAttributeMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Attribute Map v11' )",
"( 2.16.840.1.113719.1.27.4.17 NAME 'ldapClassMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Class Map v11' )",
"( 2.16.840.1.113719.1.27.4.18 NAME 'ldapAllowClearTextPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'LDAP Allow Clear Text Password' )",
"( 2.16.840.1.113719.1.27.4.19 NAME 'ldapAnonymousIdentity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'LDAP Anonymous Identity' )",
"( 2.16.840.1.113719.1.27.4.52 NAME 'ldapAttributeList' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} )",
"( 2.16.840.1.113719.1.27.4.53 NAME 'ldapClassList' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} )",
"( 2.16.840.1.113719.1.27.4.56 NAME 'transitionGroupDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.74 NAME 'ldapTransitionBackLink' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.78 NAME 'ldapLBURPNumWriterThreads' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.27.4.20 NAME 'ldapServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'LDAP Server' )",
"( 0.9.2342.19200300.100.1.3 NAME 'mail' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NAME 'Internet EMail Address' X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113730.3.1.3 NAME 'employeeNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NAME 'NSCP:employeeNumber' )",
"( 2.16.840.1.113719.1.27.4.76 NAME 'referralExcludeFilter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.27.4.77 NAME 'referralIncludeFilter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.5.4.36 NAME 'userCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'userCertificate' X-NDS_PUBLIC_READ '1' )",
"( 2.5.4.37 NAME 'cACertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'cACertificate' X-NDS_PUBLIC_READ '1' )",
# --- Standard X.500 (2.5.4.x) and COSINE/pilot (0.9.2342...) attribute types,
# --- as LDAP attributeTypes strings with X-NDS_* vendor extensions
# --- (bounds, public-read flags, legacy NDS names).
"( 2.5.4.40 NAME 'crossCertificatePair' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'crossCertificatePair' X-NDS_PUBLIC_READ '1' )",
"( 2.5.4.58 NAME 'attributeCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.5.4.2 NAME 'knowledgeInformation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32768' )",
"( 2.5.4.14 NAME 'searchGuide' SYNTAX 1.3.6.1.4.1.1466.115.121.1.25{64512} X-NDS_NAME 'searchGuide' )",
"( 2.5.4.15 NAME 'businessCategory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' )",
"( 2.5.4.21 NAME 'telexNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.52{64512} X-NDS_NAME 'telexNumber' )",
"( 2.5.4.22 NAME 'teletexTerminalIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.51{64512} X-NDS_NAME 'teletexTerminalIdentifier' )",
"( 2.5.4.24 NAME 'x121Address' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{15} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '15' )",
"( 2.5.4.25 NAME 'internationaliSDNNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{16} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '16' )",
"( 2.5.4.26 NAME 'registeredAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.41{64512} X-NDS_NAME 'registeredAddress' )",
"( 2.5.4.27 NAME 'destinationIndicator' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{128} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' )",
"( 2.5.4.28 NAME 'preferredDeliveryMethod' SYNTAX 1.3.6.1.4.1.1466.115.121.1.14{64512} SINGLE-VALUE X-NDS_NAME 'preferredDeliveryMethod' )",
"( 2.5.4.29 NAME 'presentationAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.43{64512} SINGLE-VALUE X-NDS_NAME 'presentationAddress' )",
"( 2.5.4.30 NAME 'supportedApplicationContext' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38{64512} X-NDS_NAME 'supportedApplicationContext' )",
"( 2.5.4.45 NAME 'x500UniqueIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.6{64512} X-NDS_NAME 'x500UniqueIdentifier' )",
"( 2.5.4.46 NAME 'dnQualifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64512} )",
"( 2.5.4.47 NAME 'enhancedSearchGuide' SYNTAX 1.3.6.1.4.1.1466.115.121.1.21{64512} X-NDS_NAME 'enhancedSearchGuide' )",
"( 2.5.4.48 NAME 'protocolInformation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.42{64512} X-NDS_NAME 'protocolInformation' )",
"( 2.5.4.51 NAME 'houseIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32768' )",
"( 2.5.4.52 NAME 'supportedAlgorithms' SYNTAX 1.3.6.1.4.1.1466.115.121.1.49{64512} X-NDS_NAME 'supportedAlgorithms' )",
"( 2.5.4.54 NAME 'dmdName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32768' )",
"( 0.9.2342.19200300.100.1.6 NAME 'roomNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 0.9.2342.19200300.100.1.38 NAME 'associatedName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 2.5.4.49 NAME 'dn' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
# --- HTTP server configuration attributes (OID arc 2.16.840.1.113719.1.3.4.x),
# --- plus the emboxConfig blob (…1.1.4.1.295).
"( 2.16.840.1.113719.1.3.4.1 NAME 'httpServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 2.16.840.1.113719.1.3.4.2 NAME 'httpHostServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.3.4.3 NAME 'httpThreadsPerCPU' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.3.4.4 NAME 'httpIOBufferSize' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.3.4.5 NAME 'httpRequestTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.3.4.6 NAME 'httpKeepAliveRequestTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.3.4.7 NAME 'httpSessionTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.3.4.8 NAME 'httpKeyMaterialObject' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.3.4.9 NAME 'httpTraceLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.3.4.10 NAME 'httpAuthRequiresTLS' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.3.4.11 NAME 'httpDefaultClearPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.3.4.12 NAME 'httpDefaultTLSPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.3.4.13 NAME 'httpBindRestrictions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.1.4.1.295 NAME 'emboxConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
# --- Template / object-creation attributes (per-attribute OID arcs
# --- 2.16.840.1.113719.1.{54..63}.4.1.1).
# NOTE(review): several X-NDS_NAME values below contain an unescaped
# apostrophe inside single quotes (e.g. 'New Object's DS Rights'). This is
# kept byte-for-byte — presumably it mirrors the server's published schema
# and the consuming parser tolerates it; confirm before "fixing".
"( 2.16.840.1.113719.1.54.4.1.1 NAME 'trusteesOfNewObject' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NAME 'Trustees Of New Object' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.55.4.1.1 NAME 'newObjectSDSRights' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NAME 'New Object's DS Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.56.4.1.1 NAME 'newObjectSFSRights' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'New Object's FS Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.57.4.1.1 NAME 'setupScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Setup Script' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.58.4.1.1 NAME 'runSetupScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Run Setup Script' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.59.4.1.1 NAME 'membersOfTemplate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Members Of Template' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.60.4.1.1 NAME 'volumeSpaceRestrictions' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'Volume Space Restrictions' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.61.4.1.1 NAME 'setPasswordAfterCreate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Set Password After Create' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.62.4.1.1 NAME 'homeDirectoryRights' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_NAME 'Home Directory Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.63.4.1.1 NAME 'newObjectSSelfRights' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NAME 'New Object's Self Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
# --- White-pages / user-profile attributes: vendor arc 2.16.840.1.113719.1.8.4.x
# --- interleaved with COSINE (0.9.2342…) and Netscape (2.16.840.1.113730.3.1.x)
# --- standard attributes they complement.
"( 2.16.840.1.113719.1.8.4.1 NAME 'digitalMeID' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE )",
"( 2.16.840.1.113719.1.8.4.2 NAME 'assistant' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 2.16.840.1.113719.1.8.4.3 NAME 'assistantPhone' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
"( 2.16.840.1.113719.1.8.4.4 NAME 'city' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.5 NAME 'company' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 0.9.2342.19200300.100.1.43 NAME 'co' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.6 NAME 'directReports' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 0.9.2342.19200300.100.1.10 NAME 'manager' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 2.16.840.1.113719.1.8.4.7 NAME 'mailstop' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 0.9.2342.19200300.100.1.41 NAME 'mobile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
"( 0.9.2342.19200300.100.1.40 NAME 'personalTitle' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 0.9.2342.19200300.100.1.42 NAME 'pager' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
"( 2.16.840.1.113719.1.8.4.8 NAME 'workforceID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.9 NAME 'instantMessagingID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.10 NAME 'preferredName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 0.9.2342.19200300.100.1.7 NAME 'photo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
"( 2.16.840.1.113719.1.8.4.11 NAME 'jobCode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.12 NAME 'siteLocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.13 NAME 'employeeStatus' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113730.3.1.4 NAME 'employeeType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.14 NAME 'costCenter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.15 NAME 'costCenterDescription' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.16 NAME 'tollFreePhoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
"( 2.16.840.1.113719.1.8.4.17 NAME 'otherPhoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
"( 2.16.840.1.113719.1.8.4.18 NAME 'managerWorkforceID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.19 NAME 'jackNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113730.3.1.2 NAME 'departmentNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.20 NAME 'vehicleInformation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.21 NAME 'accessCardNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.32 NAME 'isManager' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.8.4.22 NAME 'homeCity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.23 NAME 'homeEmailAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 1.3.6.1.4.1.1466.101.120.31 NAME 'homeFax' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
"( 0.9.2342.19200300.100.1.20 NAME 'homePhone' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
"( 2.16.840.1.113719.1.8.4.24 NAME 'homeState' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 0.9.2342.19200300.100.1.39 NAME 'homePostalAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.41{64512} )",
"( 2.16.840.1.113719.1.8.4.25 NAME 'homeZipCode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.26 NAME 'personalMobile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
"( 2.16.840.1.113719.1.8.4.27 NAME 'children' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.28 NAME 'spouse' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.29 NAME 'vendorName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.30 NAME 'vendorAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.8.4.31 NAME 'vendorPhoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
# --- Dynamic-group (dg*) configuration attributes and alias handling.
"( 2.16.840.1.113719.1.1.4.1.303 NAME 'dgIdentity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME_VALUE_ACCESS '1' )",
"( 2.16.840.1.113719.1.1.4.1.304 NAME 'dgTimeOut' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.1.4.1.305 NAME 'dgAllowUnknown' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.1.4.1.306 NAME 'dgAllowDuplicates' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.1.4.1.546 NAME 'allowAliasToAncestor' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
# --- SAS / SecretStore attributes (arc …1.39.4.1.x); key/data blobs are
# --- operational (USAGE directoryOperation) and flagged X-NDS_HIDDEN.
"( 2.16.840.1.113719.1.39.4.1.1 NAME 'sASSecurityDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Security DN' X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.4.1.2 NAME 'sASServiceDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Service DN' X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.4.1.3 NAME 'sASSecretStore' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'SAS:SecretStore' )",
"( 2.16.840.1.113719.1.39.4.1.4 NAME 'sASSecretStoreKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'SAS:SecretStore:Key' X-NDS_HIDDEN '1' )",
"( 2.16.840.1.113719.1.39.4.1.5 NAME 'sASSecretStoreData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'SAS:SecretStore:Data' X-NDS_HIDDEN '1' )",
"( 2.16.840.1.113719.1.39.4.1.6 NAME 'sASPKIStoreKeys' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'SAS:PKIStore:Keys' X-NDS_HIDDEN '1' )",
# --- NDSPKI certificate / key-material attributes (arc …1.48.4.1.x, plus a
# --- few under …1.39.4.1.1xx).
"( 2.16.840.1.113719.1.48.4.1.1 NAME 'nDSPKIPublicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key' X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.2 NAME 'nDSPKIPrivateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Private Key' )",
"( 2.16.840.1.113719.1.48.4.1.3 NAME 'nDSPKIPublicKeyCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key Certificate' X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.4 NAME 'nDSPKICertificateChain' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'NDSPKI:Certificate Chain' X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.16 NAME 'nDSPKIPublicKeyEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key EC' X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.17 NAME 'nDSPKIPrivateKeyEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Private Key EC' )",
"( 2.16.840.1.113719.1.48.4.1.18 NAME 'nDSPKIPublicKeyCertificateEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key Certificate EC' X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.19 NAME 'crossCertificatePairEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'Cross Certificate Pair EC' X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.20 NAME 'nDSPKICertificateChainEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'NDSPKI:Certificate Chain EC' X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.5 NAME 'nDSPKIParentCA' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Parent CA' )",
"( 2.16.840.1.113719.1.48.4.1.6 NAME 'nDSPKIParentCADN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'NDSPKI:Parent CA DN' )",
# NOTE(review): OID 2.16.840.1.113719.1.48.4.1.20 below duplicates the OID of
# nDSPKICertificateChainEC above. Kept verbatim — presumably copied from the
# server's published schema; confirm against vendor documentation before changing.
"( 2.16.840.1.113719.1.48.4.1.20 NAME 'nDSPKISuiteBMode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'NDSPKI:SuiteBMode' X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.7 NAME 'nDSPKIKeyFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Key File' )",
"( 2.16.840.1.113719.1.48.4.1.8 NAME 'nDSPKISubjectName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Subject Name' )",
"( 2.16.840.1.113719.1.48.4.1.11 NAME 'nDSPKIGivenName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Given Name' )",
"( 2.16.840.1.113719.1.48.4.1.9 NAME 'nDSPKIKeyMaterialDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'NDSPKI:Key Material DN' )",
"( 2.16.840.1.113719.1.48.4.1.10 NAME 'nDSPKITreeCADN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'NDSPKI:Tree CA DN' )",
"( 2.5.4.59 NAME 'cAECCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.12 NAME 'nDSPKIUserCertificateInfo' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'NDSPKI:userCertificateInfo' )",
"( 2.16.840.1.113719.1.48.4.1.13 NAME 'nDSPKITrustedRootCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Trusted Root Certificate' X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.14 NAME 'nDSPKINotBefore' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Not Before' X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.15 NAME 'nDSPKINotAfter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Not After' X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.101 NAME 'nDSPKISDKeyServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'NDSPKI:SD Key Server DN' X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.102 NAME 'nDSPKISDKeyStruct' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'NDSPKI:SD Key Struct' )",
"( 2.16.840.1.113719.1.48.4.1.103 NAME 'nDSPKISDKeyCert' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:SD Key Cert' )",
"( 2.16.840.1.113719.1.48.4.1.104 NAME 'nDSPKISDKeyID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:SD Key ID' )",
"( 2.16.840.1.113719.1.39.4.1.105 NAME 'nDSPKIKeystore' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'NDSPKI:Keystore' X-NDS_HIDDEN '1' )",
"( 2.16.840.1.113719.1.39.4.1.106 NAME 'ndspkiAdditionalRoots' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
# --- MASV label/policy attributes (arc …1.31.4.x).
"( 2.16.840.1.113719.1.31.4.2.3 NAME 'masvLabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.31.4.2.4 NAME 'masvProposedLabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.31.4.2.5 NAME 'masvDefaultRange' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.31.4.2.6 NAME 'masvAuthorizedRange' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.31.4.2.7 NAME 'masvDomainPolicy' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.31.4.1.8 NAME 'masvClearanceNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.31.4.1.9 NAME 'masvLabelNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.31.4.1.10 NAME 'masvLabelSecrecyLevelNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.31.4.1.11 NAME 'masvLabelSecrecyCategoryNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.31.4.1.12 NAME 'masvLabelIntegrityLevelNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.31.4.1.13 NAME 'masvLabelIntegrityCategoryNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.31.4.1.14 NAME 'masvPolicyUpdate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.31.4.1.16 NAME 'masvNDSAttributeLabels' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.31.4.1.15 NAME 'masvPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
# --- SAS/NMAS login policy and per-platform login-method module attributes
# --- (arc …1.39.42.1.0.x). The sasLogin{Client,Server}Method* entries share
# --- the same shape and differ only by platform.
"( 2.16.840.1.113719.1.39.42.1.0.2 NAME 'sASLoginSequence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NAME 'SAS:Login Sequence' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.8 NAME 'sASLoginPolicyUpdate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:Login Policy Update' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.38 NAME 'sasNMASProductOptions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.74 NAME 'sasAuditConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.14 NAME 'sASNDSPasswordWindow' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:NDS Password Window' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.15 NAME 'sASPolicyCredentials' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Policy Credentials' X-NDS_SERVER_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.16 NAME 'sASPolicyMethods' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Methods' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.17 NAME 'sASPolicyObjectVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:Policy Object Version' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.18 NAME 'sASPolicyServiceSubtypes' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Service Subtypes' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.19 NAME 'sASPolicyServices' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Services' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.20 NAME 'sASPolicyUsers' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Users' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.21 NAME 'sASAllowNDSPasswordWindow' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'SAS:Allow NDS Password Window' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.9 NAME 'sASMethodIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Method Identifier' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.10 NAME 'sASMethodVendor' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Method Vendor' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.11 NAME 'sASAdvisoryMethodGrade' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Advisory Method Grade' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.12 NAME 'sASVendorSupport' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Vendor Support' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.13 NAME 'sasCertificateSearchContainers' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.70 NAME 'sasNMASMethodConfigData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.22 NAME 'sASLoginClientMethodNetWare' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Client Method NetWare' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.23 NAME 'sASLoginServerMethodNetWare' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Server Method NetWare' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.24 NAME 'sASLoginClientMethodWINNT' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Client Method WINNT' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.25 NAME 'sASLoginServerMethodWINNT' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Server Method WINNT' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.26 NAME 'sasLoginClientMethodSolaris' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.27 NAME 'sasLoginServerMethodSolaris' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.28 NAME 'sasLoginClientMethodLinux' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.29 NAME 'sasLoginServerMethodLinux' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.30 NAME 'sasLoginClientMethodTru64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.31 NAME 'sasLoginServerMethodTru64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.32 NAME 'sasLoginClientMethodAIX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.33 NAME 'sasLoginServerMethodAIX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.34 NAME 'sasLoginClientMethodHPUX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.35 NAME 'sasLoginServerMethodHPUX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1000 NAME 'sasLoginClientMethods390' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1001 NAME 'sasLoginServerMethods390' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1002 NAME 'sasLoginClientMethodLinuxX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1003 NAME 'sasLoginServerMethodLinuxX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1004 NAME 'sasLoginClientMethodWinX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1005 NAME 'sasLoginServerMethodWinX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1006 NAME 'sasLoginClientMethodSolaris64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1007 NAME 'sasLoginServerMethodSolaris64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1008 NAME 'sasLoginClientMethodAIX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1009 NAME 'sasLoginServerMethodAIX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
# NOTE(review): OIDs …39.42.1.0.1011 and …1012 below also appear later in this
# list (sasOTPEnabled / sasOTPCounter). Kept verbatim; confirm against the
# published eDirectory schema before deduplicating.
"( 2.16.840.1.113719.1.39.42.1.0.1011 NAME 'sasLoginServerMethodSolarisi386' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1012 NAME 'sasLoginClientMethodSolarisi386' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.78 NAME 'sasUnsignedMethodModules' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.79 NAME 'sasServerModuleName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.80 NAME 'sasServerModuleEntryPointName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.81 NAME 'sasSASLMechanismName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.82 NAME 'sasSASLMechanismEntryPointName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.83 NAME 'sasClientModuleName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.84 NAME 'sasClientModuleEntryPointName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.36 NAME 'sASLoginMethodContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Login Method Container DN' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.37 NAME 'sASLoginPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Login Policy DN' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.63 NAME 'sasPostLoginMethodContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
# --- RADIUS dial-access attributes (X-NDS_NAME prefix 'RADIUS:'). Note the
# --- numeric suffixes …0.38–0.62 here overlap numbers used by sas* entries
# --- elsewhere in this list only in appearance; each line's full OID is distinct
# --- within this run.
"( 2.16.840.1.113719.1.39.42.1.0.38 NAME 'rADIUSActiveConnections' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Active Connections' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.39 NAME 'rADIUSAgedInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Aged Interval' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.40 NAME 'rADIUSAttributeList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Attribute List' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.41 NAME 'rADIUSAttributeLists' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Attribute Lists' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.42 NAME 'rADIUSClient' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Client' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.43 NAME 'rADIUSCommonNameResolution' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Common Name Resolution' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.44 NAME 'rADIUSConcurrentLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Concurrent Limit' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.45 NAME 'rADIUSConnectionHistory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Connection History' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.46 NAME 'rADIUSDASVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:DAS Version' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.47 NAME 'rADIUSDefaultProfile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Default Profile' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.48 NAME 'rADIUSDialAccessGroup' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'RADIUS:Dial Access Group' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.49 NAME 'rADIUSEnableCommonNameLogin' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'RADIUS:Enable Common Name Login' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.50 NAME 'rADIUSEnableDialAccess' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'RADIUS:Enable Dial Access' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.51 NAME 'rADIUSInterimAcctingTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Interim Accting Timeout' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.52 NAME 'rADIUSLookupContexts' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'RADIUS:Lookup Contexts' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.53 NAME 'rADIUSMaxDASHistoryRecord' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Max DAS History Record' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.54 NAME 'rADIUSMaximumHistoryRecord' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Maximum History Record' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.55 NAME 'rADIUSPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Password' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.56 NAME 'rADIUSPasswordPolicy' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Password Policy' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.57 NAME 'rADIUSPrivateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Private Key' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.58 NAME 'rADIUSProxyContext' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'RADIUS:Proxy Context' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.59 NAME 'rADIUSProxyDomain' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Proxy Domain' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.60 NAME 'rADIUSProxyTarget' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Proxy Target' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.61 NAME 'rADIUSPublicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Public Key' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.62 NAME 'rADIUSServiceList' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'RADIUS:Service List' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
# --- SAS login secrets/configuration and one-time-password (sasOTP*) attributes.
"( 2.16.840.1.113719.1.39.42.1.0.3 NAME 'sASLoginSecret' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Secret' X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.4 NAME 'sASLoginSecretKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Secret Key' X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.5 NAME 'sASEncryptionType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:Encryption Type' X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.6 NAME 'sASLoginConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Configuration' X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.7 NAME 'sASLoginConfigurationKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Configuration Key' X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.73 NAME 'sasDefaultLoginSequence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.64 NAME 'sasAuthorizedLoginSequences' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.69 NAME 'sasAllowableSubjectNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.71 NAME 'sasLoginFailureDelay' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.72 NAME 'sasMethodVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1010 NAME 'sasUpdateLoginInfo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
# NOTE(review): OIDs …39.42.1.0.1011 and …1012 below also name
# sasLoginServerMethodSolarisi386 / sasLoginClientMethodSolarisi386 earlier in
# this list. Kept verbatim; confirm against the published schema.
"( 2.16.840.1.113719.1.39.42.1.0.1011 NAME 'sasOTPEnabled' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1012 NAME 'sasOTPCounter' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1013 NAME 'sasOTPLookAheadWindow' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1014 NAME 'sasOTPDigits' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1015 NAME 'sasOTPReSync' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.39.42.1.0.1016 NAME 'sasUpdateLoginTimeInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.6.4.1 NAME 'snmpGroupDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.6.4.2 NAME 'snmpServerList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 2.16.840.1.113719.1.6.4.3 NAME 'snmpTrapConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
"( 2.16.840.1.113719.1.6.4.4 NAME 'snmpTrapDescription' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
"( 2.16.840.1.113719.1.6.4.5 NAME 'snmpTrapInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.6.4.6 NAME 'snmpTrapDisable' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.1.4.1.528 NAME 'ndapPartitionPasswordMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.529 NAME 'ndapClassPasswordMgmt' SYNTAX 2.16.840.1.113719.1.1.5.1.0 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.530 NAME 'ndapPasswordMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.537 NAME 'ndapPartitionLoginMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.538 NAME 'ndapClassLoginMgmt' SYNTAX 2.16.840.1.113719.1.1.5.1.0 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.539 NAME 'ndapLoginMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.1 NAME 'nspmPasswordKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
"( 2.16.840.1.113719.1.39.43.4.2 NAME 'nspmPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
"( 2.16.840.1.113719.1.39.43.4.3 NAME 'nspmDistributionPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
"( 2.16.840.1.113719.1.39.43.4.4 NAME 'nspmPasswordHistory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_HIDDEN '1' )",
"( 2.16.840.1.113719.1.39.43.4.5 NAME 'nspmAdministratorChangeCount' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
"( 2.16.840.1.113719.1.39.43.4.6 NAME 'nspmPasswordPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.7 NAME 'nspmPreviousDistributionPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
"( 2.16.840.1.113719.1.39.43.4.8 NAME 'nspmDoNotExpirePassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.42.2.27.8.1.16 NAME 'pwdChangedTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 1.3.6.1.4.1.42.2.27.8.1.17 NAME 'pwdAccountLockedTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 1.3.6.1.4.1.42.2.27.8.1.19 NAME 'pwdFailureTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.16.840.1.113719.1.39.43.4.100 NAME 'nspmConfigurationOptions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.102 NAME 'nspmChangePasswordMessage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.103 NAME 'nspmPasswordHistoryLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.104 NAME 'nspmPasswordHistoryExpiration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 1.3.6.1.4.1.42.2.27.8.1.4 NAME 'pwdInHistory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.105 NAME 'nspmMinPasswordLifetime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.106 NAME 'nspmAdminsDoNotExpirePassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.107 NAME 'nspmPasswordACL' SYNTAX 2.16.840.1.113719.1.1.5.1.17 )",
"( 2.16.840.1.113719.1.39.43.4.200 NAME 'nspmMaximumLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.201 NAME 'nspmMinUpperCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.202 NAME 'nspmMaxUpperCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.203 NAME 'nspmMinLowerCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.204 NAME 'nspmMaxLowerCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.205 NAME 'nspmNumericCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.206 NAME 'nspmNumericAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.207 NAME 'nspmNumericAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.208 NAME 'nspmMinNumericCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.209 NAME 'nspmMaxNumericCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.210 NAME 'nspmSpecialCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.211 NAME 'nspmSpecialAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.212 NAME 'nspmSpecialAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.213 NAME 'nspmMinSpecialCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.214 NAME 'nspmMaxSpecialCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.215 NAME 'nspmMaxRepeatedCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.216 NAME 'nspmMaxConsecutiveCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.217 NAME 'nspmMinUniqueCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.218 NAME 'nspmDisallowedAttributeValues' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.219 NAME 'nspmExcludeList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.220 NAME 'nspmCaseSensitive' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.221 NAME 'nspmPolicyPrecedence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.222 NAME 'nspmExtendedCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.223 NAME 'nspmExtendedAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.224 NAME 'nspmExtendedAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.225 NAME 'nspmMinExtendedCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.226 NAME 'nspmMaxExtendedCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.227 NAME 'nspmUpperAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.228 NAME 'nspmUpperAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.229 NAME 'nspmLowerAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.230 NAME 'nspmLowerAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.231 NAME 'nspmComplexityRules' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.233 NAME 'nspmAD2K8Syntax' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.234 NAME 'nspmAD2K8maxViolation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.235 NAME 'nspmXCharLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.236 NAME 'nspmXCharHistoryLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.237 NAME 'nspmUnicodeAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.238 NAME 'nspmNonAlphaCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.239 NAME 'nspmMinNonAlphaCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.240 NAME 'nspmMaxNonAlphaCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.241 NAME 'nspmGraceLoginHistoryLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.300 NAME 'nspmPolicyAgentContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.301 NAME 'nspmPolicyAgentNetWare' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.302 NAME 'nspmPolicyAgentWINNT' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.303 NAME 'nspmPolicyAgentSolaris' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.304 NAME 'nspmPolicyAgentLinux' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.305 NAME 'nspmPolicyAgentAIX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.43.4.306 NAME 'nspmPolicyAgentHPUX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 0.9.2342.19200300.100.1.55 NAME 'audio' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
"( 2.16.840.1.113730.3.1.1 NAME 'carLicense' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113730.3.1.241 NAME 'displayName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 0.9.2342.19200300.100.1.60 NAME 'jpegPhoto' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
"( 1.3.6.1.4.1.250.1.57 NAME 'labeledUri' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 0.9.2342.19200300.100.1.7 NAME 'ldapPhoto' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
"( 2.16.840.1.113730.3.1.39 NAME 'preferredLanguage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.21 NAME 'secretary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 2.16.840.1.113730.3.1.40 NAME 'userSMIMECertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
"( 2.16.840.1.113730.3.1.216 NAME 'userPKCS12' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
"( 2.16.840.1.113719.1.12.4.1.0 NAME 'auditAEncryptionKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:A Encryption Key' )",
"( 2.16.840.1.113719.1.12.4.2.0 NAME 'auditBEncryptionKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:B Encryption Key' )",
"( 2.16.840.1.113719.1.12.4.3.0 NAME 'auditContents' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Audit:Contents' )",
"( 2.16.840.1.113719.1.12.4.4.0 NAME 'auditType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Audit:Type' )",
"( 2.16.840.1.113719.1.12.4.5.0 NAME 'auditCurrentEncryptionKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:Current Encryption Key' )",
"( 2.16.840.1.113719.1.12.4.6.0 NAME 'auditFileLink' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Audit:File Link' )",
"( 2.16.840.1.113719.1.12.4.7.0 NAME 'auditLinkList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Audit:Link List' )",
"( 2.16.840.1.113719.1.12.4.8.0 NAME 'auditPath' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NAME 'Audit:Path' )",
"( 2.16.840.1.113719.1.12.4.9.0 NAME 'auditPolicy' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:Policy' )",
"( 2.16.840.1.113719.1.38.4.1.1 NAME 'wANMANWANPolicy' SYNTAX 2.16.840.1.113719.1.1.5.1.13{64512} X-NDS_NAME 'WANMAN:WAN Policy' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.38.4.1.2 NAME 'wANMANLANAreaMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'WANMAN:LAN Area Membership' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.38.4.1.3 NAME 'wANMANCost' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'WANMAN:Cost' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.38.4.1.4 NAME 'wANMANDefaultCost' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'WANMAN:Default Cost' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.135.4.30 NAME 'rbsAssignedRoles' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
"( 2.16.840.1.113719.1.135.4.31 NAME 'rbsContent' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
"( 2.16.840.1.113719.1.135.4.32 NAME 'rbsContentMembership' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
"( 2.16.840.1.113719.1.135.4.33 NAME 'rbsEntryPoint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE )",
"( 2.16.840.1.113719.1.135.4.34 NAME 'rbsMember' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
"( 2.16.840.1.113719.1.135.4.35 NAME 'rbsOwnedCollections' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 2.16.840.1.113719.1.135.4.36 NAME 'rbsPath' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
"( 2.16.840.1.113719.1.135.4.37 NAME 'rbsParameters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} )",
"( 2.16.840.1.113719.1.135.4.38 NAME 'rbsTaskRights' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
"( 2.16.840.1.113719.1.135.4.39 NAME 'rbsTrusteeOf' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
"( 2.16.840.1.113719.1.135.4.40 NAME 'rbsType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} SINGLE-VALUE X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '256' )",
"( 2.16.840.1.113719.1.135.4.41 NAME 'rbsURL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
"( 2.16.840.1.113719.1.135.4.42 NAME 'rbsTaskTemplates' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
"( 2.16.840.1.113719.1.135.4.43 NAME 'rbsTaskTemplatesURL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
"( 2.16.840.1.113719.1.135.4.44 NAME 'rbsGALabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
"( 2.16.840.1.113719.1.135.4.45 NAME 'rbsPageMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} )",
"( 2.16.840.1.113719.1.135.4.46 NAME 'rbsTargetObjectType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.135.4.47 NAME 'rbsContext' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.135.4.48 NAME 'rbsXMLInfo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE )",
"( 2.16.840.1.113719.1.135.4.51 NAME 'rbsAssignedRoles2' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
"( 2.16.840.1.113719.1.135.4.52 NAME 'rbsOwnedCollections2' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 2.16.840.1.113719.1.1.4.1.540 NAME 'prSyncPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.1.4.1.541 NAME 'prSyncAttributes' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_SERVER_READ '1' )",
"( 2.16.840.1.113719.1.1.4.1.542 NAME 'dsEncryptedReplicationConfig' SYNTAX 2.16.840.1.113719.1.1.5.1.19 )",
"( 2.16.840.1.113719.1.1.4.1.543 NAME 'encryptionPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.1.4.1.544 NAME 'attrEncryptionRequiresSecure' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.1.4.1.545 NAME 'attrEncryptionDefinition' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.48.4.1.16 NAME 'ndspkiCRLFileName' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.17 NAME 'ndspkiStatus' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.18 NAME 'ndspkiIssueTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.19 NAME 'ndspkiNextIssueTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.20 NAME 'ndspkiAttemptTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.21 NAME 'ndspkiTimeInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.22 NAME 'ndspkiCRLMaxProcessingInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.23 NAME 'ndspkiCRLNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.24 NAME 'ndspkiDistributionPoints' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.25 NAME 'ndspkiCRLProcessData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.26 NAME 'ndspkiCRLConfigurationDNList' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.27 NAME 'ndspkiCADN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.28 NAME 'ndspkiCRLContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.29 NAME 'ndspkiIssuedCertContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.30 NAME 'ndspkiDistributionPointDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.31 NAME 'ndspkiCRLConfigurationDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.32 NAME 'ndspkiDirectory' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} )",
"( 2.5.4.38 NAME 'authorityRevocationList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE X-NDS_NAME 'ndspkiAuthorityRevocationList' X-NDS_PUBLIC_READ '1' )",
"( 2.5.4.39 NAME 'certificateRevocationList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE X-NDS_NAME 'ndspkiCertificateRevocationList' X-NDS_PUBLIC_READ '1' )",
"( 2.5.4.53 NAME 'deltaRevocationList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE X-NDS_NAME 'ndspkiDeltaRevocationList' X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.36 NAME 'ndspkiTrustedRootList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.37 NAME 'ndspkiSecurityRightsLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.48.4.1.38 NAME 'ndspkiKMOExport' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.48.4.1.39 NAME 'ndspkiCRLECConfigurationDNList' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.40 NAME 'ndspkiCRLType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.41 NAME 'ndspkiCRLExtendValidity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.42 NAME 'ndspkiDefaultRSAKeySize' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.43 NAME 'ndspkiDefaultECCurve' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.48.4.1.44 NAME 'ndspkiDefaultCertificateLife' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.7.4.1 NAME 'notfSMTPEmailHost' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.7.4.2 NAME 'notfSMTPEmailFrom' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.7.4.3 NAME 'notfSMTPEmailUserName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.7.4.5 NAME 'notfMergeTemplateData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.7.4.6 NAME 'notfMergeTemplateSubject' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.44.4.1 NAME 'nsimRequiredQuestions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.44.4.2 NAME 'nsimRandomQuestions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.44.4.3 NAME 'nsimNumberRandomQuestions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.44.4.4 NAME 'nsimMinResponseLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.44.4.5 NAME 'nsimMaxResponseLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.44.4.6 NAME 'nsimForgottenLoginConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.44.4.7 NAME 'nsimForgottenAction' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.44.4.8 NAME 'nsimAssignments' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.44.4.9 NAME 'nsimChallengeSetDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.44.4.10 NAME 'nsimChallengeSetGUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.44.4.11 NAME 'nsimPwdRuleEnforcement' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.39.44.4.12 NAME 'nsimHint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.39.44.4.13 NAME 'nsimPasswordReminder' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE )",
"( 2.16.840.1.113719.1.266.4.4 NAME 'sssProxyStoreKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
"( 2.16.840.1.113719.1.266.4.5 NAME 'sssProxyStoreSecrets' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_HIDDEN '1' )",
"( 2.16.840.1.113719.1.266.4.6 NAME 'sssActiveServerList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
"( 2.16.840.1.113719.1.266.4.7 NAME 'sssCacheRefreshInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.266.4.8 NAME 'sssAdminList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 2.16.840.1.113719.1.266.4.9 NAME 'sssAdminGALabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
"( 2.16.840.1.113719.1.266.4.10 NAME 'sssEnableReadTimestamps' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.266.4.11 NAME 'sssDisableMasterPasswords' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.266.4.12 NAME 'sssEnableAdminAccess' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.266.4.13 NAME 'sssReadSecretPolicies' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
"( 2.16.840.1.113719.1.266.4.14 NAME 'sssServerPolicyOverrideDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.1.4.1.531 NAME 'eDirCloneSource' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.1.532 NAME 'eDirCloneKeys' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_HIDDEN '1' )",
"( 2.16.840.1.113719.1.1.4.1.533 NAME 'eDirCloneLock' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
"( 2.16.840.1.113719.1.1.4.711 NAME 'groupMember' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 2.16.840.1.113719.1.1.4.712 NAME 'nestedConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.16.840.1.113719.1.1.4.717 NAME 'xdasDSConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.1.4.718 NAME 'xdasConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.1.4.719 NAME 'xdasVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_UPPER_BOUND '32768' )",
"( 2.16.840.1.113719.1.347.4.79 NAME 'NAuditInstrumentation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.347.4.2 NAME 'NAuditLoggingServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_PUBLIC_READ '1' )",
"( 2.16.840.1.113719.1.1.4.724 NAME 'cefConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
"( 2.16.840.1.113719.1.1.4.725 NAME 'cefVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_UPPER_BOUND '32768' )"
],
"createTimestamp": [],
"dITContentRules": [],
"dITStructureRules": [],
"ldapSyntaxes": [
"( 1.3.6.1.4.1.1466.115.121.1.1 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.2 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.3 X-NDS_SYNTAX '3' )",
"( 1.3.6.1.4.1.1466.115.121.1.4 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.5 X-NDS_SYNTAX '21' )",
"( 1.3.6.1.4.1.1466.115.121.1.6 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.7 X-NDS_SYNTAX '7' )",
"( 2.16.840.1.113719.1.1.5.1.6 X-NDS_SYNTAX '6' )",
"( 1.3.6.1.4.1.1466.115.121.1.8 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.9 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.10 X-NDS_SYNTAX '9' )",
"( 2.16.840.1.113719.1.1.5.1.22 X-NDS_SYNTAX '22' )",
"( 1.3.6.1.4.1.1466.115.121.1.11 X-NDS_SYNTAX '3' )",
"( 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_SYNTAX '1' )",
"( 1.3.6.1.4.1.1466.115.121.1.13 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.14 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.15 X-NDS_SYNTAX '3' )",
"( 1.3.6.1.4.1.1466.115.121.1.16 X-NDS_SYNTAX '3' )",
"( 1.3.6.1.4.1.1466.115.121.1.17 X-NDS_SYNTAX '3' )",
"( 1.3.6.1.4.1.1466.115.121.1.18 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.19 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.20 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.21 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.22 X-NDS_SYNTAX '11' )",
"( 1.3.6.1.4.1.1466.115.121.1.23 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.24 X-NDS_SYNTAX '24' )",
"( 1.3.6.1.4.1.1466.115.121.1.25 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.26 X-NDS_SYNTAX '2' )",
"( 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_SYNTAX '8' )",
"( 1.3.6.1.4.1.1466.115.121.1.28 X-NDS_SYNTAX '9' )",
"( 1.2.840.113556.1.4.906 X-NDS_SYNTAX '29' )",
"( 1.3.6.1.4.1.1466.115.121.1.54 X-NDS_SYNTAX '3' )",
"( 1.3.6.1.4.1.1466.115.121.1.56 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.57 X-NDS_SYNTAX '3' )",
"( 1.3.6.1.4.1.1466.115.121.1.29 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.30 X-NDS_SYNTAX '3' )",
"( 1.3.6.1.4.1.1466.115.121.1.31 X-NDS_SYNTAX '3' )",
"( 1.3.6.1.4.1.1466.115.121.1.32 X-NDS_SYNTAX '3' )",
"( 1.3.6.1.4.1.1466.115.121.1.33 X-NDS_SYNTAX '3' )",
"( 1.3.6.1.4.1.1466.115.121.1.55 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.34 X-NDS_SYNTAX '3' )",
"( 1.3.6.1.4.1.1466.115.121.1.35 X-NDS_SYNTAX '3' )",
"( 2.16.840.1.113719.1.1.5.1.19 X-NDS_SYNTAX '19' )",
"( 1.3.6.1.4.1.1466.115.121.1.36 X-NDS_SYNTAX '5' )",
"( 2.16.840.1.113719.1.1.5.1.17 X-NDS_SYNTAX '17' )",
"( 1.3.6.1.4.1.1466.115.121.1.37 X-NDS_SYNTAX '3' )",
"( 2.16.840.1.113719.1.1.5.1.13 X-NDS_SYNTAX '13' )",
"( 1.3.6.1.4.1.1466.115.121.1.40 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.38 X-NDS_SYNTAX '20' )",
"( 1.3.6.1.4.1.1466.115.121.1.39 X-NDS_SYNTAX '3' )",
"( 1.3.6.1.4.1.1466.115.121.1.41 X-NDS_SYNTAX '18' )",
"( 1.3.6.1.4.1.1466.115.121.1.43 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.44 X-NDS_SYNTAX '4' )",
"( 1.3.6.1.4.1.1466.115.121.1.42 X-NDS_SYNTAX '9' )",
"( 2.16.840.1.113719.1.1.5.1.16 X-NDS_SYNTAX '16' )",
"( 1.3.6.1.4.1.1466.115.121.1.58 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.45 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.46 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.47 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.48 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.49 X-NDS_SYNTAX '9' )",
"( 2.16.840.1.113719.1.1.5.1.12 X-NDS_SYNTAX '12' )",
"( 2.16.840.1.113719.1.1.5.1.23 X-NDS_SYNTAX '23' )",
"( 2.16.840.1.113719.1.1.5.1.15 X-NDS_SYNTAX '15' )",
"( 2.16.840.1.113719.1.1.5.1.14 X-NDS_SYNTAX '14' )",
"( 1.3.6.1.4.1.1466.115.121.1.50 X-NDS_SYNTAX '10' )",
"( 1.3.6.1.4.1.1466.115.121.1.51 X-NDS_SYNTAX '9' )",
"( 1.3.6.1.4.1.1466.115.121.1.52 X-NDS_SYNTAX '9' )",
"( 2.16.840.1.113719.1.1.5.1.25 X-NDS_SYNTAX '25' )",
"( 1.3.6.1.4.1.1466.115.121.1.53 X-NDS_SYNTAX '9' )",
"( 2.16.840.1.113719.1.1.5.1.26 X-NDS_SYNTAX '26' )",
"( 2.16.840.1.113719.1.1.5.1.27 X-NDS_SYNTAX '27' )"
],
"matchingRuleUse": [],
"matchingRules": [],
"modifyTimestamp": [
"20190831135835Z"
],
"nameForms": [],
"objectClass": [
"top",
"subschema"
],
"objectClasses": [
"( 2.5.6.0 NAME 'Top' STRUCTURAL MUST objectClass MAY ( cAPublicKey $ cAPrivateKey $ certificateValidityInterval $ authorityRevocation $ lastReferencedTime $ equivalentToMe $ ACL $ backLink $ binderyProperty $ Obituary $ Reference $ revision $ ndsCrossCertificatePair $ certificateRevocation $ usedBy $ GUID $ otherGUID $ DirXML-Associations $ creatorsName $ modifiersName $ objectVersion $ auxClassCompatibility $ unknownBaseClass $ unknownAuxiliaryClass $ masvProposedLabel $ masvDefaultRange $ masvAuthorizedRange $ auditFileLink $ rbsAssignedRoles $ rbsOwnedCollections $ rbsAssignedRoles2 $ rbsOwnedCollections2 ) X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '16#subtree#[Creator]#[Entry Rights]' )",
"( 1.3.6.1.4.1.42.2.27.1.2.1 NAME 'aliasObject' SUP Top STRUCTURAL MUST aliasedObjectName X-NDS_NAME 'Alias' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.5.6.2 NAME 'Country' SUP Top STRUCTURAL MUST c MAY ( description $ searchGuide $ sssActiveServerList $ sssServerPolicyOverrideDN ) X-NDS_NAMING 'c' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'domain' ) X-NDS_NONREMOVABLE '1' )",
"( 2.5.6.3 NAME 'Locality' SUP Top STRUCTURAL MAY ( description $ l $ seeAlso $ st $ street $ searchGuide $ sssActiveServerList $ sssServerPolicyOverrideDN ) X-NDS_NAMING ( 'l' 'st' ) X-NDS_CONTAINMENT ( 'Country' 'organizationalUnit' 'Locality' 'Organization' 'domain' ) X-NDS_NONREMOVABLE '1' )",
"( 2.5.6.4 NAME 'Organization' SUP ( ndsLoginProperties $ ndsContainerLoginProperties ) STRUCTURAL MUST o MAY ( description $ facsimileTelephoneNumber $ l $ loginScript $ eMailAddress $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ printJobConfiguration $ printerControl $ seeAlso $ st $ street $ telephoneNumber $ loginIntruderLimit $ intruderAttemptResetInterval $ detectIntruder $ lockoutAfterDetection $ intruderLockoutResetInterval $ nNSDomain $ mailboxLocation $ mailboxID $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ internationaliSDNNumber $ businessCategory $ searchGuide $ rADIUSAttributeLists $ rADIUSDefaultProfile $ rADIUSDialAccessGroup $ rADIUSEnableDialAccess $ rADIUSServiceList $ sssActiveServerList $ sssServerPolicyOverrideDN $ userPassword ) X-NDS_NAMING 'o' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'Country' 'Locality' 'domain' ) X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Self]#loginScript' '2#entry#[Self]#printJobConfiguration') )",
"( 2.5.6.5 NAME 'organizationalUnit' SUP ( ndsLoginProperties $ ndsContainerLoginProperties ) STRUCTURAL MUST ou MAY ( description $ facsimileTelephoneNumber $ l $ loginScript $ eMailAddress $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ printJobConfiguration $ printerControl $ seeAlso $ st $ street $ telephoneNumber $ loginIntruderLimit $ intruderAttemptResetInterval $ detectIntruder $ lockoutAfterDetection $ intruderLockoutResetInterval $ nNSDomain $ mailboxLocation $ mailboxID $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ internationaliSDNNumber $ businessCategory $ searchGuide $ rADIUSAttributeLists $ rADIUSDefaultProfile $ rADIUSDialAccessGroup $ rADIUSEnableDialAccess $ rADIUSServiceList $ sssActiveServerList $ sssServerPolicyOverrideDN $ userPassword ) X-NDS_NAMING 'ou' X-NDS_CONTAINMENT ( 'Locality' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Organizational Unit' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Self]#loginScript' '2#entry#[Self]#printJobConfiguration') )",
"( 2.5.6.8 NAME 'organizationalRole' SUP Top STRUCTURAL MUST cn MAY ( description $ facsimileTelephoneNumber $ l $ eMailAddress $ ou $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ roleOccupant $ seeAlso $ st $ street $ telephoneNumber $ mailboxLocation $ mailboxID $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ internationaliSDNNumber ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Organizational Role' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.5.6.9 NAME ( 'groupOfNames' 'group' 'groupOfUniqueNames' ) SUP Top STRUCTURAL MUST cn MAY ( description $ l $ member $ ou $ o $ owner $ seeAlso $ groupID $ fullName $ eMailAddress $ mailboxLocation $ mailboxID $ Profile $ profileMembership $ loginScript $ businessCategory $ nspmPasswordPolicyDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Group' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.5.6.6 NAME 'Person' SUP ndsLoginProperties STRUCTURAL MUST ( cn $ sn ) MAY ( description $ seeAlso $ telephoneNumber $ fullName $ givenName $ initials $ generationQualifier $ uid $ assistant $ assistantPhone $ city $ st $ company $ co $ directReports $ manager $ mailstop $ mobile $ personalTitle $ pager $ workforceID $ instantMessagingID $ preferredName $ photo $ jobCode $ siteLocation $ employeeStatus $ employeeType $ costCenter $ costCenterDescription $ tollFreePhoneNumber $ otherPhoneNumber $ managerWorkforceID $ roomNumber $ jackNumber $ departmentNumber $ vehicleInformation $ accessCardNumber $ isManager $ userPassword ) X-NDS_NAMING ( 'cn' 'uid' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.5.6.7 NAME 'organizationalPerson' SUP Person STRUCTURAL MAY ( facsimileTelephoneNumber $ l $ eMailAddress $ ou $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ st $ street $ title $ mailboxLocation $ mailboxID $ uid $ mail $ employeeNumber $ destinationIndicator $ internationaliSDNNumber $ preferredDeliveryMethod $ registeredAddress $ teletexTerminalIdentifier $ telexNumber $ x121Address $ businessCategory $ roomNumber $ x500UniqueIdentifier ) X-NDS_NAMING ( 'cn' 'ou' 'uid' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Organizational Person' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113730.3.2.2 NAME 'inetOrgPerson' SUP organizationalPerson STRUCTURAL MAY ( groupMembership $ ndsHomeDirectory $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginGraceLimit $ loginGraceRemaining $ loginIntruderAddress $ loginIntruderAttempts $ loginIntruderResetTime $ loginMaximumSimultaneous $ loginScript $ loginTime $ networkAddressRestriction $ networkAddress $ passwordsUsed $ passwordAllowChange $ passwordExpirationInterval $ passwordExpirationTime $ passwordMinimumLength $ passwordRequired $ passwordUniqueRequired $ printJobConfiguration $ privateKey $ Profile $ publicKey $ securityEquals $ accountBalance $ allowUnlimitedCredit $ minimumAccountBalance $ messageServer $ Language $ ndsUID $ lockedByIntruder $ serverHolds $ lastLoginTime $ typeCreatorMap $ higherPrivileges $ printerControl $ securityFlags $ profileMembership $ Timezone $ sASServiceDN $ sASSecretStore $ sASSecretStoreKey $ sASSecretStoreData $ sASPKIStoreKeys $ userCertificate $ nDSPKIUserCertificateInfo $ nDSPKIKeystore $ rADIUSActiveConnections $ rADIUSAttributeLists $ rADIUSConcurrentLimit $ rADIUSConnectionHistory $ rADIUSDefaultProfile $ rADIUSDialAccessGroup $ rADIUSEnableDialAccess $ rADIUSPassword $ rADIUSServiceList $ audio $ businessCategory $ carLicense $ departmentNumber $ employeeNumber $ employeeType $ displayName $ givenName $ homePhone $ homePostalAddress $ initials $ jpegPhoto $ labeledUri $ mail $ manager $ mobile $ o $ pager $ ldapPhoto $ preferredLanguage $ roomNumber $ secretary $ uid $ userSMIMECertificate $ x500UniqueIdentifier $ userPKCS12 $ sssProxyStoreKey $ sssProxyStoreSecrets $ sssServerPolicyOverrideDN ) X-NDS_NAME 'User' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#subtree#[Self]#[All Attributes Rights]' '6#entry#[Self]#loginScript' '1#subtree#[Root Template]#[Entry Rights]' '2#entry#[Public]#messageServer' '2#entry#[Root Template]#groupMembership' '6#entry#[Self]#printJobConfiguration' '2#entry#[Root Template]#networkAddress') )",
"( 2.5.6.14 NAME 'Device' SUP Top STRUCTURAL MUST cn MAY ( description $ l $ networkAddress $ ou $ o $ owner $ seeAlso $ serialNumber ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.1.4 NAME 'Computer' SUP Device STRUCTURAL MAY ( operator $ server $ status ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.1.17 NAME 'Printer' SUP Device STRUCTURAL MAY ( Cartridge $ printerConfiguration $ defaultQueue $ hostDevice $ printServer $ Memory $ networkAddressRestriction $ notify $ operator $ pageDescriptionLanguage $ queue $ status $ supportedTypefaces ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.1.21 NAME 'Resource' SUP Top ABSTRACT MUST cn MAY ( description $ hostResourceName $ l $ ou $ o $ seeAlso $ Uses ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.1.20 NAME 'Queue' SUP Resource STRUCTURAL MUST queueDirectory MAY ( Device $ operator $ server $ User $ networkAddress $ Volume $ hostServer ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#subtree#[Root Template]#[All Attributes Rights]' )",
"( 2.16.840.1.113719.1.1.6.1.3 NAME 'binderyQueue' SUP Queue STRUCTURAL MUST binderyType X-NDS_NAMING ( 'cn' 'binderyType' ) X-NDS_NAME 'Bindery Queue' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#subtree#[Root Template]#[All Attributes Rights]' )",
"( 2.16.840.1.113719.1.1.6.1.26 NAME 'Volume' SUP Resource STRUCTURAL MUST hostServer MAY status X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Root Template]#hostResourceName' '2#entry#[Root Template]#hostServer') )",
"( 2.16.840.1.113719.1.1.6.1.7 NAME 'directoryMap' SUP Resource STRUCTURAL MUST hostServer MAY path X-NDS_NAME 'Directory Map' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.1.19 NAME 'Profile' SUP Top STRUCTURAL MUST ( cn $ loginScript ) MAY ( description $ l $ ou $ o $ seeAlso $ fullName ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.1.22 NAME 'Server' SUP Top ABSTRACT MUST cn MAY ( description $ hostDevice $ l $ ou $ o $ privateKey $ publicKey $ Resource $ seeAlso $ status $ User $ Version $ networkAddress $ accountBalance $ allowUnlimitedCredit $ minimumAccountBalance $ fullName $ securityEquals $ securityFlags $ Timezone $ ndapClassPasswordMgmt $ ndapClassLoginMgmt ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Public]#networkAddress' '16#subtree#[Self]#[Entry Rights]') )",
"( 2.16.840.1.113719.1.1.6.1.10 NAME 'ncpServer' SUP Server STRUCTURAL MAY ( operator $ supportedServices $ messagingServer $ dsRevision $ permanentConfigParms $ ndsPredicateStatsDN $ languageId $ indexDefinition $ CachedAttrsOnExtRefs $ NCPKeyMaterialName $ NDSRightsToMonitor $ ldapServerDN $ httpServerDN $ emboxConfig $ sASServiceDN $ cACertificate $ cAECCertificate $ nDSPKIPublicKey $ nDSPKIPrivateKey $ nDSPKICertificateChain $ nDSPKIParentCADN $ nDSPKISDKeyID $ nDSPKISDKeyStruct $ snmpGroupDN $ wANMANWANPolicy $ wANMANLANAreaMembership $ wANMANCost $ wANMANDefaultCost $ encryptionPolicyDN $ eDirCloneSource $ eDirCloneLock $ xdasDSConfiguration $ xdasConfiguration $ xdasVersion $ NAuditLoggingServer $ NAuditInstrumentation $ cefConfiguration $ cefVersion ) X-NDS_NAME 'NCP Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#entry#[Public]#messagingServer' )",
"( 2.16.840.1.113719.1.1.6.1.18 NAME 'printServer' SUP Server STRUCTURAL MAY ( operator $ printer $ sAPName ) X-NDS_NAME 'Print Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#subtree#[Root Template]#[All Attributes Rights]' )",
"( 2.16.840.1.113719.1.1.6.1.31 NAME 'CommExec' SUP Server STRUCTURAL MAY networkAddressRestriction X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.1.2 NAME 'binderyObject' SUP Top STRUCTURAL MUST ( binderyObjectRestriction $ binderyType $ cn ) X-NDS_NAMING ( 'cn' 'binderyType' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Bindery Object' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.1.15 NAME 'Partition' AUXILIARY MAY ( Convergence $ partitionCreationTime $ Replica $ inheritedACL $ lowConvergenceSyncInterval $ receivedUpTo $ synchronizedUpTo $ authorityRevocation $ certificateRevocation $ cAPrivateKey $ cAPublicKey $ ndsCrossCertificatePair $ lowConvergenceResetTime $ highConvergenceSyncInterval $ partitionControl $ replicaUpTo $ partitionStatus $ transitiveVector $ purgeVector $ synchronizationTolerance $ obituaryNotify $ localReceivedUpTo $ federationControl $ syncPanePoint $ syncWindowVector $ EBAPartitionConfiguration $ authoritative $ allowAliasToAncestor $ sASSecurityDN $ masvLabel $ ndapPartitionPasswordMgmt $ ndapPartitionLoginMgmt $ prSyncPolicyDN $ dsEncryptedReplicationConfig ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.1.0 NAME 'aFPServer' SUP Server STRUCTURAL MAY ( serialNumber $ supportedConnections ) X-NDS_NAME 'AFP Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.1.27 NAME 'messagingServer' SUP Server STRUCTURAL MAY ( messagingDatabaseLocation $ messageRoutingGroup $ Postmaster $ supportedServices $ messagingServerType $ supportedGateway ) X-NDS_NAME 'Messaging Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '1#subtree#[Self]#[Entry Rights]' '2#subtree#[Self]#[All Attributes Rights]' '6#entry#[Self]#status' '2#entry#[Public]#messagingServerType' '2#entry#[Public]#messagingDatabaseLocation') )",
"( 2.16.840.1.113719.1.1.6.1.28 NAME 'messageRoutingGroup' SUP groupOfNames STRUCTURAL X-NDS_NAME 'Message Routing Group' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '1#subtree#[Self]#[Entry Rights]' '2#subtree#[Self]#[All Attributes Rights]') )",
"( 2.16.840.1.113719.1.1.6.1.29 NAME 'externalEntity' SUP Top STRUCTURAL MUST cn MAY ( description $ seeAlso $ facsimileTelephoneNumber $ l $ eMailAddress $ ou $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ st $ street $ title $ externalName $ mailboxLocation $ mailboxID ) X-NDS_NAMING ( 'cn' 'ou' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'External Entity' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#entry#[Public]#externalName' )",
"( 2.16.840.1.113719.1.1.6.1.30 NAME 'List' SUP Top STRUCTURAL MUST cn MAY ( description $ l $ member $ ou $ o $ eMailAddress $ mailboxLocation $ mailboxID $ owner $ seeAlso $ fullName ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#entry#[Root Template]#member' )",
"( 2.16.840.1.113719.1.1.6.1.32 NAME 'treeRoot' SUP Top STRUCTURAL MUST T MAY ( EBATreeConfiguration $ sssActiveServerList ) X-NDS_NAMING 'T' X-NDS_NAME 'Tree Root' X-NDS_NONREMOVABLE '1' )",
"( 0.9.2342.19200300.100.4.13 NAME 'domain' SUP ( Top $ ndsLoginProperties $ ndsContainerLoginProperties ) STRUCTURAL MUST dc MAY ( searchGuide $ o $ seeAlso $ businessCategory $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ l $ associatedName $ description $ sssActiveServerList $ sssServerPolicyOverrideDN $ userPassword ) X-NDS_NAMING 'dc' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'Country' 'Locality' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NONREMOVABLE '1' )",
"( 1.3.6.1.4.1.1466.344 NAME 'dcObject' AUXILIARY MUST dc X-NDS_NAMING 'dc' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.1.33 NAME 'ndsLoginProperties' SUP Top ABSTRACT MAY ( groupMembership $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginGraceLimit $ loginGraceRemaining $ loginIntruderAddress $ loginIntruderAttempts $ loginIntruderResetTime $ loginMaximumSimultaneous $ loginScript $ loginTime $ networkAddressRestriction $ networkAddress $ passwordsUsed $ passwordAllowChange $ passwordExpirationInterval $ passwordExpirationTime $ passwordMinimumLength $ passwordRequired $ passwordUniqueRequired $ privateKey $ Profile $ publicKey $ securityEquals $ accountBalance $ allowUnlimitedCredit $ minimumAccountBalance $ Language $ lockedByIntruder $ serverHolds $ lastLoginTime $ higherPrivileges $ securityFlags $ profileMembership $ Timezone $ loginActivationTime $ UTF8LoginScript $ loginScriptCharset $ sASNDSPasswordWindow $ sASLoginSecret $ sASLoginSecretKey $ sASEncryptionType $ sASLoginConfiguration $ sASLoginConfigurationKey $ sasLoginFailureDelay $ sasDefaultLoginSequence $ sasAuthorizedLoginSequences $ sasAllowableSubjectNames $ sasUpdateLoginInfo $ sasOTPEnabled $ sasOTPCounter $ sasOTPDigits $ sasOTPReSync $ sasUpdateLoginTimeInterval $ ndapPasswordMgmt $ ndapLoginMgmt $ nspmPasswordKey $ nspmPassword $ pwdChangedTime $ pwdAccountLockedTime $ pwdFailureTime $ nspmDoNotExpirePassword $ nspmDistributionPassword $ nspmPreviousDistributionPassword $ nspmPasswordHistory $ nspmAdministratorChangeCount $ nspmPasswordPolicyDN $ nsimHint $ nsimPasswordReminder $ userPassword ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.141.6.1 NAME 'federationBoundary' AUXILIARY MUST federationBoundaryType MAY ( federationControl $ federationDNSName $ federationSearchPath ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.1.34 NAME 'ndsContainerLoginProperties' SUP Top ABSTRACT MAY ( loginIntruderLimit $ intruderAttemptResetInterval $ detectIntruder $ lockoutAfterDetection $ intruderLockoutResetInterval $ sasLoginFailureDelay $ sasDefaultLoginSequence $ sasAuthorizedLoginSequences $ sasUpdateLoginInfo $ sasOTPEnabled $ sasOTPDigits $ sasUpdateLoginTimeInterval $ ndapPasswordMgmt $ ndapLoginMgmt $ nspmPasswordPolicyDN ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.27.6.3 NAME 'ndsPredicateStats' SUP Top STRUCTURAL MUST ( cn $ ndsPredicateState $ ndsPredicateFlush ) MAY ( ndsPredicate $ ndsPredicateTimeout $ ndsPredicateUseValues ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.400.1 NAME 'edirSchemaVersion' SUP Top ABSTRACT MAY edirSchemaFlagVersion X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
"( 2.16.840.1.113719.1.1.6.1.47 NAME 'immediateSuperiorReference' AUXILIARY MAY ref X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.27.6.1 NAME 'ldapServer' SUP Top STRUCTURAL MUST cn MAY ( ldapHostServer $ ldapGroupDN $ ldapTraceLevel $ ldapServerBindLimit $ ldapServerIdleTimeout $ lDAPUDPPort $ lDAPSearchSizeLimit $ lDAPSearchTimeLimit $ lDAPLogLevel $ lDAPLogFilename $ lDAPBackupLogFilename $ lDAPLogSizeLimit $ Version $ searchSizeLimit $ searchTimeLimit $ ldapEnableTCP $ ldapTCPPort $ ldapEnableSSL $ ldapSSLPort $ ldapKeyMaterialName $ filteredReplicaUsage $ extensionInfo $ nonStdClientSchemaCompatMode $ sslEnableMutualAuthentication $ ldapEnablePSearch $ ldapMaximumPSearchOperations $ ldapIgnorePSearchLimitsForEvents $ ldapTLSTrustedRootContainer $ ldapEnableMonitorEvents $ ldapMaximumMonitorEventsLoad $ ldapTLSRequired $ ldapTLSVerifyClientCertificate $ ldapConfigVersion $ ldapDerefAlias $ ldapNonStdAllUserAttrsMode $ ldapBindRestrictions $ ldapDefaultReferralBehavior $ ldapReferral $ ldapSearchReferralUsage $ lDAPOtherReferralUsage $ ldapLBURPNumWriterThreads $ ldapInterfaces $ ldapChainSecureRequired $ ldapStdCompliance $ ldapDerefAliasOnAuth $ ldapGeneralizedTime $ ldapPermissiveModify $ ldapSSLConfig ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) X-NDS_NAME 'LDAP Server' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.27.6.2 NAME 'ldapGroup' SUP Top STRUCTURAL MUST cn MAY ( ldapReferral $ ldapServerList $ ldapAllowClearTextPassword $ ldapAnonymousIdentity $ lDAPSuffix $ ldapAttributeMap $ ldapClassMap $ ldapSearchReferralUsage $ lDAPOtherReferralUsage $ transitionGroupDN $ ldapAttributeList $ ldapClassList $ ldapConfigVersion $ Version $ ldapDefaultReferralBehavior $ ldapTransitionBackLink $ ldapSSLConfig $ referralIncludeFilter $ referralExcludeFilter ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) X-NDS_NAME 'LDAP Group' X-NDS_NOT_CONTAINER '1' )",
"( 2.5.6.22 NAME 'pkiCA' AUXILIARY MAY ( cACertificate $ certificateRevocationList $ authorityRevocationList $ crossCertificatePair $ attributeCertificate $ publicKey $ privateKey $ networkAddress $ loginTime $ lastLoginTime $ cAECCertificate $ crossCertificatePairEC ) X-NDS_NOT_CONTAINER '1' )",
"( 2.5.6.21 NAME 'pkiUser' AUXILIARY MAY userCertificate X-NDS_NOT_CONTAINER '1' )",
"( 2.5.6.15 NAME 'strongAuthenticationUser' AUXILIARY MAY userCertificate X-NDS_NOT_CONTAINER '1' )",
"( 2.5.6.11 NAME 'applicationProcess' SUP Top STRUCTURAL MUST cn MAY ( seeAlso $ ou $ l $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
"( 2.5.6.12 NAME 'applicationEntity' SUP Top STRUCTURAL MUST ( presentationAddress $ cn ) MAY ( supportedApplicationContext $ seeAlso $ ou $ o $ l $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
"( 2.5.6.13 NAME 'dSA' SUP applicationEntity STRUCTURAL MAY knowledgeInformation X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
"( 2.5.6.16 NAME 'certificationAuthority' AUXILIARY MUST ( authorityRevocationList $ certificateRevocationList $ cACertificate ) MAY crossCertificatePair X-NDS_NOT_CONTAINER '1' )",
"( 2.5.6.18 NAME 'userSecurityInformation' AUXILIARY MAY supportedAlgorithms X-NDS_NOT_CONTAINER '1' )",
"( 2.5.6.20 NAME 'dmd' SUP ndsLoginProperties AUXILIARY MUST dmdName MAY ( searchGuide $ seeAlso $ businessCategory $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ l $ description $ userPassword ) X-NDS_NOT_CONTAINER '1' )",
"( 2.5.6.16.2 NAME 'certificationAuthority-V2' AUXILIARY MUST ( authorityRevocationList $ certificateRevocationList $ cACertificate ) MAY ( crossCertificatePair $ deltaRevocationList ) X-NDS_NAME 'certificationAuthorityVer2' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.3.6.1 NAME 'httpServer' SUP Top STRUCTURAL MUST cn MAY ( httpHostServerDN $ httpThreadsPerCPU $ httpIOBufferSize $ httpRequestTimeout $ httpKeepAliveRequestTimeout $ httpSessionTimeout $ httpKeyMaterialObject $ httpTraceLevel $ httpAuthRequiresTLS $ httpDefaultClearPort $ httpDefaultTLSPort $ httpBindRestrictions ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'domain' 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.64.6.1.1 NAME 'Template' SUP Top STRUCTURAL MUST cn MAY ( trusteesOfNewObject $ newObjectSDSRights $ newObjectSFSRights $ setupScript $ runSetupScript $ membersOfTemplate $ volumeSpaceRestrictions $ setPasswordAfterCreate $ homeDirectoryRights $ accountBalance $ allowUnlimitedCredit $ description $ eMailAddress $ facsimileTelephoneNumber $ groupMembership $ higherPrivileges $ ndsHomeDirectory $ l $ Language $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginGraceLimit $ loginMaximumSimultaneous $ loginScript $ mailboxID $ mailboxLocation $ member $ messageServer $ minimumAccountBalance $ networkAddressRestriction $ newObjectSSelfRights $ ou $ passwordAllowChange $ passwordExpirationInterval $ passwordExpirationTime $ passwordMinimumLength $ passwordRequired $ passwordUniqueRequired $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ Profile $ st $ street $ securityEquals $ securityFlags $ seeAlso $ telephoneNumber $ title $ assistant $ assistantPhone $ city $ company $ co $ manager $ managerWorkforceID $ mailstop $ siteLocation $ employeeType $ costCenter $ costCenterDescription $ tollFreePhoneNumber $ departmentNumber ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.8.6.1 NAME 'homeInfo' AUXILIARY MAY ( homeCity $ homeEmailAddress $ homeFax $ homePhone $ homeState $ homePostalAddress $ homeZipCode $ personalMobile $ spouse $ children ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.8.6.2 NAME 'contingentWorker' AUXILIARY MAY ( vendorName $ vendorAddress $ vendorPhoneNumber ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.1.6.1.45 NAME 'dynamicGroup' SUP ( groupOfNames $ ndsLoginProperties ) STRUCTURAL MAY ( memberQueryURL $ excludedMember $ dgIdentity $ dgAllowUnknown $ dgTimeOut $ dgAllowDuplicates $ userPassword ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.1.6.1.46 NAME 'dynamicGroupAux' SUP ( groupOfNames $ ndsLoginProperties ) AUXILIARY MAY ( memberQueryURL $ excludedMember $ dgIdentity $ dgAllowUnknown $ dgTimeOut $ dgAllowDuplicates $ userPassword ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.39.6.1.1 NAME 'sASSecurity' SUP Top STRUCTURAL MUST cn MAY ( nDSPKITreeCADN $ masvPolicyDN $ sASLoginPolicyDN $ sASLoginMethodContainerDN $ sasPostLoginMethodContainerDN $ nspmPolicyAgentContainerDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'Country' 'Organization' 'domain' ) X-NDS_NAME 'SAS:Security' )",
"( 2.16.840.1.113719.1.39.6.1.2 NAME 'sASService' SUP Resource STRUCTURAL MAY ( hostServer $ privateKey $ publicKey $ allowUnlimitedCredit $ fullName $ lastLoginTime $ lockedByIntruder $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginIntruderAddress $ loginIntruderAttempts $ loginIntruderResetTime $ loginMaximumSimultaneous $ loginTime $ networkAddress $ networkAddressRestriction $ notify $ operator $ owner $ path $ securityEquals $ securityFlags $ status $ Version $ nDSPKIKeyMaterialDN $ ndspkiKMOExport ) X-NDS_NAMING 'cn' X-NDS_NAME 'SAS:Service' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.48.6.1.1 NAME 'nDSPKICertificateAuthority' SUP Top STRUCTURAL MUST cn MAY ( hostServer $ nDSPKIPublicKey $ nDSPKIPrivateKey $ nDSPKIPublicKeyCertificate $ nDSPKICertificateChain $ nDSPKICertificateChainEC $ nDSPKIParentCA $ nDSPKIParentCADN $ nDSPKISubjectName $ nDSPKIPublicKeyEC $ nDSPKIPrivateKeyEC $ nDSPKIPublicKeyCertificateEC $ crossCertificatePairEC $ nDSPKISuiteBMode $ cACertificate $ cAECCertificate $ ndspkiCRLContainerDN $ ndspkiIssuedCertContainerDN $ ndspkiCRLConfigurationDNList $ ndspkiCRLECConfigurationDNList $ ndspkiSecurityRightsLevel $ ndspkiDefaultRSAKeySize $ ndspkiDefaultECCurve $ ndspkiDefaultCertificateLife ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'NDSPKI:Certificate Authority' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.48.6.1.2 NAME 'nDSPKIKeyMaterial' SUP Top STRUCTURAL MUST cn MAY ( hostServer $ nDSPKIKeyFile $ nDSPKIPrivateKey $ nDSPKIPublicKey $ nDSPKIPublicKeyCertificate $ nDSPKICertificateChain $ nDSPKISubjectName $ nDSPKIGivenName $ ndspkiAdditionalRoots $ nDSPKINotBefore $ nDSPKINotAfter ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'NDSPKI:Key Material' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.48.6.1.3 NAME 'nDSPKITrustedRoot' SUP Top STRUCTURAL MUST cn MAY ndspkiTrustedRootList X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'domain' ) X-NDS_NAME 'NDSPKI:Trusted Root' )",
"( 2.16.840.1.113719.1.48.6.1.4 NAME 'nDSPKITrustedRootObject' SUP Top STRUCTURAL MUST ( cn $ nDSPKITrustedRootCertificate ) MAY ( nDSPKISubjectName $ nDSPKINotBefore $ nDSPKINotAfter $ externalName $ givenName $ sn ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'nDSPKITrustedRoot' X-NDS_NAME 'NDSPKI:Trusted Root Object' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.48.6.1.101 NAME 'nDSPKISDKeyAccessPartition' SUP Top STRUCTURAL MUST cn X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'NDSPKI:SD Key Access Partition' )",
"( 2.16.840.1.113719.1.48.6.1.102 NAME 'nDSPKISDKeyList' SUP Top STRUCTURAL MUST cn MAY ( nDSPKISDKeyServerDN $ nDSPKISDKeyStruct $ nDSPKISDKeyCert ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'nDSPKISDKeyAccessPartition' X-NDS_NAME 'NDSPKI:SD Key List' )",
"( 2.16.840.1.113719.1.31.6.2.1 NAME 'mASVSecurityPolicy' SUP Top STRUCTURAL MUST cn MAY ( description $ masvDomainPolicy $ masvPolicyUpdate $ masvClearanceNames $ masvLabelNames $ masvLabelSecrecyLevelNames $ masvLabelSecrecyCategoryNames $ masvLabelIntegrityLevelNames $ masvLabelIntegrityCategoryNames $ masvNDSAttributeLabels ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'MASV:Security Policy' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.39.42.2.0.1 NAME 'sASLoginMethodContainer' SUP Top STRUCTURAL MUST cn MAY description X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NAME 'SAS:Login Method Container' )",
"( 2.16.840.1.113719.1.39.42.2.0.4 NAME 'sASLoginPolicy' SUP Top STRUCTURAL MUST cn MAY ( description $ privateKey $ publicKey $ sASAllowNDSPasswordWindow $ sASPolicyCredentials $ sASPolicyMethods $ sASPolicyObjectVersion $ sASPolicyServiceSubtypes $ sASPolicyServices $ sASPolicyUsers $ sASLoginSequence $ sASLoginPolicyUpdate $ sasNMASProductOptions $ sasPolicyMethods $ sasPolicyServices $ sasPolicyUsers $ sasAllowNDSPasswordWindow $ sasLoginFailureDelay $ sasDefaultLoginSequence $ sasAuthorizedLoginSequences $ sasAuditConfiguration $ sasUpdateLoginInfo $ sasOTPEnabled $ sasOTPLookAheadWindow $ sasOTPDigits $ sasUpdateLoginTimeInterval $ nspmPasswordPolicyDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'SAS:Login Policy' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.39.42.2.0.7 NAME 'sASNMASBaseLoginMethod' SUP Top ABSTRACT MUST cn MAY ( description $ sASLoginSecret $ sASLoginSecretKey $ sASEncryptionType $ sASLoginConfiguration $ sASLoginConfigurationKey $ sASMethodIdentifier $ sASMethodVendor $ sASVendorSupport $ sASAdvisoryMethodGrade $ sASLoginClientMethodNetWare $ sASLoginServerMethodNetWare $ sASLoginClientMethodWINNT $ sASLoginServerMethodWINNT $ sasCertificateSearchContainers $ sasNMASMethodConfigData $ sasMethodVersion $ sASLoginPolicyUpdate $ sasUnsignedMethodModules $ sasServerModuleName $ sasServerModuleEntryPointName $ sasSASLMechanismName $ sasSASLMechanismEntryPointName $ sasClientModuleName $ sasClientModuleEntryPointName $ sasLoginClientMethodSolaris $ sasLoginServerMethodSolaris $ sasLoginClientMethodLinux $ sasLoginServerMethodLinux $ sasLoginClientMethodTru64 $ sasLoginServerMethodTru64 $ sasLoginClientMethodAIX $ sasLoginServerMethodAIX $ sasLoginClientMethodHPUX $ sasLoginServerMethodHPUX $ sasLoginClientMethods390 $ sasLoginServerMethods390 $ sasLoginClientMethodLinuxX64 $ sasLoginServerMethodLinuxX64 $ sasLoginClientMethodWinX64 $ sasLoginServerMethodWinX64 $ sasLoginClientMethodSolaris64 $ sasLoginServerMethodSolaris64 $ sasLoginClientMethodSolarisi386 $ sasLoginServerMethodSolarisi386 $ sasLoginClientMethodAIX64 $ sasLoginServerMethodAIX64 ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASLoginMethodContainer' X-NDS_NAME 'SAS:NMAS Base Login Method' )",
"( 2.16.840.1.113719.1.39.42.2.0.8 NAME 'sASNMASLoginMethod' SUP sASNMASBaseLoginMethod STRUCTURAL X-NDS_NAME 'SAS:NMAS Login Method' )",
"( 2.16.840.1.113719.1.39.42.2.0.9 NAME 'rADIUSDialAccessSystem' SUP Top STRUCTURAL MUST cn MAY ( publicKey $ privateKey $ rADIUSAgedInterval $ rADIUSClient $ rADIUSCommonNameResolution $ rADIUSConcurrentLimit $ rADIUSDASVersion $ rADIUSEnableCommonNameLogin $ rADIUSEnableDialAccess $ rADIUSInterimAcctingTimeout $ rADIUSLookupContexts $ rADIUSMaxDASHistoryRecord $ rADIUSMaximumHistoryRecord $ rADIUSPasswordPolicy $ rADIUSPrivateKey $ rADIUSProxyContext $ rADIUSProxyDomain $ rADIUSProxyTarget $ rADIUSPublicKey $ sASLoginConfiguration $ sASLoginConfigurationKey ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NAME 'RADIUS:Dial Access System' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.39.42.2.0.10 NAME 'rADIUSProfile' SUP Top STRUCTURAL MUST cn MAY rADIUSAttributeList X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NAME 'RADIUS:Profile' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.39.42.2.0.11 NAME 'sasPostLoginMethodContainer' SUP Top STRUCTURAL MUST cn MAY description X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' )",
"( 2.16.840.1.113719.1.39.42.2.0.12 NAME 'sasPostLoginMethod' SUP Top STRUCTURAL MUST cn MAY ( description $ sASLoginSecret $ sASLoginSecretKey $ sASEncryptionType $ sASLoginConfiguration $ sASLoginConfigurationKey $ sASMethodIdentifier $ sASMethodVendor $ sASVendorSupport $ sASAdvisoryMethodGrade $ sASLoginClientMethodNetWare $ sASLoginServerMethodNetWare $ sASLoginClientMethodWINNT $ sASLoginServerMethodWINNT $ sasMethodVersion $ sASLoginPolicyUpdate $ sasUnsignedMethodModules $ sasServerModuleName $ sasServerModuleEntryPointName $ sasSASLMechanismName $ sasSASLMechanismEntryPointName $ sasClientModuleName $ sasClientModuleEntryPointName $ sasLoginClientMethodSolaris $ sasLoginServerMethodSolaris $ sasLoginClientMethodLinux $ sasLoginServerMethodLinux $ sasLoginClientMethodTru64 $ sasLoginServerMethodTru64 $ sasLoginClientMethodAIX $ sasLoginServerMethodAIX $ sasLoginClientMethodHPUX $ sasLoginServerMethodHPUX $ sasLoginClientMethods390 $ sasLoginServerMethods390 $ sasLoginClientMethodLinuxX64 $ sasLoginServerMethodLinuxX64 $ sasLoginClientMethodWinX64 $ sasLoginServerMethodWinX64 $ sasLoginClientMethodSolaris64 $ sasLoginServerMethodSolaris64 $ sasLoginClientMethodSolarisi386 $ sasLoginServerMethodSolarisi386 $ sasLoginClientMethodAIX64 $ sasLoginServerMethodAIX64 ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sasPostLoginMethodContainer' )",
"( 2.16.840.1.113719.1.6.6.1 NAME 'snmpGroup' SUP Top STRUCTURAL MUST cn MAY ( Version $ snmpServerList $ snmpTrapDisable $ snmpTrapInterval $ snmpTrapDescription $ snmpTrapConfig ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'domain' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.39.43.6.2 NAME 'nspmPasswordPolicyContainer' SUP Top STRUCTURAL MUST cn MAY description X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Country' 'domain' 'Locality' 'Organization' 'organizationalUnit' ) )",
"( 2.16.840.1.113719.1.39.43.6.3 NAME 'nspmPolicyAgent' SUP Top STRUCTURAL MUST cn MAY ( description $ nspmPolicyAgentNetWare $ nspmPolicyAgentWINNT $ nspmPolicyAgentSolaris $ nspmPolicyAgentLinux $ nspmPolicyAgentAIX $ nspmPolicyAgentHPUX ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'nspmPasswordPolicyContainer' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.39.43.6.1 NAME 'nspmPasswordPolicy' SUP Top STRUCTURAL MUST cn MAY ( description $ nspmPolicyPrecedence $ nspmConfigurationOptions $ nspmChangePasswordMessage $ passwordExpirationInterval $ loginGraceLimit $ nspmMinPasswordLifetime $ passwordUniqueRequired $ nspmPasswordHistoryLimit $ nspmPasswordHistoryExpiration $ passwordAllowChange $ passwordRequired $ passwordMinimumLength $ nspmMaximumLength $ nspmCaseSensitive $ nspmMinUpperCaseCharacters $ nspmMaxUpperCaseCharacters $ nspmMinLowerCaseCharacters $ nspmMaxLowerCaseCharacters $ nspmNumericCharactersAllowed $ nspmNumericAsFirstCharacter $ nspmNumericAsLastCharacter $ nspmMinNumericCharacters $ nspmMaxNumericCharacters $ nspmSpecialCharactersAllowed $ nspmSpecialAsFirstCharacter $ nspmSpecialAsLastCharacter $ nspmMinSpecialCharacters $ nspmMaxSpecialCharacters $ nspmMaxRepeatedCharacters $ nspmMaxConsecutiveCharacters $ nspmMinUniqueCharacters $ nspmDisallowedAttributeValues $ nspmExcludeList $ nspmExtendedCharactersAllowed $ nspmExtendedAsFirstCharacter $ nspmExtendedAsLastCharacter $ nspmMinExtendedCharacters $ nspmMaxExtendedCharacters $ nspmUpperAsFirstCharacter $ nspmUpperAsLastCharacter $ nspmLowerAsFirstCharacter $ nspmLowerAsLastCharacter $ nspmComplexityRules $ nspmAD2K8Syntax $ nspmAD2K8maxViolation $ nspmXCharLimit $ nspmXCharHistoryLimit $ nspmUnicodeAllowed $ nspmNonAlphaCharactersAllowed $ nspmMinNonAlphaCharacters $ nspmMaxNonAlphaCharacters $ pwdInHistory $ nspmAdminsDoNotExpirePassword $ nspmPasswordACL $ nsimChallengeSetDN $ nsimForgottenAction $ nsimForgottenLoginConfig $ nsimAssignments $ nsimChallengeSetGUID $ nsimPwdRuleEnforcement ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'nspmPasswordPolicyContainer' 'domain' 'Locality' 'Organization' 'organizationalUnit' 'Country' ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.39.43.6.4 NAME 'nspmPasswordAux' AUXILIARY MAY ( publicKey $ privateKey $ loginGraceLimit $ loginGraceRemaining $ passwordExpirationTime $ passwordRequired $ nspmPasswordKey $ nspmPassword $ nspmDistributionPassword $ nspmPreviousDistributionPassword $ nspmPasswordHistory $ nspmAdministratorChangeCount $ nspmPasswordPolicyDN $ pwdChangedTime $ pwdAccountLockedTime $ pwdFailureTime $ nspmDoNotExpirePassword ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.12.6.1.0 NAME 'auditFileObject' SUP Top STRUCTURAL MUST ( cn $ auditPolicy $ auditContents ) MAY ( description $ auditPath $ auditLinkList $ auditType $ auditCurrentEncryptionKey $ auditAEncryptionKey $ auditBEncryptionKey ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Top' 'Country' 'Locality' 'Organization' 'organizationalUnit' 'treeRoot' 'domain' ) X-NDS_NAME 'Audit:File Object' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.38.6.1.4 NAME 'wANMANLANArea' SUP Top STRUCTURAL MUST cn MAY ( description $ l $ member $ o $ ou $ owner $ seeAlso $ wANMANWANPolicy $ wANMANCost $ wANMANDefaultCost ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'Organization' 'organizationalUnit' ) X-NDS_NAME 'WANMAN:LAN Area' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.135.6.37.1 NAME 'rbsCollection' SUP Top STRUCTURAL MUST cn MAY ( owner $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
"( 2.16.840.1.113719.1.135.6.30.1 NAME 'rbsExternalScope' SUP Top ABSTRACT MUST cn MAY ( rbsURL $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.135.6.31.1 NAME 'rbsModule' SUP Top STRUCTURAL MUST cn MAY ( rbsURL $ rbsPath $ rbsType $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection' )",
"( 2.16.840.1.113719.1.135.6.32.1 NAME 'rbsRole' SUP Top STRUCTURAL MUST cn MAY ( rbsContent $ rbsMember $ rbsTrusteeOf $ rbsGALabel $ rbsParameters $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection' )",
"( 2.16.840.1.113719.1.135.6.33.1 NAME 'rbsTask' SUP Top STRUCTURAL MUST cn MAY ( rbsContentMembership $ rbsType $ rbsTaskRights $ rbsEntryPoint $ rbsParameters $ rbsTaskTemplates $ rbsTaskTemplatesURL $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsModule' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.135.6.34.1 NAME 'rbsBook' SUP rbsTask STRUCTURAL MAY ( rbsTargetObjectType $ rbsPageMembership ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.135.6.35.1 NAME 'rbsScope' SUP groupOfNames STRUCTURAL MAY ( rbsContext $ rbsXMLInfo ) X-NDS_CONTAINMENT 'rbsRole' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.135.6.45.1 NAME 'rbsCollection2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsParameters $ owner $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
"( 2.16.840.1.113719.1.135.6.38.1 NAME 'rbsExternalScope2' SUP Top ABSTRACT MUST cn MAY ( rbsXMLInfo $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection2' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.135.6.39.1 NAME 'rbsModule2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsPath $ rbsType $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection2' )",
"( 2.16.840.1.113719.1.135.6.40.1 NAME 'rbsRole2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsContent $ rbsMember $ rbsTrusteeOf $ rbsParameters $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection2' )",
"( 2.16.840.1.113719.1.135.6.41.1 NAME 'rbsTask2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsContentMembership $ rbsType $ rbsTaskRights $ rbsEntryPoint $ rbsParameters $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsModule2' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.135.6.42.1 NAME 'rbsBook2' SUP rbsTask2 STRUCTURAL MAY ( rbsTargetObjectType $ rbsPageMembership ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.135.6.43.1 NAME 'rbsScope2' SUP groupOfNames STRUCTURAL MAY ( rbsContext $ rbsXMLInfo ) X-NDS_CONTAINMENT 'rbsRole2' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.1.6.1.49 NAME 'prSyncPolicy' SUP Top STRUCTURAL MUST cn MAY prSyncAttributes X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'domain' 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.1.6.1.50 NAME 'encryptionPolicy' SUP Top STRUCTURAL MUST cn MAY ( attrEncryptionDefinition $ attrEncryptionRequiresSecure ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'domain' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.48.6.1.5 NAME 'ndspkiContainer' SUP Top STRUCTURAL MUST cn X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'ndspkiContainer' 'sASSecurity' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'nDSPKITrustedRoot' ) )",
"( 2.16.840.1.113719.1.48.6.1.6 NAME 'ndspkiCertificate' SUP Top STRUCTURAL MUST ( cn $ userCertificate ) MAY ( nDSPKISubjectName $ nDSPKINotBefore $ nDSPKINotAfter $ externalName $ givenName $ sn ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'ndspkiContainer' 'nDSPKITrustedRoot' ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.48.6.1.7 NAME 'ndspkiCRLConfiguration' SUP Top STRUCTURAL MUST cn MAY ( ndspkiCRLFileName $ ndspkiDirectory $ ndspkiStatus $ ndspkiIssueTime $ ndspkiNextIssueTime $ ndspkiAttemptTime $ ndspkiTimeInterval $ ndspkiCRLMaxProcessingInterval $ ndspkiCRLNumber $ ndspkiDistributionPoints $ ndspkiDistributionPointDN $ ndspkiCADN $ ndspkiCRLProcessData $ nDSPKIPublicKey $ nDSPKIPrivateKey $ nDSPKIPublicKeyCertificate $ nDSPKICertificateChain $ nDSPKIParentCA $ nDSPKIParentCADN $ nDSPKISubjectName $ cACertificate $ hostServer $ ndspkiCRLType $ ndspkiCRLExtendValidity ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'ndspkiContainer' )",
"( 2.5.6.19 NAME 'cRLDistributionPoint' SUP Top STRUCTURAL MUST cn MAY ( authorityRevocationList $ authorityRevocationList $ cACertificate $ certificateRevocationList $ certificateRevocationList $ crossCertificatePair $ deltaRevocationList $ deltaRevocationList $ ndspkiCRLConfigurationDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'sASSecurity' 'domain' 'ndspkiCRLConfiguration' ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.7.6.1 NAME 'notfTemplateCollection' SUP Top STRUCTURAL MUST cn MAY ( notfSMTPEmailHost $ notfSMTPEmailFrom $ notfSMTPEmailUserName $ sASSecretStore ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' )",
"( 2.16.840.1.113719.1.7.6.2 NAME 'notfMergeTemplate' SUP Top STRUCTURAL MUST cn MAY ( notfMergeTemplateData $ notfMergeTemplateSubject ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'notfTemplateCollection' X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.39.44.6.1 NAME 'nsimChallengeSet' SUP Top STRUCTURAL MUST cn MAY ( description $ nsimRequiredQuestions $ nsimRandomQuestions $ nsimNumberRandomQuestions $ nsimMinResponseLength $ nsimMaxResponseLength ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'nspmPasswordPolicyContainer' 'Country' 'domain' 'Locality' 'Organization' 'organizationalUnit' ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.266.6.1 NAME 'sssServerPolicies' SUP Top STRUCTURAL MUST cn MAY ( sssCacheRefreshInterval $ sssEnableReadTimestamps $ sssDisableMasterPasswords $ sssEnableAdminAccess $ sssAdminList $ sssAdminGALabel $ sssReadSecretPolicies ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' )",
"( 2.16.840.1.113719.1.266.6.2 NAME 'sssServerPolicyOverride' SUP Top STRUCTURAL MUST cn MAY ( sssCacheRefreshInterval $ sssEnableReadTimestamps $ sssDisableMasterPasswords $ sssEnableAdminAccess $ sssAdminList $ sssAdminGALabel $ sssReadSecretPolicies ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sssServerPolicies' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'domain' ) X-NDS_NOT_CONTAINER '1' )",
"( 2.16.840.1.113719.1.1.6.1.91 NAME 'nestedGroupAux' AUXILIARY MAY ( groupMember $ excludedMember $ nestedConfig $ groupMembership ) X-NDS_NOT_CONTAINER '1' )"
]
},
"schema_entry": "cn=schema",
"type": "SchemaInfo"
}
"""
edir_9_1_4_dsa_info = """
{
"raw": {
"abandonOps": [
"0"
],
"addEntryOps": [
"0"
],
"altServer": [],
"bindSecurityErrors": [
"0"
],
"chainings": [
"0"
],
"compareOps": [
"0"
],
"directoryTreeName": [
"TEST_TREE"
],
"dsaName": [
"cn=MYSERVER,o=resources"
],
"errors": [
"0"
],
"extendedOps": [
"0"
],
"inBytes": [
"293"
],
"inOps": [
"3"
],
"listOps": [
"0"
],
"modifyEntryOps": [
"0"
],
"modifyRDNOps": [
"0"
],
"namingContexts": [
""
],
"oneLevelSearchOps": [
"0"
],
"outBytes": [
"14"
],
"readOps": [
"1"
],
"referralsReturned": [
"0"
],
"removeEntryOps": [
"0"
],
"repUpdatesIn": [
"0"
],
"repUpdatesOut": [
"0"
],
"searchOps": [
"1"
],
"securityErrors": [
"0"
],
"simpleAuthBinds": [
"1"
],
"strongAuthBinds": [
"0"
],
"subschemaSubentry": [
"cn=schema"
],
"supportedCapabilities": [],
"supportedControl": [
"2.16.840.1.113719.1.27.101.6",
"2.16.840.1.113719.1.27.101.5",
"1.2.840.113556.1.4.319",
"2.16.840.1.113730.3.4.3",
"2.16.840.1.113730.3.4.2",
"2.16.840.1.113719.1.27.101.57",
"2.16.840.1.113719.1.27.103.7",
"2.16.840.1.113719.1.27.101.40",
"2.16.840.1.113719.1.27.101.41",
"1.2.840.113556.1.4.1413",
"1.2.840.113556.1.4.805",
"2.16.840.1.113730.3.4.18",
"1.2.840.113556.1.4.529"
],
"supportedExtension": [
"2.16.840.1.113719.1.148.100.1",
"2.16.840.1.113719.1.148.100.3",
"2.16.840.1.113719.1.148.100.5",
"2.16.840.1.113719.1.148.100.7",
"2.16.840.1.113719.1.148.100.9",
"2.16.840.1.113719.1.148.100.11",
"2.16.840.1.113719.1.148.100.13",
"2.16.840.1.113719.1.148.100.15",
"2.16.840.1.113719.1.148.100.17",
"2.16.840.1.113719.1.39.42.100.1",
"2.16.840.1.113719.1.39.42.100.3",
"2.16.840.1.113719.1.39.42.100.5",
"2.16.840.1.113719.1.39.42.100.7",
"2.16.840.1.113719.1.39.42.100.9",
"2.16.840.1.113719.1.39.42.100.11",
"2.16.840.1.113719.1.39.42.100.13",
"2.16.840.1.113719.1.39.42.100.15",
"2.16.840.1.113719.1.39.42.100.17",
"2.16.840.1.113719.1.39.42.100.19",
"2.16.840.1.113719.1.39.42.100.21",
"2.16.840.1.113719.1.39.42.100.23",
"2.16.840.1.113719.1.39.42.100.25",
"2.16.840.1.113719.1.39.42.100.27",
"2.16.840.1.113719.1.39.42.100.29",
"1.3.6.1.4.1.4203.1.11.1",
"2.16.840.1.113719.1.27.100.1",
"2.16.840.1.113719.1.27.100.3",
"2.16.840.1.113719.1.27.100.5",
"2.16.840.1.113719.1.27.100.7",
"2.16.840.1.113719.1.27.100.11",
"2.16.840.1.113719.1.27.100.13",
"2.16.840.1.113719.1.27.100.15",
"2.16.840.1.113719.1.27.100.17",
"2.16.840.1.113719.1.27.100.19",
"2.16.840.1.113719.1.27.100.21",
"2.16.840.1.113719.1.27.100.23",
"2.16.840.1.113719.1.27.100.25",
"2.16.840.1.113719.1.27.100.27",
"2.16.840.1.113719.1.27.100.29",
"2.16.840.1.113719.1.27.100.31",
"2.16.840.1.113719.1.27.100.33",
"2.16.840.1.113719.1.27.100.35",
"2.16.840.1.113719.1.27.100.37",
"2.16.840.1.113719.1.27.100.39",
"2.16.840.1.113719.1.27.100.41",
"2.16.840.1.113719.1.27.100.96",
"2.16.840.1.113719.1.27.100.98",
"2.16.840.1.113719.1.27.100.101",
"2.16.840.1.113719.1.27.100.103",
"2.16.840.1.113719.1.142.100.1",
"2.16.840.1.113719.1.142.100.4",
"2.16.840.1.113719.1.142.100.6",
"2.16.840.1.113719.1.27.100.9",
"2.16.840.1.113719.1.27.100.43",
"2.16.840.1.113719.1.27.100.45",
"2.16.840.1.113719.1.27.100.47",
"2.16.840.1.113719.1.27.100.49",
"2.16.840.1.113719.1.27.100.51",
"2.16.840.1.113719.1.27.100.53",
"2.16.840.1.113719.1.27.100.55",
"1.3.6.1.4.1.1466.20037",
"2.16.840.1.113719.1.27.100.79",
"2.16.840.1.113719.1.27.100.84",
"2.16.840.1.113719.1.27.103.1",
"2.16.840.1.113719.1.27.103.2"
],
"supportedFeatures": [
"1.3.6.1.4.1.4203.1.5.1",
"2.16.840.1.113719.1.27.99.1"
],
"supportedGroupingTypes": [
"2.16.840.1.113719.1.27.103.8"
],
"supportedLDAPVersion": [
"2",
"3"
],
"supportedSASLMechanisms": [
"NMAS_LOGIN"
],
"unAuthBinds": [
"0"
],
"vendorName": [
"NetIQ Corporation"
],
"vendorVersion": [
"LDAP Agent for NetIQ eDirectory 9.1.4 (40105.09)"
],
"wholeSubtreeSearchOps": [
"0"
]
},
"type": "DsaInfo"
}
"""
| 157.439931
| 2,033
| 0.678252
| 31,266
| 182,158
| 3.865829
| 0.050726
| 0.049872
| 0.022264
| 0.051775
| 0.67852
| 0.664232
| 0.649538
| 0.631576
| 0.610338
| 0.595761
| 0
| 0.227642
| 0.155299
| 182,158
| 1,157
| 2,034
| 157.439931
| 0.55789
| 0.004183
| 0
| 0.060124
| 0
| 0.816092
| 0.999551
| 0.4196
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.051282
| 0
| 0
| 0
| 0.008842
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
0a28277ecb07f211806e8d3a0f39f0692a06fe83
| 165
|
py
|
Python
|
src/dame/tests/test__utils.py
|
sirspock/mint_cli
|
6159c9434a20903d0a68a4aa2b61cb57827a48f7
|
[
"Apache-2.0"
] | 3
|
2020-04-01T21:18:10.000Z
|
2021-04-19T23:10:31.000Z
|
src/dame/tests/test__utils.py
|
mintproject/dame_cli
|
6159c9434a20903d0a68a4aa2b61cb57827a48f7
|
[
"Apache-2.0"
] | 68
|
2020-04-01T19:58:18.000Z
|
2021-04-28T13:37:59.000Z
|
src/dame/tests/test__utils.py
|
sirspock/mint_cli
|
6159c9434a20903d0a68a4aa2b61cb57827a48f7
|
[
"Apache-2.0"
] | null | null | null |
from dame._utils import init_logger, get_latest_version
def test_init_logger():
init_logger()
def test_get_latest_version():
assert get_latest_version()
| 16.5
| 55
| 0.781818
| 24
| 165
| 4.875
| 0.5
| 0.25641
| 0.410256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 165
| 9
| 56
| 18.333333
| 0.829787
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
0a2961396006c534b0ba7468777b9df0e3001e57
| 243
|
py
|
Python
|
supermamas/pamperings/forms/__init__.py
|
oasalonen/supermamas
|
3ab2b2370de903cea614ea9dfa10ce1c0504a715
|
[
"Apache-2.0"
] | null | null | null |
supermamas/pamperings/forms/__init__.py
|
oasalonen/supermamas
|
3ab2b2370de903cea614ea9dfa10ce1c0504a715
|
[
"Apache-2.0"
] | null | null | null |
supermamas/pamperings/forms/__init__.py
|
oasalonen/supermamas
|
3ab2b2370de903cea614ea9dfa10ce1c0504a715
|
[
"Apache-2.0"
] | null | null | null |
from supermamas.pamperings.forms.pampering_filter import PamperingFilterForm
from supermamas.pamperings.forms.create_pampering import CreatePamperingForm
from supermamas.pamperings.forms.pampering_invitation_form import PamperingInvitationForm
| 81
| 89
| 0.917695
| 25
| 243
| 8.76
| 0.52
| 0.191781
| 0.328767
| 0.39726
| 0.347032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045267
| 243
| 3
| 89
| 81
| 0.943966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0a6240345cb91f842ad64f95de677c935ec5f394
| 133,000
|
py
|
Python
|
test/test_markdown_setext_headings_extra.py
|
jackdewinter/pymarkdown
|
7ae408ba0b24506fa07552ffe520750bbff38c53
|
[
"MIT"
] | 20
|
2021-01-14T17:39:09.000Z
|
2022-03-14T08:35:22.000Z
|
test/test_markdown_setext_headings_extra.py
|
jackdewinter/pymarkdown
|
7ae408ba0b24506fa07552ffe520750bbff38c53
|
[
"MIT"
] | 304
|
2020-08-15T23:24:00.000Z
|
2022-03-31T23:34:03.000Z
|
test/test_markdown_setext_headings_extra.py
|
jackdewinter/pymarkdown
|
7ae408ba0b24506fa07552ffe520750bbff38c53
|
[
"MIT"
] | 3
|
2021-08-11T10:26:26.000Z
|
2021-11-02T20:41:27.000Z
|
"""
https://github.github.com/gfm/#setext-headings
"""
import pytest
from .utils import act_and_assert
# pylint: disable=too-many-lines
@pytest.mark.gfm
def test_setext_headings_extra_01():
"""
Test case extra 1: SetExt heading starts with a backslash escape
"""
# Arrange
source_markdown = """\\\\this is a fun day
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[text(1,1):\\\b\\this is a fun day:]",
"[end-setext::]",
]
expected_gfm = """<h2>\\this is a fun day</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_02():
"""
Test case extra 2: SetExt heading starts with a backslash as in a hard line break
"""
# Arrange
source_markdown = """\\
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[text(1,1):\\:]",
"[end-setext::]",
]
expected_gfm = """<h2>\\</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_03():
"""
Test case extra 3: SetExt heading starts with 2+ spaces as in a hard line break
"""
# Arrange
source_markdown = """\a\a\a
---""".replace(
"\a", " "
)
expected_tokens = ["[BLANK(1,1): ]", "[tbreak(2,1):-::---]"]
expected_gfm = """<hr />"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_04():
"""
Test case extra 4: SetExt heading string starting with a code span.
"""
# Arrange
source_markdown = """``this`` is a fun day
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[icode-span(1,1):this:``::]",
"[text(1,9): is a fun day:]",
"[end-setext::]",
]
expected_gfm = """<h2><code>this</code> is a fun day</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_05():
"""
Test case extra 5: SetExt heading string starting with a character reference.
"""
# Arrange
source_markdown = """& the band played on
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[text(1,1):\a&\a\a&\a&\a\a the band played on:]",
"[end-setext::]",
]
expected_gfm = """<h2>& the band played on</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_06():
"""
Test case extra 6: SetExt heading string starting with a raw html block.
"""
# Arrange
source_markdown = """<there it='is'>, really
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[raw-html(1,1):there it='is']",
"[text(1,16):, really:]",
"[end-setext::]",
]
expected_gfm = """<h2><there it='is'>, really</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_07():
"""
Test case extra 7: SetExt heading string starting with an URI autolink
"""
# Arrange
source_markdown = """<http://www.google.com> is where to look
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[uri-autolink(1,1):http://www.google.com]",
"[text(1,24): is where to look:]",
"[end-setext::]",
]
expected_gfm = """<h2><a href="http://www.google.com">http://www.google.com</a> is where to look</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_08():
"""
Test case extra 8: SetExt heading string starting with an email autolink
"""
# Arrange
source_markdown = """<foo@bar.com> for more information
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[email-autolink(1,1):foo@bar.com]",
"[text(1,14): for more information:]",
"[end-setext::]",
]
expected_gfm = (
"""<h2><a href="mailto:foo@bar.com">foo@bar.com</a> for more information</h2>"""
)
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_09():
"""
Test case extra 9: SetExt heading string starting with an emphasis
"""
# Arrange
source_markdown = """*it's* me!
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[emphasis(1,1):1:*]",
"[text(1,2):it's:]",
"[end-emphasis(1,6)::]",
"[text(1,7): me!:]",
"[end-setext::]",
]
expected_gfm = """<h2><em>it's</em> me!</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_10():
"""
Test case extra 10: SetExt heading string starting with a link. also see 183
"""
# Arrange
source_markdown = """[Foo](/uri) is a link
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[link(1,1):inline:/uri:::::Foo:False::::]",
"[text(1,2):Foo:]",
"[end-link::]",
"[text(1,12): is a link:]",
"[end-setext::]",
]
expected_gfm = """<h2><a href="/uri">Foo</a> is a link</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_11():
"""
Test case extra 11: SetExt heading string starting with an image
"""
# Arrange
source_markdown = """ is an image
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
'[image(1,1):inline:/url:title:foo::::foo:False:":: :]',
"[text(1,21): is an image:]",
"[end-setext::]",
]
expected_gfm = """<h2><img src="/url" alt="foo" title="title" /> is an image</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_12():
"""
Test case extra 12: SetExt heading containing a backslash
"""
# Arrange
source_markdown = """this is a \\\\fun\\\\ day
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[text(1,1):this is a \\\b\\fun\\\b\\ day:]",
"[end-setext::]",
]
expected_gfm = """<h2>this is a \\fun\\ day</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_13():
"""
Test case extra 13: SetExt heading containing a code span.
"""
# Arrange
source_markdown = """this is a ``fun`` day
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[text(1,1):this is a :]",
"[icode-span(1,11):fun:``::]",
"[text(1,18): day:]",
"[end-setext::]",
]
expected_gfm = """<h2>this is a <code>fun</code> day</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_14():
"""
Test case extra 14: SetExt heading containing a character reference.
"""
# Arrange
source_markdown = """fun & joy
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[text(1,1):fun \a&\a\a&\a&\a\a joy:]",
"[end-setext::]",
]
expected_gfm = """<h2>fun & joy</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_15():
"""
Test case extra 15: SetExt heading containing a raw html block.
"""
# Arrange
source_markdown = """where <there it='is'> it
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[text(1,1):where :]",
"[raw-html(1,7):there it='is']",
"[text(1,22): it:]",
"[end-setext::]",
]
expected_gfm = """<h2>where <there it='is'> it</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_16():
"""
Test case extra 16: SetExt heading containing an URI autolink
"""
# Arrange
source_markdown = """look at <http://www.google.com> for answers
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[text(1,1):look at :]",
"[uri-autolink(1,9):http://www.google.com]",
"[text(1,32): for answers:]",
"[end-setext::]",
]
expected_gfm = """<h2>look at <a href="http://www.google.com">http://www.google.com</a> for answers</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_17():
"""
Test case extra 17: SetExt heading containing an email autolink
"""
# Arrange
source_markdown = """email <foo@bar.com> for answers
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[text(1,1):email :]",
"[email-autolink(1,7):foo@bar.com]",
"[text(1,20): for answers:]",
"[end-setext::]",
]
expected_gfm = (
"""<h2>email <a href="mailto:foo@bar.com">foo@bar.com</a> for answers</h2>"""
)
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_18():
"""
Test case extra 18: SetExt heading containing emphasis
"""
# Arrange
source_markdown = """really! *it's me!* here!
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[text(1,1):really! :]",
"[emphasis(1,9):1:*]",
"[text(1,10):it's me!:]",
"[end-emphasis(1,18)::]",
"[text(1,19): here!:]",
"[end-setext::]",
]
expected_gfm = """<h2>really! <em>it's me!</em> here!</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_19():
"""
Test case extra 19: SetExt heading containing a link.
"""
# Arrange
source_markdown = """look at [Foo](/uri) for more
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[text(1,1):look at :]",
"[link(1,9):inline:/uri:::::Foo:False::::]",
"[text(1,10):Foo:]",
"[end-link::]",
"[text(1,20): for more:]",
"[end-setext::]",
]
expected_gfm = """<h2>look at <a href="/uri">Foo</a> for more</h2>"""
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_20():
"""
Test case extra 20: SetExt heading containing an image
"""
# Arrange
source_markdown = """special  headings
---"""
expected_tokens = [
"[setext(2,1):-:3::(1,1)]",
"[text(1,1):special :]",
'[image(1,9):inline:/url:title:foo::::foo:False:":: :]',
"[text(1,29): headings:]",
"[end-setext::]",
]
expected_gfm = (
"""<h2>special <img src="/url" alt="foo" title="title" /> headings</h2>"""
)
# Act & Assert
act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_21():
    """
    Test case extra 21: SetExt headings ends with a backslash escape
    """

    # Arrange
    # The four backslashes in the literal form one escaped backslash, so the
    # rendered heading ends with a single literal "\".
    source_markdown = """this is a fun day\\\\
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):this is a fun day\\\b\\:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>this is a fun day\\</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_22():
    """
    Test case extra 22: SetExt heading ends with a backslash as in a hard line break
    """

    # Arrange
    # A trailing backslash at the very end of the heading text cannot start a
    # hard break (there is no following line inside the heading).
    source_markdown = """this was \\
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):this was \\:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>this was \\</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_22a():
    """
    Test case extra 22a: variation on 22 with more text after the hard break
    """

    # Arrange
    # With a following line, the trailing backslash now forms a hard break.
    source_markdown = """this was \\
another line
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):this was :]",
        "[hard-break(1,10):\\:\n]",
        "[text(2,1):another line:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>this was <br />\nanother line</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_22b():
    """
    Test case extra 22b: variation on 22 with a soft line break
    """

    # Arrange
    # No trailing backslash or double space, so the newline is a soft break.
    source_markdown = """this was
another line
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):this was\nanother line::\n]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>this was\nanother line</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_22c():
    """
    Test case extra 22c: variation on 22 with more spaces than needed for a hard break
    """

    # Arrange
    # "\a" is a placeholder swapped for a literal space by .replace(), keeping
    # the significant trailing whitespace visible in the test source.
    source_markdown = """this was\a\a\a
another line
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):this was:]",
        "[hard-break(1,9): :\n]",
        "[text(2,1):another line:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>this was<br />\nanother line</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_22d():
    """
    Test case extra 22d: variation on 22c with next line indented
    """

    # Arrange
    # Line 2 carries one leading space, matching text(2,2) and the " \x02"
    # leading-whitespace field in that token.  "\a" becomes a literal space.
    source_markdown = """this was\a\a\a
 another line
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):this was:]",
        "[hard-break(1,9): :\n]",
        "[text(2,2):another line:: \x02]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>this was<br />\nanother line</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_23():
    """
    Test case extra 23: SetExt heading ends with 2+ spaces as in a hard line break, but not since end of paragraph
    """

    # Arrange
    # "\a" is a placeholder swapped for a literal space by .replace().
    # Trailing spaces before the underline do not create a hard break.
    source_markdown = """what? no line break?\a\a\a
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(2,1):-:3::(1,1): ]",
        "[text(1,1):what? no line break?:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>what? no line break?</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_23a():
    """
    Test case extra 23a: variation on 23, but with more text so hard break in scope
    """

    # Arrange
    # With a second content line, the trailing spaces become a hard break.
    source_markdown = """what? no line break?\a\a\a
woe is me
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):what? no line break?:]",
        "[hard-break(1,21): :\n]",
        "[text(2,1):woe is me:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>what? no line break?<br />\nwoe is me</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_24():
    """
    Test case extra 24: SetExt heading string ending with a code span.
    """

    # Arrange
    source_markdown = """this is a fun ``day``
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):this is a fun :]",
        "[icode-span(1,15):day:``::]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>this is a fun <code>day</code></h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_25():
    """
    Test case extra 25: SetExt heading string ending with a character reference.
    """

    # Arrange
    # NOTE(review): the bare "&" in the token and in expected_gfm looks like it
    # lost an HTML entity encoding (e.g. &amp;) in this copy — verify upstream.
    source_markdown = """the band played on &
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):the band played on \a&\a\a&\a&\a\a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>the band played on &</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_26():
    """
    Test case extra 26: SetExt heading string ending with a raw html block.
    """

    # Arrange
    source_markdown = """really, <there it='is'>
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):really, :]",
        "[raw-html(1,9):there it='is']",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>really, <there it='is'></h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_27():
    """
    Test case extra 27: SetExt heading string ending with an URI autolink
    """

    # Arrange
    source_markdown = """look at <http://www.google.com>
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):look at :]",
        "[uri-autolink(1,9):http://www.google.com]",
        "[end-setext::]",
    ]
    expected_gfm = (
        """<h2>look at <a href="http://www.google.com">http://www.google.com</a></h2>"""
    )

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_28():
    """
    Test case extra 28: SetExt heading string ending with an email autolink
    """

    # Arrange
    source_markdown = """for more information, contact <foo@bar.com>
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):for more information, contact :]",
        "[email-autolink(1,31):foo@bar.com]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>for more information, contact <a href="mailto:foo@bar.com">foo@bar.com</a></h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_29():
    """
    Test case extra 29: SetExt heading string ending with an emphasis
    """

    # Arrange
    source_markdown = """it's *me*
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):it's :]",
        "[emphasis(1,6):1:*]",
        "[text(1,7):me:]",
        "[end-emphasis(1,9)::]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>it's <em>me</em></h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_30():
    """
    Test case extra 30: SetExt heading string ending with a link.
    """

    # Arrange
    source_markdown = """a link looks like [Foo](/uri)
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a link looks like :]",
        "[link(1,19):inline:/uri:::::Foo:False::::]",
        "[text(1,20):Foo:]",
        "[end-link::]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a link looks like <a href="/uri">Foo</a></h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_31():
    """
    Test case extra 31: SetExt heading string ending with an image
    """

    # Arrange
    # "an image is " is 12 characters, so the image markup starts at
    # column 13, matching the image(1,13) token.
    source_markdown = """an image is ![foo](/url "title")
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):an image is :]",
        '[image(1,13):inline:/url:title:foo::::foo:False:":: :]',
        "[end-setext::]",
    ]
    expected_gfm = """<h2>an image is <img src="/url" alt="foo" title="title" /></h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_32():
    """
    Test case extra 32: SetExt heading this is only a backslash escape
    """

    # Arrange
    # The heading text is a single escaped backslash.
    source_markdown = """\\\\
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):\\\b\\:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>\\</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_33():
    """
    Test case extra 33: SetExt heading this is only a backslash as in a hard line break, but not since end of paragraph
    """

    # Arrange
    # The single leading space is recorded in the setext token's whitespace
    # field, shifting the text token to column 2.
    source_markdown = """ \\
---"""
    expected_tokens = [
        "[setext(2,1):-:3: :(1,2)]",
        "[text(1,2):\\:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>\\</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_34():
    """
    Test case extra 34: SetExt heading this is only 2+ spaces as in a hard line break, but not since end of paragraph
    """

    # Arrange
    # A whitespace-only first line cannot start a paragraph, so the "---"
    # becomes a thematic break instead of a setext underline.
    source_markdown = """\a\a\a\a
---""".replace(
        "\a", " "
    )
    expected_tokens = ["[BLANK(1,1): ]", "[tbreak(2,1):-::---]"]
    expected_gfm = """<hr />"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_35():
    """
    Test case extra 35: SetExt heading this is only a code span.
    """

    # Arrange
    source_markdown = """``day``
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[icode-span(1,1):day:``::]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2><code>day</code></h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_36():
    """
    Test case extra 36: SetExt heading this is only a character reference.
    """

    # Arrange
    # NOTE(review): the bare "&" in the token and expected_gfm looks like it
    # lost an HTML entity encoding (e.g. &amp;) in this copy — verify upstream.
    source_markdown = """&
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):\a&\a\a&\a&\a\a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>&</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_37():
    """
    Test case extra 37: SetExt heading this is only a raw html block.
    """

    # Arrange
    # The raw HTML line starts an HTML block, which swallows the "---" line,
    # so no setext heading is produced at all.
    source_markdown = """<there it='is'>
---"""
    expected_tokens = [
        "[html-block(1,1)]",
        "[text(1,1):<there it='is'>\n---:]",
        "[end-html-block:::True]",
    ]
    expected_gfm = """<there it='is'>\n---"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_38():
    """
    Test case extra 38: SetExt heading this is only an URI autolink
    """

    # Arrange
    source_markdown = """<http://www.google.com>
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[uri-autolink(1,1):http://www.google.com]",
        "[end-setext::]",
    ]
    expected_gfm = (
        """<h2><a href="http://www.google.com">http://www.google.com</a></h2>"""
    )

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_39():
    """
    Test case extra 39: SetExt heading this is only an email autolink
    """

    # Arrange
    source_markdown = """<foo@bar.com>
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[email-autolink(1,1):foo@bar.com]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2><a href="mailto:foo@bar.com">foo@bar.com</a></h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_40():
    """
    Test case extra 40: SetExt heading this is only an emphasis
    """

    # Arrange
    source_markdown = """*me*
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[emphasis(1,1):1:*]",
        "[text(1,2):me:]",
        "[end-emphasis(1,4)::]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2><em>me</em></h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_41():
    """
    Test case extra 41: SetExt heading this is only a link.
    """

    # Arrange
    source_markdown = """[Foo](/uri)
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[link(1,1):inline:/uri:::::Foo:False::::]",
        "[text(1,2):Foo:]",
        "[end-link::]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2><a href="/uri">Foo</a></h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_42():
    """
    Test case extra 42: SetExt heading this is only an image
    """

    # Arrange
    # The heading consists solely of the inline image, matching image(1,1).
    source_markdown = """![foo](/url "title")
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        '[image(1,1):inline:/url:title:foo::::foo:False:":: :]',
        "[end-setext::]",
    ]
    expected_gfm = """<h2><img src="/url" alt="foo" title="title" /></h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_43():
    """
    Test case extra 43: SetExt heading with code span with newline inside
    """

    # Arrange
    # The newline inside the code span is normalized to a space in the output.
    source_markdown = """a`code
span`a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[icode-span(1,2):code\a\n\a \aspan:`::]",
        "[text(2,6):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<code>code span</code>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_44():
    """
    Test case extra 44: SetExt heading with raw HTML with newline inside
    """

    # Arrange
    # Raw HTML may legally span the newline; it is passed through verbatim.
    source_markdown = """a<raw
html='cool'>a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[raw-html(1,2):raw\nhtml='cool']",
        "[text(2,13):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<raw\nhtml='cool'>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_45():
    """
    Test case extra 45: SetExt heading with URI autolink with newline inside, renders invalid
    """

    # Arrange
    # A newline is not allowed inside an autolink, so it degrades to text.
    source_markdown = """a<http://www.
google.com>a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a\a<\a<\ahttp://www.\ngoogle.com\a>\a>\aa::\n]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<http://www.\ngoogle.com>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_46():
    """
    Test case extra 46: SetExt heading with email autolink with newline inside, renders invalid
    """

    # Arrange
    # Same as 45 but for an email autolink.
    source_markdown = """a<foo@bar
.com>a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a\a<\a<\afoo@bar\n.com\a>\a>\aa::\n]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<foo@bar\n.com>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_47():
    """
    Test case extra 47: SetExt heading with inline link with newline in label
    ??? repeat of 518 series?
    """

    # Arrange
    source_markdown = """a[Fo
o](/uri "testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Fo\no:False:":: :]',
        "[text(1,3):Fo\no::\n]",
        "[end-link::]",
        "[text(2,19):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Fo\no</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_48():
    """
    Test case extra 48: SetExt heading with inline link with newline in pre-URI space
    """

    # Arrange
    source_markdown = """a[Foo](
/uri "testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foo:False:":\n: :]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,16):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_48a():
    """
    Test case extra 48a: variation on 48, with whitespace
    """

    # Arrange
    # "\a" is a placeholder swapped for a literal space by .replace().
    source_markdown = """a[Foo](\a\a
/uri "testing")a
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foo:False:": \n: :]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,16):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_48b():
    """
    Test case extra 48b: variation on 48 with whitespace after newline
    """

    # Arrange
    # NOTE(review): text(2,19) implies line 2 originally carried three leading
    # spaces before "/uri" that appear stripped from this copy; the link
    # token's whitespace field may be affected too — confirm against upstream.
    source_markdown = """a[Foo](
/uri "testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foo:False:":\n : :]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,19):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_48c():
    """
    Test case extra 48c: variation on 48 with whitespace before and after newline
    """

    # Arrange
    # NOTE(review): as in 48b, text(2,19) implies three leading spaces on
    # line 2 that appear stripped from this copy — confirm against upstream.
    source_markdown = """a[Foo](\a\a
/uri "testing")a
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foo:False:": \n : :]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,19):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_49():
    """
    Test case extra 49: SetExt heading with inline link with newline in URI, invalidating it
    """

    # Arrange
    # A newline inside the URI is illegal, so the whole construct degrades
    # into plain text tokens.
    source_markdown = """a[Foo](/ur
i "testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[text(1,2):[:]",
        "[text(1,3):Foo:]",
        "[text(1,6):]:]",
        '[text(1,7):(/ur\ni \a"\a"\atesting\a"\a"\a)a::\n]',
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a[Foo](/ur\ni "testing")a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_50x():
    """
    Test case extra 50: SetExt heading with inline link with newline in post-URI space
    """

    # Arrange
    # "\a" is a placeholder swapped for a literal space by .replace().
    source_markdown = """a[Foo](/uri\a
"testing")a
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foo:False:":: \n:]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,11):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_50a():
    """
    Test case extra 50: variation 50 with whitespace before newline
    """

    # Arrange
    source_markdown = """a[Foo](/uri\a\a
"testing")a
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foo:False:":: \n:]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,11):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_50b():
    """
    Test case extra 50: variation with 50 with whitespace after newline
    """

    # Arrange
    # NOTE(review): text(2,14) implies line 2 originally carried three leading
    # spaces before the title that appear stripped from this copy — confirm
    # against upstream.
    source_markdown = """a[Foo](/uri
"testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foo:False:"::\n :]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,14):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_50c():
    """
    Test case extra 50c: variation of 50 with whitespace before and after newline
    """

    # Arrange
    # NOTE(review): as in 50b, text(2,14) implies three leading spaces on
    # line 2 that appear stripped from this copy — confirm against upstream.
    source_markdown = """a[Foo](/uri\a\a
"testing")a
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foo:False:":: \n :]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,14):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_51():
    """
    Test case extra 51: SetExt heading with inline link with newline in title
    """

    # Arrange
    # The link title may span the newline; it is kept verbatim in the token.
    source_markdown = """a[Foo](/uri "test
ing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:test\ning::::Foo:False:":: :]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,6):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="test\ning">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_52x():
    """
    Test case extra 52: SetExt heading with inline link with newline after title
    """

    # Arrange
    source_markdown = """a[Foo](/uri "testing"
)a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foo:False:":: :\n]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,2):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_52a():
    """
    Test case extra 52: variation of 52 with whitespace before newline
    """

    # Arrange
    # "\a" is a placeholder swapped for a literal space by .replace().
    source_markdown = """a[Foo](/uri "testing"\a\a
)a
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foo:False:":: : \n]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,2):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_52b():
    """
    Test case extra 52b: variation of 52 with whitespace after newline
    """

    # Arrange
    # NOTE(review): text(2,4) implies line 2 originally carried two leading
    # spaces before ")a" that appear stripped from this copy — confirm
    # against upstream.
    source_markdown = """a[Foo](/uri "testing"
)a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foo:False:":: :\n ]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,4):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_52c():
    """
    Test case extra 52c: variation of 52 with whitespace before and after newline
    """

    # Arrange
    # NOTE(review): as in 52b, text(2,4) implies two leading spaces on line 2
    # that appear stripped from this copy — confirm against upstream.
    source_markdown = """a[Foo](/uri "testing"\a\a
)a
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foo:False:":: : \n ]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,4):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_53():
    """
    Test case extra 53: SetExt heading with full link with newline in label
    """

    # Arrange
    # Line 4 onward is the link reference definition resolved by the link.
    source_markdown = """a[foo
bar][bar]a
---
[bar]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::bar:foo\nbar:::::]",
        "[text(1,3):foo\nbar::\n]",
        "[end-link::]",
        "[text(2,10):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo\nbar</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_54():
    """
    Test case extra 54: SetExt heading with full link with newline in reference
    """

    # Arrange
    # The reference "ba\nr" is normalized to "ba r" for matching, per GFM
    # label normalization.
    source_markdown = """a[foo][ba
r]a
---
[ba\nr]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::ba\nr:foo:::::]",
        "[text(1,3):foo:]",
        "[end-link::]",
        "[text(2,3):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba r:ba\nr: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_55():
    """
    Test case extra 55: SetExt heading with shortcut link with newline in label
    """

    # Arrange
    source_markdown = """a[ba
r]a
---
[ba\nr]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):shortcut:/url:title::::ba\nr:::::]",
        "[text(1,3):ba\nr::\n]",
        "[end-link::]",
        "[text(2,3):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba r:ba\nr: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">ba\nr</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_56():
    """
    Test case extra 56: SetExt heading with collapsed link with newline in label
    """

    # Arrange
    source_markdown = """a[ba
r][]a
---
[ba\nr]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):collapsed:/url:title::::ba\nr:::::]",
        "[text(1,3):ba\nr::\n]",
        "[end-link::]",
        "[text(2,5):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba r:ba\nr: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">ba\nr</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_57():
    """
    Test case extra 57: SetExt heading with collapsed link with newline in label
    """

    # Arrange
    # The label starts with the newline itself; normalization trims it for
    # reference matching.
    source_markdown = """a[
bar][]a
---
[\nbar]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):collapsed:/url:title::::\nbar:::::]",
        "[text(1,3):\nbar::\n]",
        "[end-link::]",
        "[text(2,7):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:\nbar: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">\nbar</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_58():
    """
    Test case extra 58: SetExt heading with full link with newline in reference
    """

    # Arrange
    source_markdown = """a[foo][
bar]a
---
[\nbar]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::\nbar:foo:::::]",
        "[text(1,3):foo:]",
        "[end-link::]",
        "[text(2,5):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:\nbar: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_59():
    """
    Test case extra 59: SetExt heading with inline image with newline between image chars, invalidating it.
    """

    # Arrange
    # Splitting "!" and "[" across the newline breaks the image syntax; the
    # remainder still parses as a plain inline link.
    source_markdown = """a!
[Foo](/uri "testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a!\n::\n]",
        '[link(2,1):inline:/uri:testing::::Foo:False:":: :]',
        "[text(2,2):Foo:]",
        "[end-link::]",
        "[text(2,22):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a!\n<a href="/uri" title="testing">Foo</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_60():
    """
    Test case extra 60: SetExt heading with inline link with newline in label but not title.
    """

    # Arrange
    source_markdown = """a[Fo
o](/uri)a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):inline:/uri:::::Fo\no:False::::]",
        "[text(1,3):Fo\no::\n]",
        "[end-link::]",
        "[text(2,9):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri">Fo\no</a>a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_61():
    """
    Test case extra 61: SetExt heading with inline image with newline in label
    """

    # Arrange
    # The image label "fo\no" spans the line break; the trailing "a" on
    # line 2 sits at column 17, matching text(2,17).
    source_markdown = """a![fo
o](/url "title")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/url:title:fo\no::::fo\no:False:":: :]',
        "[text(2,17):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="fo\no" title="title" />a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_62():
    """
    Test case extra 62: SetExt heading with inline image with newline before URI
    """

    # Arrange
    # The newline sits in the pre-URI whitespace; text(2,16) pins the URI
    # line as starting in column 1.
    source_markdown = """a![Foo](
/uri "testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foo::::Foo:False:":\n: :]',
        "[text(2,16):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testing" />a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_62a():
    """
    Test case extra 62a: variation of 62 with whitespace before newline
    """

    # Arrange
    # Mirrors the link-flavored test 48a; "\a" becomes a literal space.
    source_markdown = """a![Foo](\a\a
/uri "testing")a
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foo::::Foo:False:": \n: :]',
        "[text(2,16):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testing" />a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_62b():
    """
    Test case extra 62b: variation of 62 with whitespace after newline
    """

    # Arrange
    # NOTE(review): this fixture appears corrupted — the expected tokens imply
    # line 1 originally read a![Foo]( and that line 2 carried leading spaces
    # before /uri "testing")a (text(2,19)), but the image markup and the
    # whitespace are missing from this copy; confirm against upstream.
    source_markdown = """aa
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foo::::Foo:False:":\n : :]',
        "[text(2,19):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testing" />a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)


@pytest.mark.gfm
def test_setext_headings_extra_62c():
    """
    Test case extra 62c: variation of 62 with whitespace before and after newline
    """

    # Arrange
    # NOTE(review): same apparent corruption as 62b — line 1 should read
    # a![Foo](\a\a and line 2 should carry leading spaces before
    # /uri "testing")a per text(2,19); confirm against upstream.
    source_markdown = """aa
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foo::::Foo:False:": \n : :]',
        "[text(2,19):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testing" />a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_63():
    """
    Test case extra 63: SetExt heading with inline image with newline in the URI, invalidating it
    """

    # Arrange
    # The newline inside the URI invalidates the image, so the whole span
    # degrades to plain text (the expected text tokens spell out the source).
    source_markdown = """a![Foo](/ur
i "testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[text(1,2):![:]",
        "[text(1,4):Foo:]",
        "[text(1,7):]:]",
        '[text(1,8):(/ur\ni \a"\a"\atesting\a"\a"\a)a::\n]',
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a![Foo](/ur\ni "testing")a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_64x():
    """
    Test case extra 64: SetExt heading with inline image with newline after the URI
    """

    # Arrange
    # text(2,11) pins line 2 as "testing")a with no leading whitespace.
    source_markdown = """a![Foo](/uri
"testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foo::::Foo:False:"::\n:]',
        "[text(2,11):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testing" />a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_64a():
    """
    Test case extra 64a: variation of 64 with whitespace before newline
    """

    # Arrange
    # Mirrors the link-flavored test 50a; "\a" becomes a literal space.
    source_markdown = """a![Foo](/uri\a\a
"testing")a
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foo::::Foo:False:":: \n:]',
        "[text(2,11):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testing" />a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_64b():
    """
    Test case extra 64b: variation of 64 with whitespace after newline
    """

    # Arrange
    # text(2,12) and the token's post-newline whitespace field both pin one
    # leading space on line 2.
    source_markdown = """a![Foo](/uri
 "testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foo::::Foo:False:"::\n :]',
        "[text(2,12):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testing" />a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_64c():
    """
    Test case extra 64c: variation of 64 with whitespace before and after newline
    """

    # Arrange
    # Whitespace both sides of the newline: trailing "\a\a" on line 1 and one
    # leading space on line 2 (text(2,12)).
    source_markdown = """a![Foo](/uri\a\a
 "testing")a
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foo::::Foo:False:":: \n :]',
        "[text(2,12):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testing" />a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_65():
    """
    Test case extra 65: SetExt heading with inline image with newline after the URI and no text
    """

    # Arrange
    # No title at all; line 2 is just ")a", matching text(2,2).
    source_markdown = """a![Foo](/uri
)a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):inline:/uri::Foo::::Foo:False:::\n:]",
        "[text(2,2):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" />a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_66():
    """
    Test case extra 66: SetExt heading with inline image with newline in the title
    """

    # Arrange
    # The title "test\ning" spans the line break; text(2,6) pins line 2 as
    # ing")a with no leading whitespace.
    source_markdown = """a![Foo](/uri "test
ing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:test\ning:Foo::::Foo:False:":: :]',
        "[text(2,6):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="test\ning" />a</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_67():
    """
    Test case extra 67: SetExt heading with inline image with newline after the title
    """
    # Arrange
    # NOTE(review): source_markdown appears truncated relative to expected_tokens
    # (tokens expect an inline image on line 1) — confirm against upstream.
    source_markdown = """aa
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foo::::Foo:False:":: :\n]',
        "[text(2,2):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_67a():
    """
    Test case extra 67a: variation of 67 with whitespace before newline
    """
    # Arrange
    # NOTE(review): the .replace("\a", " ") is a no-op here — the literal
    # contains no "\a"; source likely lost characters. Confirm against upstream.
    source_markdown = """aa
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foo::::Foo:False:":: : \n]',
        "[text(2,2):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_67b():
    """
    Test case extra 67b: variation of 67 with whitespace after newline
    """
    # Arrange
    # NOTE(review): source_markdown appears truncated relative to expected_tokens
    # (tokens expect an inline image on line 1) — confirm against upstream.
    source_markdown = """aa
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foo::::Foo:False:":: :\n ]',
        "[text(2,5):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_67c():
    """
    Test case extra 67c: variation of 67 with whitespace before and after newline
    """
    # Arrange
    # NOTE(review): the .replace("\a", " ") is a no-op here — the literal
    # contains no "\a"; source likely lost characters. Confirm against upstream.
    source_markdown = """aa
---""".replace(
        "\a", " "
    )
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foo::::Foo:False:":: : \n ]',
        "[text(2,5):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_68x():
    """
    Test case extra 68: SetExt heading with link containing label with replacement
    """
    # Arrange
    # Token strings are "[type(line,col):field:field:…]"; \a marks an
    # original-character/replacement pair in the text token.
    source_markdown = """a[Foβo](/uri "testing")a
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foβo:False:":: :]',
        "[text(1,3):Fo\aβ\aβ\ao:]",
        "[end-link::]",
        "[text(1,29):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foβo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_68a():
    """
    Test case extra 68a: variation of 68 without special characters
    """
    # Arrange
    # Baseline form of 68: plain ASCII label, so the text token carries no
    # replacement markers.
    source_markdown = """a[Foo](/uri "testing")a
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Foo:False:":: :]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(1,23):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_68b():
    """
    Test case extra 68b: variation of 68 with newline before special characters
    """
    # Arrange
    # The label spans two lines; the text token records the newline and the
    # replacement pair (\a-delimited).
    source_markdown = """a[Fo
βo](/uri "testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Fo\nβo:False:":: :]',
        "[text(1,3):Fo\n\aβ\aβ\ao::\n]",
        "[end-link::]",
        "[text(2,25):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Fo\nβo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_69():
    """
    Test case extra 69: SetExt heading with link containing label with backslash
    """
    # Arrange
    # \\\b] in the text token encodes a backslash-escaped "]" (backslash then
    # backspace marker) inside the link label.
    source_markdown = """a[Fo\\]o](/uri "testing")a
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Fo\\]o:False:":: :]',
        "[text(1,3):Fo\\\b]o:]",
        "[end-link::]",
        "[text(1,25):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Fo]o</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_69a():
    """
    Test case extra 69a: variation of 69 with newline before special characters
    """
    # Arrange
    # Same escaped-"]" label as 69, but split across a newline inside the label.
    source_markdown = """a[Fo
\\]o](/uri "testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testing::::Fo\n\\]o:False:":: :]',
        "[text(1,3):Fo\n\\\b]o::\n]",
        "[end-link::]",
        "[text(2,21):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testing">Fo\n]o</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_70():
    """
    Test case extra 70: SetExt heading with link containing uri with space
    """
    # Arrange
    # Angle-bracketed destination with a space; token records both the encoded
    # URI (/my%20uri) and the raw one, with the True flag for <...> form.
    source_markdown = """a[Foo](</my uri> "testing")a
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/my%20uri:testing:/my uri:::Foo:True:":: :]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(1,28):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/my%20uri" title="testing">Foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_70a():
    """
    Test case extra 70a: variation of 70 with newline before special characters, rendering it invalid
    """
    # Arrange
    # A newline inside <...> invalidates the link, so everything is emitted as
    # plain text tokens (\a pairs encode entity replacements, \x02 is internal).
    source_markdown = """a[Foo](</my
uri> "testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[text(1,2):[:]",
        "[text(1,3):Foo:]",
        "[text(1,6):]:]",
        '[text(1,7):(\a<\a<\a/my\nuri\a>\a>\a \a"\a"\atesting\a"\a"\a)a::\n \x02]',
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a[Foo](</my\nuri> "testing")a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_71():
    """
    Test case extra 71: SetExt heading with link containing title with replacement
    """
    # Arrange
    # Non-ASCII character in the title; token carries both resolved and raw title.
    source_markdown = """a[Foo](/uri "testβing")a
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:testβing::testβing::Foo:False:":: :]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(1,29):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="testβing">Foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_71a():
    """
    Test case extra 71a: variation of 71 with newline before special characters
    """
    # Arrange
    # Title split across a newline; both resolved and raw title fields keep it.
    source_markdown = """a[Foo](/uri "test
βing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:test\nβing::test\nβing::Foo:False:":: :]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,12):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="test\nβing">Foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_72():
    """
    Test case extra 72: SetExt heading with link containing title with backslash
    """
    # Arrange
    # Title contains an escaped "#"; token keeps resolved (test#ing) and raw
    # (test\#ing) forms.
    source_markdown = """a[Foo](/uri "test\\#ing")a
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:test#ing::test\\#ing::Foo:False:":: :]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(1,25):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="test#ing">Foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_72a():
    """
    Test case extra 72a: variation of 72 with newline before special characters
    """
    # Arrange
    # Escaped "#" in the title, preceded by a newline inside the title.
    source_markdown = """a[Foo](/uri "test
\\#ing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[link(1,2):inline:/uri:test\n#ing::test\n\\#ing::Foo:False:":: :]',
        "[text(1,3):Foo:]",
        "[end-link::]",
        "[text(2,8):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<a href="/uri" title="test\n#ing">Foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_73():
    """
    Test case extra 73: SetExt heading with image containing label with replacement
    """
    # Arrange
    # NOTE(review): source_markdown appears truncated relative to expected_tokens
    # (tokens expect an inline image with label "Foβo") — confirm upstream.
    source_markdown = """aa
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foβo::::Foβo:False:":: :]',
        "[text(1,30):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foβo" title="testing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_73a():
    """
    Test case extra 73a: variation of 73 without special characters
    """
    # Arrange
    # NOTE(review): source_markdown appears truncated relative to expected_tokens
    # (tokens expect an inline image with label "Foo") — confirm upstream.
    source_markdown = """aa
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Foo::::Foo:False:":: :]',
        "[text(1,24):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_73b():
    """
    Test case extra 73b: variation of 73 with newline before special characters
    """
    # Arrange
    # NOTE(review): source_markdown appears truncated relative to expected_tokens
    # (tokens expect a two-line image label "Fo\nβo") — confirm upstream.
    source_markdown = """aa
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Fo\nβo::::Fo\nβo:False:":: :]',
        "[text(2,25):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Fo\nβo" title="testing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_74():
    """
    Test case extra 74: SetExt heading with image containing label with backslash
    """
    # Arrange
    # Image label has an escaped "]"; alt text resolves to "Fo]o" while the raw
    # label (Fo\]o) is preserved in the token.
    source_markdown = """a![Fo\\]o](/uri "testing")a
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Fo]o::::Fo\\]o:False:":: :]',
        "[text(1,26):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Fo]o" title="testing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_74a():
    """
    Test case extra 74a: variation of 74 with newline before special characters
    """
    # Arrange
    # Same escaped-"]" image label as 74, split across a newline.
    source_markdown = """a![Fo
\\]o](/uri "testing")a
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testing:Fo\n]o::::Fo\n\\]o:False:":: :]',
        "[text(2,21):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Fo\n]o" title="testing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_75():
    """
    Test case extra 75: SetExt heading with image containing uri with space
    """
    # Arrange
    # NOTE(review): source_markdown appears truncated relative to expected_tokens
    # (tokens expect an image with angle-bracketed URI "/my uri") — confirm upstream.
    source_markdown = """aa
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/my%20uri:testing:Foo:/my uri:::Foo:True:":: :]',
        "[text(1,29):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/my%20uri" alt="Foo" title="testing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_75a():
    """
    Test case extra 75a: variation of 75 with newline before special characters, invalidating it
    """
    # Arrange
    # NOTE(review): source_markdown appears truncated relative to expected_tokens
    # (tokens expect an invalidated "![Foo](</my\nuri> ...)" run) — confirm upstream.
    source_markdown = """aa
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[text(1,2):![:]",
        "[text(1,4):Foo:]",
        "[text(1,7):]:]",
        '[text(1,8):(\a<\a<\a/my\nuri\a>\a>\a \a"\a"\atesting\a"\a"\a)a::\n \x02]',
        "[end-setext::]",
    ]
    expected_gfm = """<h2>aa</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_76():
    """
    Test case extra 76: SetExt heading with image containing title with replacement
    """
    # Arrange
    # NOTE(review): source_markdown appears truncated relative to expected_tokens
    # (tokens expect an image with title "testβing") — confirm upstream.
    source_markdown = """aa
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:testβing:Foo::testβing::Foo:False:":: :]',
        "[text(1,30):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="testβing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_76a():
    """
    Test case extra 76a: variation of 76 with newline before special characters
    """
    # Arrange
    # NOTE(review): source_markdown appears truncated relative to expected_tokens
    # (tokens expect an image with two-line title "test\nβing") — confirm upstream.
    source_markdown = """aa
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:test\nβing:Foo::test\nβing::Foo:False:":: :]',
        "[text(2,12):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="test\nβing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_77():
    """
    Test case extra 77: SetExt heading with image containing title with backslash
    """
    # Arrange
    # NOTE(review): source_markdown appears truncated relative to expected_tokens
    # (tokens expect an image with escaped-"#" title) — confirm upstream.
    source_markdown = """aa
---"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:test#ing:Foo::test\\#ing::Foo:False:":: :]',
        "[text(1,26):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="test#ing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_77a():
    """
    Test case extra 77a: variation of 77 with newline before special characters
    """
    # Arrange
    # NOTE(review): source_markdown appears truncated relative to expected_tokens
    # (tokens expect a two-line escaped-"#" title) — confirm upstream.
    source_markdown = """aa
---"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        '[image(1,2):inline:/uri:test\n#ing:Foo::test\n\\#ing::Foo:False:":: :]',
        "[text(2,8):a:]",
        "[end-setext::]",
    ]
    # The literal newline inside this triple-quoted string is intentional: the
    # rendered title spans two lines.
    expected_gfm = """<h2>a<img src="/uri" alt="Foo" title="test
#ing" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_78():
    """
    Test case extra 78: SetExt heading with full link with backslash in label
    """
    # Arrange
    # Full reference link [label][ref] resolved via the trailing link
    # reference definition.
    source_markdown = """a[foo\\#bar][bar]a
---
[bar]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::bar:foo\\#bar:::::]",
        "[text(1,3):foo\\\b#bar:]",
        "[end-link::]",
        "[text(1,17):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo#bar</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_78a():
    """
    Test case extra 78a: variation of 78 with newline before special chars
    """
    # Arrange
    # Label of the full reference link spans two lines before the escaped "#".
    source_markdown = """a[foo
\\#bar][bar]a
---
[bar]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::bar:foo\n\\#bar:::::]",
        "[text(1,3):foo\n\\\b#bar::\n]",
        "[end-link::]",
        "[text(2,12):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo\n#bar</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_79():
    """
    Test case extra 79: SetExt heading with full link with replacement in label
    """
    # Arrange
    # Non-ASCII character in the label; text token carries the \a replacement pair.
    source_markdown = """a[fooβbar][bar]a
---
[bar]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::bar:fooβbar:::::]",
        "[text(1,3):foo\aβ\aβ\abar:]",
        "[end-link::]",
        "[text(1,21):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">fooβbar</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_79a():
    """
    Test case extra 79a: variation of 79 with newline before special characters
    """
    # Arrange
    # Label spans two lines before the non-ASCII character.
    source_markdown = """a[foo
βbar][bar]a
---
[bar]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::bar:foo\nβbar:::::]",
        "[text(1,3):foo\n\aβ\aβ\abar::\n]",
        "[end-link::]",
        "[text(2,16):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo\nβbar</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_80():
    """
    Test case extra 80: SetExt heading with full link with replacement in reference
    """
    # Arrange
    # Non-ASCII character appears in the reference part, not the label.
    source_markdown = """a[foo][baβr]a
---
[baβr]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::baβr:foo:::::]",
        "[text(1,3):foo:]",
        "[end-link::]",
        "[text(1,18):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::baβr:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_80a():
    """
    Test case extra 80a: variation of 80 with newline before special characters
    """
    # Arrange
    # Reference spans two lines; the link-ref-def token records the normalized
    # label ("ba βr") alongside the raw two-line form.
    source_markdown = """a[foo][ba
βr]a
---
[ba
βr]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::ba\nβr:foo:::::]",
        "[text(1,3):foo:]",
        "[end-link::]",
        "[text(2,9):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba βr:ba\nβr: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_81():
    """
    Test case extra 81: SetExt heading with full link with backspace in reference
    """
    # NOTE(review): docstring says "backspace" but the reference uses a
    # backslash escape (\]) — presumably project shorthand; confirm wording.
    # Arrange
    source_markdown = """a[foo][ba\\]r]a
---
[ba\\]r]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::ba\\]r:foo:::::]",
        "[text(1,3):foo:]",
        "[end-link::]",
        "[text(1,14):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::ba\\]r:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_81a():
    """
    Test case extra 81a: variation of 81 with newline before special characters
    """
    # Arrange
    # Escaped "]" in the reference, split across a newline in both the link
    # usage and the definition.
    source_markdown = """a[foo][ba
\\]r]a
---
[ba
\\]r]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::ba\n\\]r:foo:::::]",
        "[text(1,3):foo:]",
        "[end-link::]",
        "[text(2,5):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba \\]r:ba\n\\]r: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_82():
    """
    Test case extra 82: SetExt heading with shortcut link with replacement in label
    """
    # Arrange
    # Shortcut reference link: label doubles as the reference.
    source_markdown = """a[baβr]a
---
[baβr]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):shortcut:/url:title::::baβr:::::]",
        "[text(1,3):ba\aβ\aβ\ar:]",
        "[end-link::]",
        "[text(1,13):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::baβr:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">baβr</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_82a():
    """
    Test case extra 82a: variation of 82 with newline before special characters
    """
    # Arrange
    # Shortcut link whose label/definition both span two lines.
    source_markdown = """a[ba
βr]a
---
[ba
βr]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):shortcut:/url:title::::ba\nβr:::::]",
        "[text(1,3):ba\n\aβ\aβ\ar::\n]",
        "[end-link::]",
        "[text(2,9):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba βr:ba\nβr: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">ba\nβr</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_83():
    """
    Test case extra 83: SetExt heading with shortcut link with backslash in label
    """
    # Arrange
    # Shortcut link label contains an escaped "]" (\\\b] in the text token).
    source_markdown = """a[ba\\]r]a
---
[ba\\]r]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):shortcut:/url:title::::ba\\]r:::::]",
        "[text(1,3):ba\\\b]r:]",
        "[end-link::]",
        "[text(1,9):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::ba\\]r:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">ba]r</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_83a():
    """
    Test case extra 83a: variation of 83 with newline before special characters
    """
    # Arrange
    # Escaped "]" in a shortcut label, split across a newline.
    source_markdown = """a[ba
\\]r]a
---
[ba
\\]r]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):shortcut:/url:title::::ba\n\\]r:::::]",
        "[text(1,3):ba\n\\\b]r::\n]",
        "[end-link::]",
        "[text(2,5):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba \\]r:ba\n\\]r: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">ba\n]r</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_84x():
    """
    Test case extra 84: SetExt heading with collapsed link with replacement in label
    """
    # Arrange
    # Collapsed reference link: [label][] with an empty second bracket pair.
    source_markdown = """a[baβr][]a
---
[baβr]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):collapsed:/url:title::::baβr:::::]",
        "[text(1,3):ba\aβ\aβ\ar:]",
        "[end-link::]",
        "[text(1,15):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::baβr:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">baβr</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_84a():
    """
    Test case extra 84a: variation of 84 with newline before special characters
    """
    # Arrange
    # Collapsed link whose label spans two lines.
    source_markdown = """a[ba
βr][]a
---
[ba
βr]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):collapsed:/url:title::::ba\nβr:::::]",
        "[text(1,3):ba\n\aβ\aβ\ar::\n]",
        "[end-link::]",
        "[text(2,11):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba βr:ba\nβr: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">ba\nβr</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_85():
    """
    Test case extra 85: SetExt heading with collapsed link with backslash in label
    """
    # Arrange
    # Collapsed link label with an escaped "]".
    source_markdown = """a[ba\\]r][]a
---
[ba\\]r]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):collapsed:/url:title::::ba\\]r:::::]",
        "[text(1,3):ba\\\b]r:]",
        "[end-link::]",
        "[text(1,11):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::ba\\]r:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">ba]r</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_85a():
    """
    Test case extra 85a: variation of 85 with newline before special characters
    """
    # Arrange
    # Collapsed link label with an escaped "]" split across a newline.
    source_markdown = """a[ba
\\]r][]a
---
[ba
\\]r]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):collapsed:/url:title::::ba\n\\]r:::::]",
        "[text(1,3):ba\n\\\b]r::\n]",
        "[end-link::]",
        "[text(2,7):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba \\]r:ba\n\\]r: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">ba\n]r</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_86x():
    """
    Test case extra 86: SetExt heading with full link with replacement in label
    """
    # Arrange
    # Full reference link; the non-ASCII character is in the label only.
    source_markdown = """a[foβo][bar]a
---
[bar]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::bar:foβo:::::]",
        "[text(1,3):fo\aβ\aβ\ao:]",
        "[end-link::]",
        "[text(1,18):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foβo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_86a():
    """
    Test case extra 86a: variation of 86 with newline before special characters
    """
    # Arrange
    # Full-link label spans two lines before the non-ASCII character.
    source_markdown = """a[fo
βo][bar]a
---
[bar]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::bar:fo\nβo:::::]",
        "[text(1,3):fo\n\aβ\aβ\ao::\n]",
        "[end-link::]",
        "[text(2,14):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">fo\nβo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_87():
    """
    Test case extra 87: SetExt heading with full link with backslash in label
    """
    # Arrange
    # Full-link label contains an escaped "]".
    source_markdown = """a[fo\\]o][bar]a
---
[bar]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::bar:fo\\]o:::::]",
        "[text(1,3):fo\\\b]o:]",
        "[end-link::]",
        "[text(1,14):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">fo]o</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_87a():
    """
    Test case extra 87a: variation of 87 with newline before special characters
    """
    # Arrange
    # NOTE(review): "[\nbar]" contains a real newline escape, so the reference
    # definition label starts with a newline; the link-ref-def token's
    # ":bar:\nbar:" fields (normalized vs raw label) match that — presumably
    # intentional, confirm against upstream.
    source_markdown = """a[fo
\\]o][bar]a
---
[\nbar]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::bar:fo\n\\]o:::::]",
        "[text(1,3):fo\n\\\b]o::\n]",
        "[end-link::]",
        "[text(2,10):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:\nbar: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">fo\n]o</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_88x():
    """
    Test case extra 88: SetExt heading with full link with backslash in link
    """
    # Arrange
    # Escaped "]" in the reference part of a full link.
    source_markdown = """a[foo][ba\\]r]a
---
[ba\\]r]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::ba\\]r:foo:::::]",
        "[text(1,3):foo:]",
        "[end-link::]",
        "[text(1,14):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::ba\\]r:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_88a():
    """
    Test case extra 88a: variation of 88 with newline before special characters
    """
    # Arrange
    # Escaped "]" in the reference, split across a newline in usage and
    # definition alike.
    source_markdown = """a[foo][ba
\\]r]a
---
[ba
\\]r]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::ba\n\\]r:foo:::::]",
        "[text(1,3):foo:]",
        "[end-link::]",
        "[text(2,5):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba \\]r:ba\n\\]r: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_89x():
    """
    Test case extra 89: SetExt heading with full link with replacement in link
    """
    # Arrange
    # Non-ASCII character in the reference part of a full link.
    source_markdown = """a[foo][baβr]a
---
[baβr]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::baβr:foo:::::]",
        "[text(1,3):foo:]",
        "[end-link::]",
        "[text(1,18):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::baβr:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_89a():
    """
    Test case extra 89a: variation of 89 with newline before special characters
    """
    # Arrange
    # (Docstring previously said "88a"/"88" — copy-paste error; this is the
    # newline variation of 89.)
    source_markdown = """a[foo][ba
βr]a
---
[ba
βr]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::ba\nβr:foo:::::]",
        "[text(1,3):foo:]",
        "[end-link::]",
        "[text(2,9):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba βr:ba\nβr: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">foo</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_90x():
    """
    Test case extra 90: SetExt heading with full image with backslash in label
    """
    # Arrange
    # Full reference image; alt text resolves the escaped "#".
    source_markdown = """a![foo\\#bar][bar]a
---
[bar]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:foo#bar:::bar:foo\\#bar:::::]",
        "[text(1,18):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="foo#bar" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_90a():
    """
    Test case extra 90a: variation of 90 with newline before special chars
    """
    # Arrange
    # Image label spans two lines before the escaped "#".
    source_markdown = """a![foo
\\#bar][bar]a
---
[bar]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:foo\n#bar:::bar:foo\n\\#bar:::::]",
        "[text(2,12):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="foo\n#bar" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_91x():
    """
    Test case extra 91: SetExt heading with full image with replacement in label
    """
    # Arrange
    # Non-ASCII character in the image label; kept verbatim in alt text.
    source_markdown = """a![fooβbar][bar]a
---
[bar]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:fooβbar:::bar:fooβbar:::::]",
        "[text(1,22):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="fooβbar" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_91a():
    """
    Test case extra 91a: variation of 91 with newline before special characters
    """
    # Arrange
    # Image label spans two lines before the non-ASCII character.
    source_markdown = """a![foo
βbar][bar]a
---
[bar]: /url 'title'"""
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:foo\nβbar:::bar:foo\nβbar:::::]",
        "[text(2,16):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="foo\nβbar" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_92x():
    """
    Test case extra 92: SetExt heading with full image with replacement in reference
    """
    # Arrange
    # Non-ASCII character in the reference part of a full image.
    source_markdown = """a![foo][baβr]a
---
[baβr]: /url 'title'"""
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:foo:::baβr:foo:::::]",
        "[text(1,19):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::baβr:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="foo" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_92a():
    """
    Test case extra 92a: variation of 92 with newline before special characters
    """
    # Arrange
    source_markdown = """a![foo][ba
βr]a
---
[ba
βr]: /url 'title'"""
    # Expected tokenization and HTML output; the link-ref-def label is
    # split across two lines, hence the "ba βr:ba\nβr" normalized/raw pair.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:foo:::ba\nβr:foo:::::]",
        "[text(2,9):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba βr:ba\nβr: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="foo" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_93x():
    """
    Test case extra 93: SetExt heading with full image with backslash in reference
    """
    # Arrange
    source_markdown = """a![foo][ba\\]r]a
---
[ba\\]r]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:foo:::ba\\]r:foo:::::]",
        "[text(1,15):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::ba\\]r:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="foo" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_93a():
    """
    Test case extra 93a: variation of 93 with newline before special characters
    """
    # Arrange
    source_markdown = """a![foo][ba
\\]r]a
---
[ba
\\]r]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:foo:::ba\n\\]r:foo:::::]",
        "[text(2,5):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba \\]r:ba\n\\]r: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="foo" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_94x():
    """
    Test case extra 94: SetExt heading with shortcut image with replacement in label
    """
    # Arrange
    source_markdown = """a![baβr]a
---
[baβr]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):shortcut:/url:title:baβr::::baβr:::::]",
        "[text(1,14):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::baβr:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="baβr" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_94a():
    """
    Test case extra 94a: variation of 94 with newline before special characters
    """
    # Arrange
    source_markdown = """a![ba
βr]a
---
[ba
βr]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):shortcut:/url:title:ba\nβr::::ba\nβr:::::]",
        "[text(2,9):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba βr:ba\nβr: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="ba\nβr" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_95x():
    """
    Test case extra 95: SetExt heading with shortcut image with backslash in label
    """
    # Arrange
    source_markdown = """a![ba\\]r]a
---
[ba\\]r]: /url 'title'"""
    # Expected tokenization and HTML output; the alt text holds the
    # unescaped "ba]r" while the raw label keeps the backslash.
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):shortcut:/url:title:ba]r::::ba\\]r:::::]",
        "[text(1,10):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::ba\\]r:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="ba]r" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_95a():
    """
    Test case extra 95a: variation of 95 with newline before special characters
    """
    # Arrange
    source_markdown = """a![ba
\\]r]a
---
[ba
\\]r]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):shortcut:/url:title:ba\n]r::::ba\n\\]r:::::]",
        "[text(2,5):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba \\]r:ba\n\\]r: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="ba\n]r" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_96x():
    """
    Test case extra 96: SetExt heading with collapsed image with replacement in label
    """
    # Arrange
    source_markdown = """a![baβr][]a
---
[baβr]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):collapsed:/url:title:baβr::::baβr:::::]",
        "[text(1,16):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::baβr:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="baβr" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_96a():
    """
    Test case extra 96a: variation of 96 with newline before special characters
    """
    # Arrange
    source_markdown = """a![ba
βr][]a
---
[ba
βr]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):collapsed:/url:title:ba\nβr::::ba\nβr:::::]",
        "[text(2,11):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba βr:ba\nβr: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="ba\nβr" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_97x():
    """
    Test case extra 97: SetExt heading with collapsed image with backslash in label
    """
    # Arrange
    source_markdown = """a![ba\\]r][]a
---
[ba\\]r]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):collapsed:/url:title:ba]r::::ba\\]r:::::]",
        "[text(1,12):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::ba\\]r:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="ba]r" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_97a():
    """
    Test case extra 97a: variation of 97 with newline before special characters
    """
    # Arrange
    source_markdown = """a![ba
\\]r][]a
---
[ba
\\]r]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):collapsed:/url:title:ba\n]r::::ba\n\\]r:::::]",
        "[text(2,7):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba \\]r:ba\n\\]r: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="ba\n]r" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_98x():
    """
    Test case extra 98: SetExt heading with full image with replacement in label
    """
    # Arrange
    source_markdown = """a![foβo][bar]a
---
[bar]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:foβo:::bar:foβo:::::]",
        "[text(1,19):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="foβo" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_98a():
    """
    Test case extra 98a: variation of 98 with newline before special characters
    """
    # Arrange
    source_markdown = """a![fo
βo][bar]a
---
[bar]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:fo\nβo:::bar:fo\nβo:::::]",
        "[text(2,14):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="fo\nβo" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_99x():
    """
    Test case extra 99: SetExt heading with full image with backslash in label
    """
    # Arrange
    source_markdown = """a![fo\\]o][bar]a
---
[bar]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:fo]o:::bar:fo\\]o:::::]",
        "[text(1,15):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="fo]o" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_99a():
    """
    Test case extra 99a: variation of 99 with newline before special characters
    """
    # Arrange
    # NOTE(review): unlike 98a, the link-ref-def label here is "[\nbar]"
    # (a newline inside the label); the expected link-ref-def token below
    # matches that, so it appears intentional — confirm against test 99.
    source_markdown = """a![fo
\\]o][bar]a
---
[\nbar]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:fo\n]o:::bar:fo\n\\]o:::::]",
        "[text(2,10):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:\nbar: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="fo\n]o" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_a0x():
    """
    Test case extra A0: SetExt heading with full image with backslash in link
    """
    # Arrange
    source_markdown = """a![foo][ba\\]r]a
---
[ba\\]r]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:foo:::ba\\]r:foo:::::]",
        "[text(1,15):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::ba\\]r:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="foo" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_a0a():
    """
    Test case extra A0a: variation of A0 with newline before special characters
    """
    # Arrange
    source_markdown = """a![foo][ba
\\]r]a
---
[ba
\\]r]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:foo:::ba\n\\]r:foo:::::]",
        "[text(2,5):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba \\]r:ba\n\\]r: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="foo" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_a1x():
    """
    Test case extra A1: SetExt heading with full image with replacement in link
    """
    # Arrange
    source_markdown = """a![foo][baβr]a
---
[baβr]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(2,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:foo:::baβr:foo:::::]",
        "[text(1,19):a:]",
        "[end-setext::]",
        "[BLANK(3,1):]",
        "[link-ref-def(4,1):True::baβr:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="foo" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_a1a():
    """
    Test case extra A1a: variation of A1 with newline before special characters
    """
    # Arrange
    source_markdown = """a![foo][ba
βr]a
---
[ba
βr]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:foo:::ba\nβr:foo:::::]",
        "[text(2,9):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::ba βr:ba\nβr: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="foo" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_a2():
    """
    Test case extra A2: SetExt heading with full image with label newline in url link
    """
    # Arrange
    # NOTE(review): source_markdown looks truncated — the expected tokens
    # reference an inline image ("fo\no" pointing at "/my url") that does
    # not appear in the text below; verify against the repository copy.
    source_markdown = """aa
---"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):inline:/my%20url::fo\no:/my url:::fo\no:True::::]",
        "[text(2,14):a:]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>a<img src="/my%20url" alt="fo\no" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_a3():
    """
    Test case extra A3: SetExt with inline link label text split over 2 lines
    """
    # Arrange
    source_markdown = """abc
[li
nk](/uri "title" )
def
---"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(5,1):-:3::(1,1)]",
        "[text(1,1):abc\n::\n]",
        '[link(2,1):inline:/uri:title::::li\nnk:False:":: : ]',
        "[text(2,2):li\nnk::\n]",
        "[end-link::]",
        "[text(3,19):\ndef::\n \x02]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>abc\n<a href="/uri" title="title">li\nnk</a>\ndef</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_a4():
    """
    Test case extra A4: SetExt with inline link label code span split over 2 lines
    """
    # Arrange
    source_markdown = """abc
[li`de
fg`nk](/uri "title" )
def
---"""
    # Expected tokenization and HTML output; in the icode-span token the
    # \a<orig>\a<rendered>\a triple records the newline rendered as a space.
    expected_tokens = [
        "[setext(5,1):-:3::(1,1)]",
        "[text(1,1):abc\n::\n]",
        '[link(2,1):inline:/uri:title::::li`de\nfg`nk:False:":: : ]',
        "[text(2,2):li:]",
        "[icode-span(2,4):de\a\n\a \afg:`::]",
        "[text(3,4):nk:]",
        "[end-link::]",
        "[text(3,22):\ndef::\n \x02]",
        "[end-setext::]",
    ]
    expected_gfm = (
        """<h2>abc\n<a href="/uri" title="title">li<code>de fg</code>nk</a>\ndef</h2>"""
    )
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_a5():
    """
    Test case extra A5: SetExt with inline link label raw html split over 2 lines
    """
    # Arrange
    source_markdown = """abc
[li<de
fg>nk](/uri "title" )
def
---"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(5,1):-:3::(1,1)]",
        "[text(1,1):abc\n::\n]",
        '[link(2,1):inline:/uri:title::::li<de\nfg>nk:False:":: : ]',
        "[text(2,2):li:]",
        "[raw-html(2,4):de\nfg]",
        "[text(3,4):nk:]",
        "[end-link::]",
        "[text(3,22):\ndef::\n \x02]",
        "[end-setext::]",
    ]
    expected_gfm = (
        """<h2>abc\n<a href="/uri" title="title">li<de\nfg>nk</a>\ndef</h2>"""
    )
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_a6():
    """
    Test case extra A6: SetExt with inline link label text split over 2 lines
    """
    # Arrange
    source_markdown = """a[li
nk][bar]a
---
[bar]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::bar:li\nnk:::::]",
        "[text(1,3):li\nnk::\n]",
        "[end-link::]",
        "[text(2,9):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">li\nnk</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_a7():
    """
    Test case extra A7: SetExt with full link label code span split over 2 lines
    """
    # Arrange
    source_markdown = """a[li`de
fg`nk][bar]a
---
[bar]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::bar:li`de\nfg`nk:::::]",
        "[text(1,3):li:]",
        "[icode-span(1,5):de\a\n\a \afg:`::]",
        "[text(2,4):nk:]",
        "[end-link::]",
        "[text(2,12):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = (
        """<h2>a<a href="/url" title="title">li<code>de fg</code>nk</a>a</h2>"""
    )
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_a8():
    """
    Test case extra A8: SetExt with full link label raw html split over 2 lines
    """
    # Arrange
    source_markdown = """a[li<de
fg>nk][bar]a
---
[bar]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):full:/url:title:::bar:li<de\nfg>nk:::::]",
        "[text(1,3):li:]",
        "[raw-html(1,5):de\nfg]",
        "[text(2,4):nk:]",
        "[end-link::]",
        "[text(2,12):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">li<de\nfg>nk</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_a9():
    """
    Test case extra A9: SetExt with collapsed link label text split over 2 lines
    """
    # Arrange
    source_markdown = """a[li
nk][]a
---
[li\nnk]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):collapsed:/url:title::::li\nnk:::::]",
        "[text(1,3):li\nnk::\n]",
        "[end-link::]",
        "[text(2,6):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::li nk:li\nnk: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">li\nnk</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_b0():
    """
    Test case extra b0: SetExt with collapsed link label code span split over 2 lines
    """
    # Arrange
    source_markdown = """a[li`de
fg`nk][]a
---
[li`de\nfg`nk]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):collapsed:/url:title::::li`de\nfg`nk:::::]",
        "[text(1,3):li:]",
        "[icode-span(1,5):de\a\n\a \afg:`::]",
        "[text(2,4):nk:]",
        "[end-link::]",
        "[text(2,9):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::li`de fg`nk:li`de\nfg`nk: :/url:: :title:'title':]",
    ]
    expected_gfm = (
        """<h2>a<a href="/url" title="title">li<code>de fg</code>nk</a>a</h2>"""
    )
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_b1():
    """
    Test case extra b1: SetExt with collapsed link label raw html split over 2 lines
    """
    # Arrange
    source_markdown = """a[li<de
fg>nk][]a
---
[li<de\nfg>nk]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):collapsed:/url:title::::li<de\nfg>nk:::::]",
        "[text(1,3):li:]",
        "[raw-html(1,5):de\nfg]",
        "[text(2,4):nk:]",
        "[end-link::]",
        "[text(2,9):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::li<de fg>nk:li<de\nfg>nk: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">li<de\nfg>nk</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_b2():
    """
    Test case extra b2: SetExt with shortcut link label text split over 2 lines
    """
    # Arrange
    source_markdown = """a[li
nk]a
---
[li\nnk]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):shortcut:/url:title::::li\nnk:::::]",
        "[text(1,3):li\nnk::\n]",
        "[end-link::]",
        "[text(2,4):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::li nk:li\nnk: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">li\nnk</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_b3():
    """
    Test case extra b3: SetExt with shortcut link label code span split over 2 lines
    """
    # Arrange
    source_markdown = """a[li`de
fg`nk]a
---
[li`de\nfg`nk]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):shortcut:/url:title::::li`de\nfg`nk:::::]",
        "[text(1,3):li:]",
        "[icode-span(1,5):de\a\n\a \afg:`::]",
        "[text(2,4):nk:]",
        "[end-link::]",
        "[text(2,7):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::li`de fg`nk:li`de\nfg`nk: :/url:: :title:'title':]",
    ]
    expected_gfm = (
        """<h2>a<a href="/url" title="title">li<code>de fg</code>nk</a>a</h2>"""
    )
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_b4():
    """
    Test case extra b4: SetExt with shortcut link label raw html split over 2 lines
    """
    # Arrange
    source_markdown = """a[li<de
fg>nk]a
---
[li<de\nfg>nk]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):shortcut:/url:title::::li<de\nfg>nk:::::]",
        "[text(1,3):li:]",
        "[raw-html(1,5):de\nfg]",
        "[text(2,4):nk:]",
        "[end-link::]",
        "[text(2,7):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::li<de fg>nk:li<de\nfg>nk: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<a href="/url" title="title">li<de\nfg>nk</a>a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_b5():
    """
    Test case extra b5: SetExt with inline image label text split over 2 lines
    """
    # Arrange
    # NOTE(review): source_markdown looks truncated — the expected tokens
    # reference an inline image ("li\nnk" against /uri) that does not
    # appear in the text below; verify against the repository copy.
    source_markdown = """abc

def
---"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(5,1):-:3::(1,1)]",
        "[text(1,1):abc\n::\n]",
        '[image(2,1):inline:/uri:title:li\nnk::::li\nnk:False:":: : ]',
        "[text(3,19):\ndef::\n \x02]",
        "[end-setext::]",
    ]
    expected_gfm = (
        """<h2>abc\n<img src="/uri" alt="li\nnk" title="title" />\ndef</h2>"""
    )
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_b6():
    """
    Test case extra b6: SetExt with inline image label code span split over 2 lines
    """
    # Arrange
    # NOTE(review): source_markdown looks truncated — the expected tokens
    # reference an inline image with a code span ("li`de\nfg`nk") that
    # does not appear in the text below; verify against the repository copy.
    source_markdown = """abc

def
---"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(5,1):-:3::(1,1)]",
        "[text(1,1):abc\n::\n]",
        '[image(2,1):inline:/uri:title:lide fgnk::::li`de\nfg`nk:False:":: : ]',
        "[text(3,22):\ndef::\n \x02]",
        "[end-setext::]",
    ]
    expected_gfm = (
        """<h2>abc\n<img src="/uri" alt="lide fgnk" title="title" />\ndef</h2>"""
    )
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_b7():
    """
    Test case extra b7: SetExt with inline image label raw html split over 2 lines
    """
    # Arrange
    # NOTE(review): source_markdown looks truncated — the expected tokens
    # reference an inline image with raw html ("li<de\nfg>nk") that does
    # not appear in the text below; verify against the repository copy.
    source_markdown = """abc

def
---"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(5,1):-:3::(1,1)]",
        "[text(1,1):abc\n::\n]",
        '[image(2,1):inline:/uri:title:li<de\nfg>nk::::li<de\nfg>nk:False:":: : ]',
        "[text(3,22):\ndef::\n \x02]",
        "[end-setext::]",
    ]
    expected_gfm = (
        """<h2>abc\n<img src="/uri" alt="li<de\nfg>nk" title="title" />\ndef</h2>"""
    )
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_b8():
    """
    Test case extra b8: SetExt with inline image label text split over 2 lines
    """
    # Arrange
    source_markdown = """a![li
nk][bar]a
---
[bar]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:li\nnk:::bar:li\nnk:::::]",
        "[text(2,9):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="li\nnk" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_b9():
    """
    Test case extra b9: SetExt with full image label code span split over 2 lines
    """
    # Arrange
    source_markdown = """a![li`de
fg`nk][bar]a
---
[bar]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:lide fgnk:::bar:li`de\nfg`nk:::::]",
        "[text(2,12):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="lide fgnk" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_c0():
    """
    Test case extra c0: SetExt with full image label raw html split over 2 lines
    """
    # Arrange
    source_markdown = """a![li<de
fg>nk][bar]a
---
[bar]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):full:/url:title:li<de\nfg>nk:::bar:li<de\nfg>nk:::::]",
        "[text(2,12):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::bar:: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="li<de\nfg>nk" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_c1():
    """
    Test case extra c1: SetExt with collapsed image label text split over 2 lines
    """
    # Arrange
    source_markdown = """a![li
nk][]a
---
[li\nnk]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):collapsed:/url:title:li\nnk::::li\nnk:::::]",
        "[text(2,6):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::li nk:li\nnk: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="li\nnk" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_c2():
    """
    Test case extra c2: SetExt with collapsed image label code span split over 2 lines
    """
    # Arrange
    source_markdown = """a![li`de
fg`nk][]a
---
[li`de\nfg`nk]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):collapsed:/url:title:lide fgnk::::li`de\nfg`nk:::::]",
        "[text(2,9):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::li`de fg`nk:li`de\nfg`nk: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="lide fgnk" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_c3():
    """
    Test case extra c3: SetExt with collapsed image label raw html split over 2 lines
    """
    # Arrange
    source_markdown = """a![li<de
fg>nk][]a
---
[li<de\nfg>nk]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):collapsed:/url:title:li<de\nfg>nk::::li<de\nfg>nk:::::]",
        "[text(2,9):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::li<de fg>nk:li<de\nfg>nk: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="li<de\nfg>nk" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_c4():
    """
    Test case extra c4: SetExt with shortcut image label text split over 2 lines
    """
    # Arrange
    source_markdown = """a![li
nk]a
---
[li\nnk]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):shortcut:/url:title:li\nnk::::li\nnk:::::]",
        "[text(2,4):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::li nk:li\nnk: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="li\nnk" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_c5():
    """
    Test case extra c5: SetExt with shortcut image label code span split over 2 lines
    """
    # Arrange
    source_markdown = """a![li`de
fg`nk]a
---
[li`de\nfg`nk]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):shortcut:/url:title:lide fgnk::::li`de\nfg`nk:::::]",
        "[text(2,7):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::li`de fg`nk:li`de\nfg`nk: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="lide fgnk" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_c6():
    """
    Test case extra c6: SetExt with shortcut image label raw html split over 2 lines
    """
    # Arrange
    source_markdown = """a![li<de
fg>nk]a
---
[li<de\nfg>nk]: /url 'title'"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(3,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):shortcut:/url:title:li<de\nfg>nk::::li<de\nfg>nk:::::]",
        "[text(2,7):a:]",
        "[end-setext::]",
        "[BLANK(4,1):]",
        "[link-ref-def(5,1):True::li<de fg>nk:li<de\nfg>nk: :/url:: :title:'title':]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="li<de\nfg>nk" title="title" />a</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_c7():
    """
    Test case extra c7: SetExt with link split over 2 lines followed by text split over 2 lines
    """
    # Arrange
    source_markdown = """a[li<de
fg>nk](/url)a
b
---
"""
    # Expected tokenization and HTML output; trailing newline in the
    # source produces the final BLANK token.
    expected_tokens = [
        "[setext(4,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):inline:/url:::::li<de\nfg>nk:False::::]",
        "[text(1,3):li:]",
        "[raw-html(1,5):de\nfg]",
        "[text(2,4):nk:]",
        "[end-link::]",
        "[text(2,13):a\nb::\n]",
        "[end-setext::]",
        "[BLANK(5,1):]",
    ]
    expected_gfm = """<h2>a<a href="/url">li<de\nfg>nk</a>a\nb</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_c8():
    """
    Test case extra c8: SetExt with image split over 2 lines followed by text split over 2 lines
    """
    # Arrange
    # NOTE(review): source_markdown looks truncated — the expected tokens
    # reference an inline image ("li<de\nfg>nk" against /url) that does
    # not appear in the text below; verify against the repository copy.
    source_markdown = """aa
b
---
"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(4,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):inline:/url::li<de\nfg>nk::::li<de\nfg>nk:False::::]",
        "[text(2,13):a\nb::\n]",
        "[end-setext::]",
        "[BLANK(5,1):]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="li<de\nfg>nk" />a\nb</h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_c9():
    """
    Test case extra c9: SetExt with link split over 2 lines followed by code span split over 2 lines
    """
    # Arrange
    source_markdown = """a[li<de
fg>nk](/url)`a
b`
---
"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(4,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):inline:/url:::::li<de\nfg>nk:False::::]",
        "[text(1,3):li:]",
        "[raw-html(1,5):de\nfg]",
        "[text(2,4):nk:]",
        "[end-link::]",
        "[icode-span(2,13):a\a\n\a \ab:`::]",
        "[end-setext::]",
        "[BLANK(5,1):]",
    ]
    expected_gfm = """<h2>a<a href="/url">li<de\nfg>nk</a><code>a b</code></h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_d0():
    """
    Test case extra d0: SetExt with image split over 2 lines followed by code span split over 2 lines
    """
    # Arrange
    # NOTE(review): source_markdown looks truncated — the expected tokens
    # reference an inline image before the code span that does not appear
    # in the text below; verify against the repository copy.
    source_markdown = """a`a
b`
---
"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(4,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):inline:/url::li<de\nfg>nk::::li<de\nfg>nk:False::::]",
        "[icode-span(2,13):a\a\n\a \ab:`::]",
        "[end-setext::]",
        "[BLANK(5,1):]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="li<de\nfg>nk" /><code>a b</code></h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_d1():
    """
    Test case extra d1: SetExt with link split over 2 lines followed by raw html split over 2 lines
    """
    # Arrange
    source_markdown = """a[li<de
fg>nk](/url)<a
b>
---
"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(4,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):inline:/url:::::li<de\nfg>nk:False::::]",
        "[text(1,3):li:]",
        "[raw-html(1,5):de\nfg]",
        "[text(2,4):nk:]",
        "[end-link::]",
        "[raw-html(2,13):a\nb]",
        "[end-setext::]",
        "[BLANK(5,1):]",
    ]
    expected_gfm = """<h2>a<a href="/url">li<de\nfg>nk</a><a\nb></h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_d2():
    """
    Test case extra d2: SetExt with image split over 2 lines followed by raw html split over 2 lines
    """
    # Arrange
    # NOTE(review): source_markdown looks truncated — the expected tokens
    # reference an inline image before the raw html that does not appear
    # in the text below; verify against the repository copy.
    source_markdown = """a<a
b>
---
"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(4,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):inline:/url::li<de\nfg>nk::::li<de\nfg>nk:False::::]",
        "[raw-html(2,13):a\nb]",
        "[end-setext::]",
        "[BLANK(5,1):]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="li<de\nfg>nk" /><a\nb></h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_d3():
    """
    Test case extra d3: SetExt with link split over 2 lines followed by emphasis split over 2 lines
    """
    # Arrange
    source_markdown = """a[li<de
fg>nk](/url)*a
b*
---
"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(4,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[link(1,2):inline:/url:::::li<de\nfg>nk:False::::]",
        "[text(1,3):li:]",
        "[raw-html(1,5):de\nfg]",
        "[text(2,4):nk:]",
        "[end-link::]",
        "[emphasis(2,13):1:*]",
        "[text(2,14):a\nb::\n]",
        "[end-emphasis(3,2)::]",
        "[end-setext::]",
        "[BLANK(5,1):]",
    ]
    expected_gfm = """<h2>a<a href="/url">li<de\nfg>nk</a><em>a\nb</em></h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_d4():
    """
    Test case extra d4: SetExt with image split over 2 lines followed by emphasis split over 2 lines
    """
    # Arrange
    # NOTE(review): source_markdown looks truncated — the expected tokens
    # reference an inline image before the emphasis that does not appear
    # in the text below; verify against the repository copy.
    source_markdown = """a*a
b*
---
"""
    # Expected tokenization and HTML output.
    expected_tokens = [
        "[setext(4,1):-:3::(1,1)]",
        "[text(1,1):a:]",
        "[image(1,2):inline:/url::li<de\nfg>nk::::li<de\nfg>nk:False::::]",
        "[emphasis(2,13):1:*]",
        "[text(2,14):a\nb::\n]",
        "[end-emphasis(3,2)::]",
        "[end-setext::]",
        "[BLANK(5,1):]",
    ]
    expected_gfm = """<h2>a<img src="/url" alt="li<de\nfg>nk" /><em>a\nb</em></h2>"""
    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_d5():
    """
    Test case extra d5: SetExt with link split at the whitespaces
    """
    # Arrange
    source_markdown = """abc
[link](
/uri
"title"
)
def
---"""
    # Token fixture: the link token records the whitespace consumed around the
    # split destination/title ("\n " fields); \x02 is the parser's internal
    # whitespace marker in the trailing text token.
    expected_tokens = [
        "[setext(7,1):-:3::(1,1)]",
        "[text(1,1):abc\n::\n]",
        '[link(2,1):inline:/uri:title::::link:False:":\n :\n :\n ]',
        "[text(2,2):link:]",
        "[end-link::]",
        "[text(5,5):\ndef::\n \x02]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>abc\n<a href="/uri" title="title">link</a>\ndef</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_d6():
    """
    Test case extra d6: SetExt with image split at the whitespaces
    """
    # Arrange
    # NOTE(review): the image markdown was missing from source_markdown while the
    # expected tokens still described it; reconstructed by mirroring test case d5
    # with image syntax ("![link](") so the source matches the image(2,1) token.
    # TODO confirm against the upstream test suite.
    source_markdown = """abc
![link](
/uri
"title"
)
def
---"""
    expected_tokens = [
        "[setext(7,1):-:3::(1,1)]",
        "[text(1,1):abc\n::\n]",
        '[image(2,1):inline:/uri:title:link::::link:False:":\n :\n :\n ]',
        "[text(5,5):\ndef::\n \x02]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>abc\n<img src="/uri" alt="link" title="title" />\ndef</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_d7():
    """
    Test case extra d7: SetExt with link surrounded by emphasis
    """
    # Arrange
    source_markdown = """abc
*[link](/uri "title")*
def
---"""
    # Token fixture: positions are (line,col) within the setext heading.
    expected_tokens = [
        "[setext(4,1):-:3::(1,1)]",
        "[text(1,1):abc\n::\n]",
        "[emphasis(2,1):1:*]",
        '[link(2,2):inline:/uri:title::::link:False:":: :]',
        "[text(2,3):link:]",
        "[end-link::]",
        "[end-emphasis(2,22)::]",
        "[text(2,23):\ndef::\n]",
        "[end-setext::]",
    ]
    expected_gfm = (
        """<h2>abc\n<em><a href="/uri" title="title">link</a></em>\ndef</h2>"""
    )

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_d8():
    """
    Test case extra d8: SetExt with image surrounded by emphasis
    """
    # Arrange
    # NOTE(review): line 2 read only "**" -- the image markdown between the
    # asterisks was missing while the expected tokens still described it;
    # restored it (end-emphasis(2,23) matches the 23-character line).
    source_markdown = """abc
*![link](/uri "title")*
def
---"""
    expected_tokens = [
        "[setext(4,1):-:3::(1,1)]",
        "[text(1,1):abc\n::\n]",
        "[emphasis(2,1):1:*]",
        '[image(2,2):inline:/uri:title:link::::link:False:":: :]',
        "[end-emphasis(2,23)::]",
        "[text(2,24):\ndef::\n]",
        "[end-setext::]",
    ]
    expected_gfm = (
        """<h2>abc\n<em><img src="/uri" alt="link" title="title" /></em>\ndef</h2>"""
    )

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_d9():
    """
    Test case extra d9: SetExt with emphasis inside of link label
    """
    # Arrange
    source_markdown = """abc
[*link*](/uri "title")
def
---"""
    # Token fixture: the link token's label field keeps the raw "*link*" text;
    # the emphasis tokens nest inside the link.
    expected_tokens = [
        "[setext(4,1):-:3::(1,1)]",
        "[text(1,1):abc\n::\n]",
        '[link(2,1):inline:/uri:title::::*link*:False:":: :]',
        "[emphasis(2,2):1:*]",
        "[text(2,3):link:]",
        "[end-emphasis(2,7)::]",
        "[end-link::]",
        "[text(2,23):\ndef::\n]",
        "[end-setext::]",
    ]
    expected_gfm = (
        """<h2>abc\n<a href="/uri" title="title"><em>link</em></a>\ndef</h2>"""
    )

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
@pytest.mark.gfm
def test_setext_headings_extra_e0():
    """
    Test case extra e0: SetExt with emphasis inside of image label
    """
    # Arrange
    # NOTE(review): the image markdown was missing from source_markdown while the
    # expected tokens still described it; restored '![*link*](/uri "title")'
    # (text(2,24) matches the 23-character line).
    source_markdown = """abc
![*link*](/uri "title")
def
---"""
    expected_tokens = [
        "[setext(4,1):-:3::(1,1)]",
        "[text(1,1):abc\n::\n]",
        '[image(2,1):inline:/uri:title:link::::*link*:False:":: :]',
        "[text(2,24):\ndef::\n]",
        "[end-setext::]",
    ]
    expected_gfm = """<h2>abc\n<img src="/uri" alt="link" title="title" />\ndef</h2>"""

    # Act & Assert
    act_and_assert(source_markdown, expected_gfm, expected_tokens)
| 26.232742
| 120
| 0.538887
| 18,543
| 133,000
| 3.736289
| 0.02443
| 0.080425
| 0.064144
| 0.045957
| 0.909255
| 0.889178
| 0.863312
| 0.847031
| 0.835354
| 0.824009
| 0
| 0.039372
| 0.228293
| 133,000
| 5,069
| 121
| 26.237917
| 0.635648
| 0.146955
| 0
| 0.7317
| 0
| 0.104932
| 0.438961
| 0.183976
| 0
| 0
| 0
| 0
| 0.062834
| 1
| 0.06252
| false
| 0
| 0.000628
| 0
| 0.063148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ad4fafcb502ea54b2a1f5a1e0f4f759fe3ae8d8
| 24,234
|
py
|
Python
|
sdk/python/pulumi_gcp/projects/iam_policy.py
|
la3mmchen/pulumi-gcp
|
0e3c6fecd062dff78a4fd95b7ebd5ce4492ad1ea
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/projects/iam_policy.py
|
la3mmchen/pulumi-gcp
|
0e3c6fecd062dff78a4fd95b7ebd5ce4492ad1ea
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/projects/iam_policy.py
|
la3mmchen/pulumi-gcp
|
0e3c6fecd062dff78a4fd95b7ebd5ce4492ad1ea
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['IAMPolicyArgs', 'IAMPolicy']
@pulumi.input_type
class IAMPolicyArgs:
    # Required inputs for a gcp.projects.IAMPolicy resource.  Generated by the
    # Pulumi Terraform Bridge (tfgen) -- see the file header before hand-editing.

    def __init__(__self__, *,
                 policy_data: pulumi.Input[str],
                 project: pulumi.Input[str]):
        """
        The set of arguments for constructing a IAMPolicy resource.

        :param pulumi.Input[str] policy_data: The `organizations.get_iam_policy` data source that represents
               the IAM policy that will be applied to the project. The policy will be
               merged with any existing policy applied to the project.
        :param pulumi.Input[str] project: The project id of the target project. This is not
               inferred from the provider.
        """
        pulumi.set(__self__, "policy_data", policy_data)
        pulumi.set(__self__, "project", project)

    @property
    @pulumi.getter(name="policyData")
    def policy_data(self) -> pulumi.Input[str]:
        """
        The `organizations.get_iam_policy` data source that represents
        the IAM policy that will be applied to the project. The policy will be
        merged with any existing policy applied to the project.
        """
        return pulumi.get(self, "policy_data")

    @policy_data.setter
    def policy_data(self, value: pulumi.Input[str]):
        pulumi.set(self, "policy_data", value)

    @property
    @pulumi.getter
    def project(self) -> pulumi.Input[str]:
        """
        The project id of the target project. This is not
        inferred from the provider.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: pulumi.Input[str]):
        pulumi.set(self, "project", value)
@pulumi.input_type
class _IAMPolicyState:
    # State bag used when looking up / filtering existing IAMPolicy resources.
    # All fields are optional, unlike IAMPolicyArgs.  Generated by tfgen -- see
    # the file header before hand-editing.

    def __init__(__self__, *,
                 etag: Optional[pulumi.Input[str]] = None,
                 policy_data: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering IAMPolicy resources.

        :param pulumi.Input[str] etag: (Computed) The etag of the project's IAM policy.
        :param pulumi.Input[str] policy_data: The `organizations.get_iam_policy` data source that represents
               the IAM policy that will be applied to the project. The policy will be
               merged with any existing policy applied to the project.
        :param pulumi.Input[str] project: The project id of the target project. This is not
               inferred from the provider.
        """
        # Only set properties that were actually supplied, so absent fields stay
        # unset on the input bag rather than becoming explicit Nones.
        if etag is not None:
            pulumi.set(__self__, "etag", etag)
        if policy_data is not None:
            pulumi.set(__self__, "policy_data", policy_data)
        if project is not None:
            pulumi.set(__self__, "project", project)

    @property
    @pulumi.getter
    def etag(self) -> Optional[pulumi.Input[str]]:
        """
        (Computed) The etag of the project's IAM policy.
        """
        return pulumi.get(self, "etag")

    @etag.setter
    def etag(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "etag", value)

    @property
    @pulumi.getter(name="policyData")
    def policy_data(self) -> Optional[pulumi.Input[str]]:
        """
        The `organizations.get_iam_policy` data source that represents
        the IAM policy that will be applied to the project. The policy will be
        merged with any existing policy applied to the project.
        """
        return pulumi.get(self, "policy_data")

    @policy_data.setter
    def policy_data(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "policy_data", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The project id of the target project. This is not
        inferred from the provider.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)
class IAMPolicy(pulumi.CustomResource):
    # Resource wrapper generated by the Pulumi Terraform Bridge (tfgen); see the
    # file header -- prefer regenerating over hand-editing.

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 policy_data: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Four different resources help you manage your IAM policy for a project. Each of these resources serves a different use case:

        * `projects.IAMPolicy`: Authoritative. Sets the IAM policy for the project and replaces any existing policy already attached.
        * `projects.IAMBinding`: Authoritative for a given role. Updates the IAM policy to grant a role to a list of members. Other roles within the IAM policy for the project are preserved.
        * `projects.IAMMember`: Non-authoritative. Updates the IAM policy to grant a role to a new member. Other members for the role for the project are preserved.
        * `projects.IAMAuditConfig`: Authoritative for a given service. Updates the IAM policy to enable audit logging for the given service.

        > **Note:** `projects.IAMPolicy` **cannot** be used in conjunction with `projects.IAMBinding`, `projects.IAMMember`, or `projects.IAMAuditConfig` or they will fight over what your policy should be.

        > **Note:** `projects.IAMBinding` resources **can be** used in conjunction with `projects.IAMMember` resources **only if** they do not grant privilege to the same role.

        > **Note:** The underlying API method `projects.setIamPolicy` has a lot of constraints which are documented [here](https://cloud.google.com/resource-manager/reference/rest/v1/projects/setIamPolicy). In addition to these constraints,
        IAM Conditions cannot be used with Basic Roles such as Owner. Violating these constraints will result in the API returning 400 error code so please review these if you encounter errors with this resource.

        ## google\_project\_iam\_policy

        > **Be careful!** You can accidentally lock yourself out of your project
        using this resource. Deleting a `projects.IAMPolicy` removes access
        from anyone without organization-level access to the project. Proceed with caution.
        It's not recommended to use `projects.IAMPolicy` with your provider project
        to avoid locking yourself out, and it should generally only be used with projects
        fully managed by this provider. If you do use this resource, it is recommended to **import** the policy before
        applying the change.

        ```python
        import pulumi
        import pulumi_gcp as gcp

        admin = gcp.organizations.get_iam_policy(bindings=[gcp.organizations.GetIAMPolicyBindingArgs(
            role="roles/editor",
            members=["user:jane@example.com"],
        )])
        project = gcp.projects.IAMPolicy("project",
            project="your-project-id",
            policy_data=admin.policy_data)
        ```

        With IAM Conditions:

        ```python
        import pulumi
        import pulumi_gcp as gcp

        admin = gcp.organizations.get_iam_policy(bindings=[gcp.organizations.GetIAMPolicyBindingArgs(
            condition=gcp.organizations.GetIAMPolicyBindingConditionArgs(
                description="Expiring at midnight of 2019-12-31",
                expression="request.time < timestamp(\"2020-01-01T00:00:00Z\")",
                title="expires_after_2019_12_31",
            ),
            members=["user:jane@example.com"],
            role="roles/compute.admin",
        )])
        project = gcp.projects.IAMPolicy("project",
            policy_data=admin.policy_data,
            project="your-project-id")
        ```

        ## google\_project\_iam\_binding

        ```python
        import pulumi
        import pulumi_gcp as gcp

        project = gcp.projects.IAMBinding("project",
            members=["user:jane@example.com"],
            project="your-project-id",
            role="roles/editor")
        ```

        With IAM Conditions:

        ```python
        import pulumi
        import pulumi_gcp as gcp

        project = gcp.projects.IAMBinding("project",
            condition=gcp.projects.IAMBindingConditionArgs(
                description="Expiring at midnight of 2019-12-31",
                expression="request.time < timestamp(\"2020-01-01T00:00:00Z\")",
                title="expires_after_2019_12_31",
            ),
            members=["user:jane@example.com"],
            project="your-project-id",
            role="roles/container.admin")
        ```

        ## google\_project\_iam\_member

        ```python
        import pulumi
        import pulumi_gcp as gcp

        project = gcp.projects.IAMMember("project",
            member="user:jane@example.com",
            project="your-project-id",
            role="roles/editor")
        ```

        With IAM Conditions:

        ```python
        import pulumi
        import pulumi_gcp as gcp

        project = gcp.projects.IAMMember("project",
            condition=gcp.projects.IAMMemberConditionArgs(
                description="Expiring at midnight of 2019-12-31",
                expression="request.time < timestamp(\"2020-01-01T00:00:00Z\")",
                title="expires_after_2019_12_31",
            ),
            member="user:jane@example.com",
            project="your-project-id",
            role="roles/firebase.admin")
        ```

        ## google\_project\_iam\_audit\_config

        ```python
        import pulumi
        import pulumi_gcp as gcp

        project = gcp.projects.IAMAuditConfig("project",
            audit_log_configs=[
                gcp.projects.IAMAuditConfigAuditLogConfigArgs(
                    log_type="ADMIN_READ",
                ),
                gcp.projects.IAMAuditConfigAuditLogConfigArgs(
                    exempted_members=["user:joebloggs@hashicorp.com"],
                    log_type="DATA_READ",
                ),
            ],
            project="your-project-id",
            service="allServices")
        ```

        ## Import

        IAM member imports use space-delimited identifiers; the resource in question, the role, and the account.

        This member resource can be imported using the `project_id`, role, and member e.g.

        ```sh
         $ pulumi import gcp:projects/iAMPolicy:IAMPolicy my_project "your-project-id roles/viewer user:foo@example.com"
        ```

        IAM binding imports use space-delimited identifiers; the resource in question and the role.

        This binding resource can be imported using the `project_id` and role, e.g.

        ```sh
         $ pulumi import gcp:projects/iAMPolicy:IAMPolicy my_project "your-project-id roles/viewer"
        ```

        IAM policy imports use the identifier of the resource in question.

        This policy resource can be imported using the `project_id`.

        ```sh
         $ pulumi import gcp:projects/iAMPolicy:IAMPolicy my_project your-project-id
        ```

        IAM audit config imports use the identifier of the resource in question and the service, e.g.

        ```sh
         $ pulumi import gcp:projects/iAMPolicy:IAMPolicy my_project "your-project-id foo.googleapis.com"
        ```

        -> **Custom Roles**If you're importing a IAM resource with a custom role, make sure to use the
        full name of the custom role, e.g. `[projects/my-project|organizations/my-org]/roles/my-custom-role`.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] policy_data: The `organizations.get_iam_policy` data source that represents
               the IAM policy that will be applied to the project. The policy will be
               merged with any existing policy applied to the project.
        :param pulumi.Input[str] project: The project id of the target project. This is not
               inferred from the provider.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: IAMPolicyArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Create an IAMPolicy resource from a bundled ``IAMPolicyArgs``.

        See the keyword-argument ``__init__`` overload above for the full usage
        guide, code examples, and import instructions.

        :param str resource_name: The name of the resource.
        :param IAMPolicyArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # IAMPolicyArgs bundle or plain keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(IAMPolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       policy_data: Optional[pulumi.Input[str]] = None,
                       project: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared implementation behind both __init__ overloads: validates the
        # options, builds the property bag, and registers with the engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No id given -> creating a new resource: __props__ must not be
            # supplied, and required inputs are enforced (unless looking up by
            # urn, where the engine supplies the values).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = IAMPolicyArgs.__new__(IAMPolicyArgs)

            if policy_data is None and not opts.urn:
                raise TypeError("Missing required property 'policy_data'")
            __props__.__dict__["policy_data"] = policy_data
            if project is None and not opts.urn:
                raise TypeError("Missing required property 'project'")
            __props__.__dict__["project"] = project
            # etag is output-only; the provider computes it.
            __props__.__dict__["etag"] = None
        super(IAMPolicy, __self__).__init__(
            'gcp:projects/iAMPolicy:IAMPolicy',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            etag: Optional[pulumi.Input[str]] = None,
            policy_data: Optional[pulumi.Input[str]] = None,
            project: Optional[pulumi.Input[str]] = None) -> 'IAMPolicy':
        """
        Get an existing IAMPolicy resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] etag: (Computed) The etag of the project's IAM policy.
        :param pulumi.Input[str] policy_data: The `organizations.get_iam_policy` data source that represents
               the IAM policy that will be applied to the project. The policy will be
               merged with any existing policy applied to the project.
        :param pulumi.Input[str] project: The project id of the target project. This is not
               inferred from the provider.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _IAMPolicyState.__new__(_IAMPolicyState)

        __props__.__dict__["etag"] = etag
        __props__.__dict__["policy_data"] = policy_data
        __props__.__dict__["project"] = project
        return IAMPolicy(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[str]:
        """
        (Computed) The etag of the project's IAM policy.
        """
        return pulumi.get(self, "etag")

    @property
    @pulumi.getter(name="policyData")
    def policy_data(self) -> pulumi.Output[str]:
        """
        The `organizations.get_iam_policy` data source that represents
        the IAM policy that will be applied to the project. The policy will be
        merged with any existing policy applied to the project.
        """
        return pulumi.get(self, "policy_data")

    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """
        The project id of the target project. This is not
        inferred from the provider.
        """
        return pulumi.get(self, "project")
| 41.496575
| 240
| 0.632624
| 2,858
| 24,234
| 5.235829
| 0.107768
| 0.031409
| 0.03181
| 0.029404
| 0.883788
| 0.872427
| 0.860799
| 0.845763
| 0.834002
| 0.824779
| 0
| 0.010789
| 0.277131
| 24,234
| 583
| 241
| 41.567753
| 0.843418
| 0.647479
| 0
| 0.492537
| 1
| 0
| 0.087269
| 0.005476
| 0
| 0
| 0
| 0
| 0
| 1
| 0.149254
| false
| 0.007463
| 0.037313
| 0
| 0.276119
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a73fd1003779f12f08227e95989aeb45b095e69
| 3,546
|
py
|
Python
|
setup.py
|
bung87/jieba_fast
|
92b54dd9c1816382784cb0e8e02d92bf2564eea8
|
[
"MIT"
] | 2
|
2019-08-05T01:34:04.000Z
|
2019-10-12T15:49:42.000Z
|
setup.py
|
hufulin/jieba_fast
|
c85771c036ca4652c65268cbd749d15ac8fdde26
|
[
"MIT"
] | null | null | null |
setup.py
|
hufulin/jieba_fast
|
c85771c036ca4652c65268cbd749d15ac8fdde26
|
[
"MIT"
] | 1
|
2020-03-17T16:47:48.000Z
|
2020-03-17T16:47:48.000Z
|
# -*- coding: utf-8 -*-
"""Build script for jieba_fast.

Registers the pre-generated SWIG C extension matching the running major
Python version (there is one wrapper source per major version).
"""
from distutils.core import setup, Extension
import platform

LONGDOC = 'Use C and Swig to Speed up jieba<Chinese Words Segementation Utilities>'

jieba_fast_functions_py2 = Extension(
    '_jieba_fast_functions_py2',
    sources=['jieba_fast/source/jieba_fast_functions_wrap_py2_wrap.c'],
)
jieba_fast_functions_py3 = Extension(
    '_jieba_fast_functions_py3',
    sources=['jieba_fast/source/jieba_fast_functions_wrap_py3_wrap.c'],
)

# The original script duplicated the entire setup() call for Python 2 and 3,
# with the extension module as the only difference; keep one call and select
# the extension by the interpreter's major version instead.
_EXTENSION_BY_MAJOR = {
    '2': jieba_fast_functions_py2,
    '3': jieba_fast_functions_py3,
}
_major = platform.python_version()[0]

if _major in _EXTENSION_BY_MAJOR:
    setup(
        name='jieba_fast',
        version='0.52',
        description='Use C and Swig to Speed up jieba<Chinese Words Segementation Utilities>',
        long_description=LONGDOC,
        author='Sun, Junyi, deepcs233',
        author_email='shaohao97@gmail.com',
        url='https://github.com/deepcs233/jieba_fast',
        license="MIT",
        classifiers=[
            'Intended Audience :: Developers',
            'License :: OSI Approved :: MIT License',
            'Operating System :: OS Independent',
            'Natural Language :: Chinese (Simplified)',
            'Natural Language :: Chinese (Traditional)',
            'Programming Language :: Python',
            'Programming Language :: Python :: 2',
            'Programming Language :: Python :: 2.6',
            'Programming Language :: Python :: 2.7',
            'Programming Language :: Python :: 3.4',
            'Programming Language :: Python :: 3.5',
            'Programming Language :: Python :: 3.7',
            'Topic :: Text Processing',
            'Topic :: Text Processing :: Indexing',
            'Topic :: Text Processing :: Linguistic',
        ],
        keywords='NLP,tokenizing,Chinese word segementation',
        packages=['jieba_fast'],
        package_dir={'jieba_fast': 'jieba_fast'},
        package_data={'jieba_fast': ['*.*', 'finalseg/*', 'analyse/*', 'posseg/*', 'source/*']},
        ext_modules=[_EXTENSION_BY_MAJOR[_major]],
    )
| 43.777778
| 96
| 0.581782
| 344
| 3,546
| 5.834302
| 0.264535
| 0.098655
| 0.17439
| 0.077728
| 0.903338
| 0.870453
| 0.870453
| 0.870453
| 0.826607
| 0.826607
| 0
| 0.021594
| 0.281726
| 3,546
| 80
| 97
| 44.325
| 0.766392
| 0.005922
| 0
| 0.794521
| 0
| 0
| 0.529946
| 0.057338
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027397
| 0
| 0.027397
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a8836b1458939f9ee9e326aec838e0e99b6c0c6
| 7,372
|
py
|
Python
|
src/genie/libs/parser/asa/tests/ShowRoute/cli/equal/golden_output_2_expected.py
|
nujo/genieparser
|
083b01efc46afc32abe1a1858729578beab50cd3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/asa/tests/ShowRoute/cli/equal/golden_output_2_expected.py
|
nujo/genieparser
|
083b01efc46afc32abe1a1858729578beab50cd3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/asa/tests/ShowRoute/cli/equal/golden_output_2_expected.py
|
nujo/genieparser
|
083b01efc46afc32abe1a1858729578beab50cd3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Golden expected output for ShowRoute (ASA) test case 2.  The six routes fall
# into exactly two templates (an OSPF pair and an EX-EIGRP quartet), so the
# entries are built from one helper instead of six hand-copied dict literals.


def _route_entry(route, *, codes, protocol, preference, metric, date,
                 interface, next_hops):
    """Build one expected route entry; all routes in this fixture share this shape."""
    return {
        "active": True,
        "candidate_default": False,
        "date": date,
        "metric": metric,
        "next_hop": {
            "next_hop_list": {
                index: {
                    "index": index,
                    "next_hop": hop,
                    "outgoing_interface_name": interface,
                }
                for index, hop in enumerate(next_hops, start=1)
            }
        },
        "route": route,
        "route_preference": preference,
        "source_protocol": protocol,
        "source_protocol_codes": codes,
    }


# Shared field sets for the two route flavours seen in this output.
_OSPF = dict(codes="O", protocol="ospf", preference=110, metric=20,
             date="7w0d", interface="inside",
             next_hops=("10.121.64.35", "10.121.64.34"))
_EIGRP_EX = dict(codes="EX", protocol="eigrp", preference=170, metric=345856,
                 date="2w1d", interface="esavpn",
                 next_hops=("10.9.193.99", "10.9.193.98"))

expected_output = {
    "vrf": {
        "default": {
            "address_family": {
                "ipv4": {
                    "routes": {
                        route: _route_entry(route, **params)
                        for route, params in (
                            ("10.121.65.0/24", _OSPF),
                            ("10.121.67.0/24", _EIGRP_EX),
                            ("10.121.68.0/24", _EIGRP_EX),
                            ("10.121.69.0/24", _OSPF),
                            ("10.121.70.0/24", _EIGRP_EX),
                            ("10.121.71.0/24", _EIGRP_EX),
                        )
                    }
                }
            }
        }
    }
}
| 46.955414
| 76
| 0.220429
| 374
| 7,372
| 4.114973
| 0.152406
| 0.109162
| 0.070175
| 0.051982
| 0.969461
| 0.950617
| 0.950617
| 0.950617
| 0.950617
| 0.950617
| 0
| 0.13066
| 0.685431
| 7,372
| 156
| 77
| 47.25641
| 0.532988
| 0
| 0
| 0.653846
| 0
| 0
| 0.212968
| 0.054531
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0afb6f5349db0a76e2184a12e8d2230cba3f79ef
| 30,523
|
py
|
Python
|
torchsample/transforms/affine3d_transforms.py
|
JoostJM/torchsample
|
5bfde5e36e51314b117024dc98607433e8234390
|
[
"MIT"
] | null | null | null |
torchsample/transforms/affine3d_transforms.py
|
JoostJM/torchsample
|
5bfde5e36e51314b117024dc98607433e8234390
|
[
"MIT"
] | null | null | null |
torchsample/transforms/affine3d_transforms.py
|
JoostJM/torchsample
|
5bfde5e36e51314b117024dc98607433e8234390
|
[
"MIT"
] | null | null | null |
"""
Affine transforms implemented on torch tensors, and
requiring only one interpolation
"""
import math
import random
import torch as th
from ..utils import th_affine3d, th_random_choice
class RandomAffine3D(object):

    def __init__(self,
                 rotation_range=None,
                 translation_range=None,
                 shear_range=None,
                 zoom_range=None,
                 interp='trilinear',
                 lazy=False):
        """Compose several random affine sub-transforms into one matrix so
        the image only needs to be interpolated a single time.

        Arguments
        ---------
        rotation_range : int or float
            image is rotated by a random angle in (-rotation_range, rotation_range)
        translation_range : float or 3-tuple of floats in [0, 1)
            fractional (depth, width, height) bounds for the random shift
            along each of the three spatial dimensions
        shear_range : float
            image is sheared by a random angle in (-shear_range, shear_range)
        zoom_range : 2-sequence of floats in (0, infinity)
            lower/upper bounds on the random zoom factor; the first value
            should be less than the second. Values below 1.0 zoom in,
            values above 1.0 zoom out.
        interp : string in {'trilinear', 'nearest'} or list of strings
            interpolation mode; a list gives one mode per input,
            e.g. `interp=['trilinear','nearest']`
        lazy : boolean
            if true, __call__ returns only the composed affine matrix
        """
        self.transforms = []
        if rotation_range is not None:
            self.transforms.append(RandomRotate3D(rotation_range, lazy=True))
        if translation_range is not None:
            self.transforms.append(RandomTranslate3D(translation_range, lazy=True))
        if shear_range is not None:
            self.transforms.append(RandomShear3D(shear_range, lazy=True))
        if zoom_range is not None:
            self.transforms.append(RandomZoom3D(zoom_range, lazy=True))

        self.interp = interp
        self.lazy = lazy

        if not self.transforms:
            raise Exception('Must give at least one transform parameter')

    def __call__(self, *inputs):
        # Chain the lazily-returned sub-transform matrices into one matrix.
        matrix = self.transforms[0](inputs[0])
        for tform in self.transforms[1:]:
            matrix = matrix.mm(tform(inputs[0]))
        self.tform_matrix = matrix

        if self.lazy:
            return matrix
        return Affine3D(matrix, interp=self.interp)(*inputs)
class Affine3D(object):

    def __init__(self,
                 tform_matrix,
                 interp='trilinear'):
        """
        Apply a fixed affine transform, using only one interpolation and
        without having to instantiate each sub-transform individually.

        Arguments
        ---------
        tform_matrix : a 3x3 or 3x4 matrix
            affine transformation matrix to apply
        interp : string in {'trilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['trilinear','nearest']`
        """
        self.tform_matrix = tform_matrix
        self.interp = interp

    def __call__(self, *inputs):
        # Normalize interp to one mode per input.
        if not isinstance(self.interp, (tuple, list)):
            interp = [self.interp] * len(inputs)
        else:
            interp = self.interp

        outputs = []
        for idx, _input in enumerate(inputs):
            input_tf = th_affine3d(_input,
                                   self.tform_matrix,
                                   mode=interp[idx])
            outputs.append(input_tf)
        # BUG FIX: the original returned `outputs if idx >= 1 else outputs[0]`,
        # which raises NameError when called with zero inputs (idx never bound).
        # Decide on the number of outputs instead.
        if len(outputs) == 1:
            return outputs[0]
        return outputs
class Affine3DCompose(object):

    def __init__(self,
                 transforms,
                 interp='trilinear'):
        """
        Apply a collection of explicit affine transforms to an input image,
        and to a target image if necessary.

        Arguments
        ---------
        transforms : list or tuple
            each element in the list/tuple should be an affine transform.
            currently supported transforms:
                - Rotate3D()
                - Translate3D()
                - Shear3D()
                - Zoom3D()
        interp : string in {'trilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['trilinear','nearest']`
        """
        self.transforms = transforms
        self.interp = interp

        # set transforms to lazy so they only return the tform matrix
        for t in self.transforms:
            t.lazy = True

    def __call__(self, *inputs):
        # collect all of the lazily returned tform matrices
        tform_matrix = self.transforms[0](inputs[0])
        for tform in self.transforms[1:]:
            tform_matrix = tform_matrix.mm(tform(inputs[0]))

        if not isinstance(self.interp, (tuple, list)):
            interp = [self.interp] * len(inputs)
        else:
            interp = self.interp

        outputs = []
        for idx, _input in enumerate(inputs):
            input_tf = th_affine3d(_input,
                                   tform_matrix,
                                   mode=interp[idx])
            outputs.append(input_tf)
        # BUG FIX: `outputs if idx >= 1 else outputs[0]` raised NameError for
        # zero inputs; branch on the number of outputs instead.
        if len(outputs) == 1:
            return outputs[0]
        return outputs
class RandomRotate3D(object):

    def __init__(self,
                 rotation_range,
                 axis=0,
                 interp='trilinear',
                 lazy=False):
        """Rotate an image by an angle drawn uniformly from
        (-rotation_range, rotation_range); all channels share the rotation.

        Arguments
        ---------
        rotation_range : integer or float
            bound (in degrees) on the sampled rotation angle
        axis : integer in (0, 1, 2)
            axis (z, y, x) for rotation. This axis will be fixed.
        interp : string in {'trilinear', 'nearest'} or list of strings
            interpolation mode; a list gives one mode per input
        lazy : boolean
            if true, only build and return the affine transform matrix
        """
        self.rotation_range = rotation_range
        self.axis = axis
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        angle = random.uniform(-self.rotation_range, self.rotation_range)
        if self.lazy:
            return Rotate3D(angle, axis=self.axis, lazy=True)(inputs[0])
        return Rotate3D(angle, axis=self.axis, interp=self.interp)(*inputs)
class RandomChoiceRotate3D(object):

    def __init__(self,
                 values,
                 axis=0,
                 p=None,
                 interp='trilinear',
                 lazy=False):
        """Rotate an image by an angle sampled from a fixed set of values;
        all channels share the rotation.

        Arguments
        ---------
        values : a list or tuple
            candidate rotation angles to sample from
        axis : integer in (0, 1, 2)
            axis (z, y, x) for rotation. This axis will be fixed.
        p : a list or tuple the same length as `values`
            sampling probabilities; must sum to 1. Uniform when omitted.
        interp : string in {'trilinear', 'nearest'} or list of strings
            interpolation mode; a list gives one mode per input
        lazy : boolean
            if true, only build and return the affine transform matrix
        """
        if isinstance(values, (list, tuple)):
            values = th.FloatTensor(values)
        self.values = values
        self.axis = axis
        if p is None:
            p = th.ones(len(values)) / len(values)
        elif abs(1.0 - sum(p)) > 1e-3:
            raise ValueError('Probs must sum to 1')
        self.p = p
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        angle = th_random_choice(self.values, p=self.p)
        if self.lazy:
            return Rotate3D(angle, axis=self.axis, lazy=True)(inputs[0])
        return Rotate3D(angle, axis=self.axis, interp=self.interp)(*inputs)
class Rotate3D(object):

    def __init__(self,
                 value,
                 axis=0,
                 interp='trilinear',
                 lazy=False):
        """
        Rotate an image by a fixed angle about one axis. If the image has
        multiple channels, the same rotation will be applied to each channel.
        (The original docstring said "randomly rotate" — copy-paste from the
        Random* variants; this transform is deterministic.)

        Arguments
        ---------
        value : integer or float
            the image will be rotated by exactly this many degrees
        axis : integer in (0, 1, 2)
            axis (z, y, x) for rotation. This axis will be fixed.
        interp : string in {'trilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['trilinear','nearest']`
        lazy : boolean
            if true, only create the affine transform matrix and return that
            if false, perform the transform on the tensor and return the tensor
        """
        self.value = value
        self.axis = axis
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        if not isinstance(self.interp, (tuple, list)):
            interp = [self.interp] * len(inputs)
        else:
            interp = self.interp

        # degrees -> radians
        theta = math.pi / 180 * self.value
        # Homogeneous 4x4 rotation matrix about the chosen fixed axis.
        if self.axis == 0:
            rotation_matrix = th.FloatTensor([[1, 0, 0, 0],
                                              [0, math.cos(theta), -math.sin(theta), 0],
                                              [0, math.sin(theta), math.cos(theta), 0],
                                              [0, 0, 0, 1]])
        elif self.axis == 1:
            rotation_matrix = th.FloatTensor([[math.cos(theta), 0, math.sin(theta), 0],
                                              [0, 1, 0, 0],
                                              [-math.sin(theta), 0, math.cos(theta), 0],
                                              [0, 0, 0, 1]])
        elif self.axis == 2:
            rotation_matrix = th.FloatTensor([[math.cos(theta), -math.sin(theta), 0, 0],
                                              [math.sin(theta), math.cos(theta), 0, 0],
                                              [0, 0, 1, 0],
                                              [0, 0, 0, 1]])
        else:
            raise ValueError('axis out of range [0-2]')

        if self.lazy:
            return rotation_matrix

        outputs = []
        for idx, _input in enumerate(inputs):
            input_tf = th_affine3d(_input,
                                   rotation_matrix,
                                   mode=interp[idx],
                                   center=True)
            outputs.append(input_tf)
        # BUG FIX: `outputs if idx >= 1 else outputs[0]` raised NameError for
        # zero inputs; branch on the number of outputs instead.
        if len(outputs) == 1:
            return outputs[0]
        return outputs
class RandomTranslate3D(object):

    def __init__(self,
                 translation_range,
                 interp='trilinear',
                 lazy=False):
        """Shift an image by random fractions of its depth, width and height;
        all channels share the shift. Assumes CDWH ordering.

        Arguments
        ---------
        translation_range : float or 3-tuple of float between [0, 1)
            fractional bounds (depth, width, height) for the random shift
            along each spatial dimension; a single float is used for all
            three dimensions
        interp : string in {'trilinear', 'nearest'} or list of strings
            interpolation mode; a list gives one mode per input
        lazy : boolean
            if true, only build and return the affine transform matrix
        """
        if isinstance(translation_range, float):
            translation_range = (translation_range,) * 3
        self.depth_range, self.width_range, self.height_range = translation_range
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # Sample in the same order as the original implementation
        # (height, width, depth) so seeded RNG sequences match.
        random_height = random.uniform(-self.height_range, self.height_range)
        random_width = random.uniform(-self.width_range, self.width_range)
        random_depth = random.uniform(-self.depth_range, self.depth_range)
        shift = [random_depth, random_width, random_height]

        if self.lazy:
            return Translate3D(shift, lazy=True)(inputs[0])
        return Translate3D(shift, interp=self.interp)(*inputs)
class RandomChoiceTranslate3D(object):

    def __init__(self,
                 values,
                 p=None,
                 interp='trilinear',
                 lazy=False):
        """Shift an image by fractions sampled independently per dimension
        from a fixed set of candidate values; all channels share the shift.

        Arguments
        ---------
        values : a list or tuple
            candidate translation fractions to sample from
        p : a list or tuple the same length as `values`
            sampling probabilities; must sum to 1. Uniform when omitted.
        interp : string in {'trilinear', 'nearest'} or list of strings
            interpolation mode; a list gives one mode per input
        lazy : boolean
            if true, only build and return the affine transform matrix
        """
        if isinstance(values, (list, tuple)):
            values = th.FloatTensor(values)
        self.values = values
        if p is None:
            p = th.ones(len(values)) / len(values)
        elif abs(1.0 - sum(p)) > 1e-3:
            raise ValueError('Probs must sum to 1')
        self.p = p
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # Keep the original sampling order (height, width, depth) so seeded
        # RNG sequences are reproduced exactly.
        random_height = th_random_choice(self.values, p=self.p)
        random_width = th_random_choice(self.values, p=self.p)
        random_depth = th_random_choice(self.values, p=self.p)
        shift = [random_depth, random_width, random_height]

        if self.lazy:
            return Translate3D(shift, lazy=True)(inputs[0])
        return Translate3D(shift, interp=self.interp)(*inputs)
class Translate3D(object):

    def __init__(self,
                 value,
                 interp='trilinear',
                 lazy=False):
        """
        Translate an image by fixed fractions of its spatial dimensions.

        Arguments
        ---------
        value : float or 3-tuple of float
            if a single value, the depth, width and height translation
            all use this value * the corresponding dimension size. Thus,
            value should be a fraction of the dimension, in range (-1, 1).
        interp : string in {'trilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['trilinear','nearest']`
        lazy : boolean
            if true, only create the affine transform matrix and return that
            if false, perform the transform on the tensor and return the tensor
        """
        if not isinstance(value, (tuple, list)):
            value = (value, value, value)
        # Validate all three components in one loop (was three copy-pasted ifs).
        for v in value:
            if v > 1 or v < -1:
                raise ValueError('Translation must be between -1 and 1')
        self.depth_range = value[0]
        self.width_range = value[1]
        self.height_range = value[2]
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        if not isinstance(self.interp, (tuple, list)):
            interp = [self.interp] * len(inputs)
        else:
            interp = self.interp

        # Convert fractional shifts to absolute voxel offsets using the first
        # input's (C, D, W, H) sizes.
        tz = self.depth_range * inputs[0].size(1)
        ty = self.width_range * inputs[0].size(2)
        tx = self.height_range * inputs[0].size(3)
        translation_matrix = th.FloatTensor([[1, 0, 0, tz],
                                             [0, 1, 0, ty],
                                             [0, 0, 1, tx],
                                             [0, 0, 0, 1]])
        if self.lazy:
            return translation_matrix

        outputs = []
        for idx, _input in enumerate(inputs):
            input_tf = th_affine3d(_input,
                                   translation_matrix,
                                   mode=interp[idx],
                                   center=True)
            outputs.append(input_tf)
        # BUG FIX: `outputs if idx >= 1 else outputs[0]` raised NameError for
        # zero inputs; branch on the number of outputs instead.
        if len(outputs) == 1:
            return outputs[0]
        return outputs
class RandomShear3D(object):

    def __init__(self,
                 shear_range,
                 interp='trilinear',
                 lazy=False):
        """
        Randomly shear an image with angles drawn from
        (-shear_range, shear_range).

        Arguments
        ---------
        shear_range : float
            bounds on the shear angle, in degrees (the original docstring
            said "radians", but Shear3D converts the value with pi/180,
            i.e. it treats the value as degrees)
        interp : string in {'trilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['trilinear','nearest']`
        lazy : boolean
            if false, perform the transform on the tensor and return the tensor
            if true, only create the affine transform matrix and return that
        """
        self.shear_range = shear_range
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        shear_x = random.uniform(-self.shear_range, self.shear_range)
        shear_y = random.uniform(-self.shear_range, self.shear_range)
        if self.lazy:
            return Shear3D([shear_x, shear_y],
                           lazy=True)(inputs[0])
        else:
            outputs = Shear3D([shear_x, shear_y],
                              interp=self.interp)(*inputs)
            return outputs
class RandomChoiceShear3D(object):

    def __init__(self,
                 values,
                 p=None,
                 interp='trilinear',
                 lazy=False):
        """Shear an image with angles sampled from a fixed set of values.

        Arguments
        ---------
        values : a list or tuple
            candidate shear values to sample from
        p : a list or tuple the same length as `values`
            sampling probabilities; must sum to 1. Uniform when omitted.
        interp : string in {'trilinear', 'nearest'} or list of strings
            interpolation mode; a list gives one mode per input
        lazy : boolean
            if true, only build and return the affine transform matrix
        """
        if isinstance(values, (list, tuple)):
            values = th.FloatTensor(values)
        self.values = values
        if p is None:
            p = th.ones(len(values)) / len(values)
        elif abs(1.0 - sum(p)) > 1e-3:
            raise ValueError('Probs must sum to 1')
        self.p = p
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # Sample x before y, matching the original RNG consumption order.
        shear_x = th_random_choice(self.values, p=self.p)
        shear_y = th_random_choice(self.values, p=self.p)
        if self.lazy:
            return Shear3D([shear_x, shear_y], lazy=True)(inputs[0])
        return Shear3D([shear_x, shear_y], interp=self.interp)(*inputs)
class Shear3D(object):

    def __init__(self,
                 value,
                 interp='trilinear',
                 lazy=False):
        """
        Shear an image by a fixed amount.

        Arguments
        ---------
        value : float or 2-sequence of floats
            shear angle(s) in degrees; a single value is paired with a 0
            second component
        interp : string in {'trilinear', 'nearest'} or list of strings
            interpolation mode; a list gives one mode per input
        lazy : boolean
            if true, only create and return the affine transform matrix
        """
        if isinstance(value, (list, tuple)):
            self.value = value
        else:
            self.value = (value, 0)
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # Normalize interp to one mode per input.
        if not isinstance(self.interp, (tuple,list)):
            interp = [self.interp]*len(inputs)
        else:
            interp = self.interp

        # degrees -> radians for each shear component
        theta_x = (math.pi * self.value[0]) / 180
        theta_y = (math.pi * self.value[1]) / 180
        # NOTE(review): this matrix mixes theta_x and theta_y within the same
        # rows (cos of one angle next to sin of the other); for theta_x ==
        # theta_y it reduces to a clean rotation-like form, but for distinct
        # angles the intent is unclear — confirm against the 2D Shear
        # implementation before relying on two different shear values.
        shear_matrix = th.FloatTensor([[1, 0, 0, 0],
                                       [0, math.cos(theta_x), math.sin(theta_y), 0],
                                       [0, -math.sin(theta_x), math.cos(theta_y), 0],
                                       [0, 0, 0, 1]])
        if self.lazy:
            return shear_matrix
        else:
            outputs = []
            for idx, _input in enumerate(inputs):
                input_tf = th_affine3d(_input,
                                       shear_matrix,
                                       mode=interp[idx],
                                       center=True)
                outputs.append(input_tf)
            # NOTE(review): raises NameError if called with zero inputs
            # (idx never bound) — same pattern as the sibling transforms.
            return outputs if idx >= 1 else outputs[0]
class RandomZoom3D(object):

    def __init__(self,
                 zoom_range,
                 interp='trilinear',
                 lazy=False):
        """
        Randomly zoom in and/or out on an image.

        Arguments
        ---------
        zoom_range : tuple or list with 2 values, both between (0, infinity)
            lower and upper bounds on percent zoom.
            Anything less than 1.0 will zoom in on the image,
            anything greater than 1.0 will zoom out on the image.
            e.g. (0.7, 1.0) will only zoom in,
                 (1.0, 1.4) will only zoom out,
                 (0.7, 1.4) will randomly zoom in or out
        interp : string in {'trilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['trilinear','nearest']`
        lazy : boolean
            if false, perform the transform on the tensor and return the tensor
            if true, only create the affine transform matrix and return that
        """
        # BUG FIX: the error message promised "2 values" but the length was
        # never checked, so a 1-element list only failed later with a
        # confusing IndexError in __call__.
        if not isinstance(zoom_range, (list, tuple)) or len(zoom_range) != 2:
            raise ValueError('zoom_range must be tuple or list with 2 values')
        self.zoom_range = zoom_range
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # Sample one zoom factor per spatial dimension (x, y, z order kept
        # from the original so seeded RNG sequences match).
        zx = random.uniform(self.zoom_range[0], self.zoom_range[1])
        zy = random.uniform(self.zoom_range[0], self.zoom_range[1])
        zz = random.uniform(self.zoom_range[0], self.zoom_range[1])
        if self.lazy:
            return Zoom3D([zz, zy, zx], lazy=True)(inputs[0])
        else:
            outputs = Zoom3D([zz, zy, zx],
                             interp=self.interp)(*inputs)
            return outputs
class RandomChoiceZoom3D(object):

    def __init__(self,
                 values,
                 p=None,
                 interp='trilinear',
                 lazy=False):
        """Zoom an image by factors sampled independently per dimension
        from a fixed set of candidate values.

        Arguments
        ---------
        values : a list or tuple
            candidate zoom factors to sample from
        p : a list or tuple the same length as `values`
            sampling probabilities; must sum to 1. Uniform when omitted.
        interp : string in {'trilinear', 'nearest'} or list of strings
            interpolation mode; a list gives one mode per input
        lazy : boolean
            if true, only build and return the affine transform matrix
        """
        if isinstance(values, (list, tuple)):
            values = th.FloatTensor(values)
        self.values = values
        if p is None:
            p = th.ones(len(values)) / len(values)
        elif abs(1.0 - sum(p)) > 1e-3:
            raise ValueError('Probs must sum to 1')
        self.p = p
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # Keep the original sampling order (x, y, z) so seeded RNG
        # sequences are reproduced exactly.
        zx = th_random_choice(self.values, p=self.p)
        zy = th_random_choice(self.values, p=self.p)
        zz = th_random_choice(self.values, p=self.p)
        if self.lazy:
            return Zoom3D([zz, zy, zx], lazy=True)(inputs[0])
        return Zoom3D([zz, zy, zx], interp=self.interp)(*inputs)
class Zoom3D(object):

    def __init__(self,
                 value,
                 interp='trilinear',
                 lazy=False):
        """
        Zoom an image by fixed per-dimension factors.

        Arguments
        ---------
        value : float or 3-sequence of floats
            fractional zoom factor(s) (z, y, x); a single float is used
            for all three dimensions.
            =1 : no zoom
            NOTE(review): the original docstring claimed >1 zooms in, which
            contradicts the RandomZoom3D / RandomAffine3D docs in this file
            ("anything less than 1.0 will zoom in"); per those, <1 zooms in
            and >1 zooms out — confirm against th_affine3d's convention.
        interp : string in {'trilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['trilinear','nearest']`
        lazy : boolean
            if true, only create the affine transform matrix and return that
        """
        if not isinstance(value, (tuple, list)):
            value = (value, value, value)
        self.value = value
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        if not isinstance(self.interp, (tuple, list)):
            interp = [self.interp] * len(inputs)
        else:
            interp = self.interp

        # Diagonal scaling matrix in homogeneous coordinates.
        zz, zy, zx = self.value
        zoom_matrix = th.FloatTensor([[zz, 0, 0, 0],
                                      [0, zy, 0, 0],
                                      [0, 0, zx, 0],
                                      [0, 0, 0, 1]])
        if self.lazy:
            return zoom_matrix

        outputs = []
        for idx, _input in enumerate(inputs):
            input_tf = th_affine3d(_input,
                                   zoom_matrix,
                                   mode=interp[idx],
                                   center=True)
            outputs.append(input_tf)
        # BUG FIX: `outputs if idx >= 1 else outputs[0]` raised NameError for
        # zero inputs; branch on the number of outputs instead.
        if len(outputs) == 1:
            return outputs[0]
        return outputs
| 36.819059
| 89
| 0.539397
| 3,531
| 30,523
| 4.566695
| 0.068536
| 0.026047
| 0.032992
| 0.015814
| 0.80738
| 0.795473
| 0.771907
| 0.75907
| 0.744372
| 0.730419
| 0
| 0.015453
| 0.378796
| 30,523
| 828
| 90
| 36.863527
| 0.834977
| 0.367199
| 0
| 0.708134
| 0
| 0
| 0.024956
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.07177
| false
| 0
| 0.009569
| 0
| 0.184211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e408450fd809c8382f6f39d174f9c71bf13d2588
| 61
|
py
|
Python
|
modules/ckanext-ytp_drupal/ckanext/ytp_drupal/celery_import.py
|
eetumans/opendata
|
061f58550bcb820016a764cca4763ed0a5f627fe
|
[
"MIT"
] | 16
|
2018-07-12T14:26:02.000Z
|
2022-02-24T12:10:00.000Z
|
modules/ckanext-ytp_drupal/ckanext/ytp_drupal/celery_import.py
|
eetumans/opendata
|
061f58550bcb820016a764cca4763ed0a5f627fe
|
[
"MIT"
] | 751
|
2017-09-28T07:47:50.000Z
|
2022-03-31T12:08:25.000Z
|
modules/ckanext-ytp_drupal/ckanext/ytp_drupal/celery_import.py
|
vrk-kpa/opendata-ckan
|
8936e2d9e700b9e5534fe2a51eedc2d1ede8c10b
|
[
"MIT"
] | 6
|
2017-10-31T07:47:07.000Z
|
2021-10-06T07:09:07.000Z
|
def task_imports():
    """Return the dotted module paths whose celery tasks should be imported."""
    task_modules = ['ckanext.ytp_drupal.tasks']
    return task_modules
| 15.25
| 39
| 0.704918
| 8
| 61
| 5.125
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147541
| 61
| 3
| 40
| 20.333333
| 0.788462
| 0
| 0
| 0
| 0
| 0
| 0.4
| 0.4
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0.5
| 0.5
| 1.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
7c1934e0b182ebeeebae567c0a47deebba567f63
| 22,613
|
py
|
Python
|
tests/unit/operations/test_sshops.py
|
myungseokang/aws-elastic-beanstalk-cli
|
339ff2660058ad76d0aef2e86ebe97d68f5f2789
|
[
"Apache-2.0"
] | 110
|
2020-01-15T22:58:46.000Z
|
2022-03-27T20:47:33.000Z
|
tests/unit/operations/test_sshops.py
|
QPC-database/aws-elastic-beanstalk-cli
|
87ad9d8bbe5e4e7cb01b1bd4392eda33cb1943f7
|
[
"Apache-2.0"
] | 89
|
2020-01-15T23:18:34.000Z
|
2022-03-31T21:56:05.000Z
|
tests/unit/operations/test_sshops.py
|
QPC-database/aws-elastic-beanstalk-cli
|
87ad9d8bbe5e4e7cb01b1bd4392eda33cb1943f7
|
[
"Apache-2.0"
] | 50
|
2020-01-15T22:58:53.000Z
|
2022-02-11T17:39:28.000Z
|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import shutil
from copy import deepcopy
import mock
import unittest
from ebcli.operations import sshops
from .. import mock_responses
class TestSSHOps(unittest.TestCase):
def setUp(self):
self.root_dir = os.getcwd()
if not os.path.exists('testDir'):
os.mkdir('testDir')
os.chdir('testDir')
    def tearDown(self):
        """Leave the scratch directory before deleting it (order matters:
        removing the current working directory fails on some platforms)."""
        os.chdir(self.root_dir)
        shutil.rmtree('testDir')
    @mock.patch('ebcli.operations.sshops.io.prompt')
    @mock.patch('ebcli.operations.sshops.subprocess.call')
    @mock.patch('ebcli.operations.sshops.commonops.upload_keypair_if_needed')
    def test_generate_and_upload_keypair__exit_code_0(
            self,
            upload_keypair_if_needed_mock,
            call_mock,
            prompt_mock
    ):
        # ssh-keygen exiting with 0: the prompted key name is returned,
        # the keypair is uploaded, and ssh-keygen was invoked with a path
        # under ~/.ssh for the new key.
        prompt_mock.return_value = 'aws-eb-us-west-2'
        call_mock.return_value = 0
        self.assertEqual(
            'aws-eb-us-west-2',
            sshops._generate_and_upload_keypair(['aws-eb', 'aws-eb-us-east-2'])
        )
        upload_keypair_if_needed_mock.assert_called_once_with('aws-eb-us-west-2')
        call_mock.assert_called_once_with(
            [
                'ssh-keygen',
                '-f',
                os.path.expanduser('~') + '{0}.ssh{0}aws-eb-us-west-2'.format(os.path.sep),
                '-C',
                'aws-eb-us-west-2'
            ]
        )
    @mock.patch('ebcli.operations.sshops.io.prompt')
    @mock.patch('ebcli.operations.sshops.subprocess.call')
    @mock.patch('ebcli.operations.sshops.commonops.upload_keypair_if_needed')
    def test_generate_and_upload_keypair__exit_code_1(
            self,
            upload_keypair_if_needed_mock,
            call_mock,
            prompt_mock
    ):
        # ssh-keygen exiting with 1 is treated the same as 0 here —
        # presumably sshops accepts both exit codes as success; the
        # assertions are identical to the exit-code-0 test.
        prompt_mock.return_value = 'aws-eb-us-west-2'
        call_mock.return_value = 1
        self.assertEqual(
            'aws-eb-us-west-2',
            sshops._generate_and_upload_keypair(['aws-eb', 'aws-eb-us-east-2'])
        )
        upload_keypair_if_needed_mock.assert_called_once_with('aws-eb-us-west-2')
        call_mock.assert_called_once_with(
            [
                'ssh-keygen',
                '-f',
                os.path.expanduser('~') + '{0}.ssh{0}aws-eb-us-west-2'.format(os.path.sep),
                '-C',
                'aws-eb-us-west-2'
            ]
        )
    @mock.patch('ebcli.operations.sshops.io.prompt')
    @mock.patch('ebcli.operations.sshops.subprocess.call')
    def test_generate_and_upload_keypair__exit_code_is_other_than_1_and_0(
            self,
            call_mock,
            prompt_mock
    ):
        # Any other exit code from ssh-keygen is surfaced as a CommandError.
        prompt_mock.return_value = 'aws-eb-us-west-2'
        call_mock.return_value = 2
        with self.assertRaises(sshops.CommandError) as context_manager:
            sshops._generate_and_upload_keypair(['aws-eb', 'aws-eb-us-east-2'])
        self.assertEqual(
            'An error occurred while running ssh-keygen.',
            str(context_manager.exception)
        )
@mock.patch('ebcli.operations.sshops.io.prompt')
@mock.patch('ebcli.operations.sshops.subprocess.call')
def test_generate_and_upload_keypair__ssh_keygen_not_present(
self,
call_mock,
prompt_mock
):
prompt_mock.return_value = 'aws-eb-us-west-2'
call_mock.sideeffect = OSError
with self.assertRaises(sshops.CommandError) as context_manager:
sshops._generate_and_upload_keypair(['aws-eb', 'aws-eb-us-east-2'])
self.assertEqual(
'An error occurred while running ssh-keygen.',
str(context_manager.exception)
)
    @mock.patch('ebcli.operations.sshops.utils.prompt_for_item_in_list')
    @mock.patch('ebcli.operations.sshops._generate_and_upload_keypair')
    @mock.patch('ebcli.operations.sshops.ec2.get_key_pairs')
    @mock.patch('ebcli.operations.sshops.io.validate_action')
    def test_prompt_for_ec2_keyname(
            self,
            validate_action_mock,
            get_key_pairs_mock,
            generate_and_upload_keypair_mock,
            prompt_for_item_in_list_mock
    ):
        # Choosing the "[ Create new KeyPair ]" sentinel from the key list
        # should confirm the environment name and then create a new keypair,
        # with the full option list passed along.
        get_key_pairs_mock.return_value = mock_responses.DESCRIBE_KEY_PAIRS_RESPONSE['KeyPairs']
        prompt_for_item_in_list_mock.return_value = '[ Create new KeyPair ]'
        sshops.prompt_for_ec2_keyname('my-environment')
        validate_action_mock.assert_called_once_with('To confirm, type the environment name', 'my-environment')
        generate_and_upload_keypair_mock.assert_called_once_with(['key_pair_1', 'key_pair_2', '[ Create new KeyPair ]'])
    @mock.patch('ebcli.operations.sshops.utils.prompt_for_item_in_list')
    @mock.patch('ebcli.operations.sshops._generate_and_upload_keypair')
    @mock.patch('ebcli.operations.sshops.ec2.get_key_pairs')
    @mock.patch('ebcli.operations.sshops.io.validate_action')
    def test_prompt_for_ec2_keyname__choose_existing_key(
            self,
            validate_action_mock,
            get_key_pairs_mock,
            generate_and_upload_keypair_mock,
            prompt_for_item_in_list_mock
    ):
        # Selecting an existing keypair must not trigger keypair creation.
        get_key_pairs_mock.return_value = mock_responses.DESCRIBE_KEY_PAIRS_RESPONSE['KeyPairs']
        prompt_for_item_in_list_mock.return_value = 'key_pair_2'
        sshops.prompt_for_ec2_keyname('my-environment')
        validate_action_mock.assert_called_once_with('To confirm, type the environment name', 'my-environment')
        generate_and_upload_keypair_mock.assert_not_called()
    @mock.patch('ebcli.operations.sshops.utils.prompt_for_item_in_list')
    @mock.patch('ebcli.operations.sshops._generate_and_upload_keypair')
    @mock.patch('ebcli.operations.sshops.ec2.get_key_pairs')
    @mock.patch('ebcli.operations.sshops.io.get_boolean_response')
    def test_prompt_for_ec2_keyname__get_boolean_response_to_confirm_termination(
            self,
            get_boolean_response_mock,
            get_key_pairs_mock,
            generate_and_upload_keypair_mock,
            prompt_for_item_in_list_mock
    ):
        # Called without an environment name — presumably sshops falls back
        # to a yes/no confirmation (hence patching get_boolean_response
        # instead of validate_action); an existing key is chosen, so no
        # keypair is created.
        get_key_pairs_mock.return_value = mock_responses.DESCRIBE_KEY_PAIRS_RESPONSE['KeyPairs']
        prompt_for_item_in_list_mock.return_value = 'key_pair_2'
        get_boolean_response_mock.return_value = True
        sshops.prompt_for_ec2_keyname()
        generate_and_upload_keypair_mock.assert_not_called()
    @mock.patch('ebcli.operations.sshops._generate_and_upload_keypair')
    @mock.patch('ebcli.operations.sshops.ec2.get_key_pairs')
    @mock.patch('ebcli.operations.sshops.io.validate_action')
    @mock.patch('ebcli.operations.sshops.io.get_boolean_response')
    def test_prompt_for_ec2_keyname__no_keys_exist(
            self,
            get_boolean_response_mock,
            validate_action_mock,
            get_key_pairs_mock,
            generate_and_upload_keypair_mock
    ):
        # With no existing keypairs, a new one is created with an empty
        # option list.
        get_key_pairs_mock.return_value = []
        get_boolean_response_mock.return_value = True
        sshops.prompt_for_ec2_keyname('my-environment')
        generate_and_upload_keypair_mock.assert_called_once_with([])
        # NOTE(review): `assert_called_once()` requires mock>=2.0 /
        # Python 3.6's unittest.mock; on older mock versions an unknown
        # assert_* attribute could silently pass — verify the pinned version.
        validate_action_mock.assert_called_once()
@mock.patch('ebcli.operations.sshops.fileoperations.get_ssh_folder')
def test_get_ssh_file(
self,
get_ssh_folder_mock
):
open('aws-eb-us-west-2', 'w').close()
get_ssh_folder_mock.return_value = os.getcwd() + os.path.sep
sshops._get_ssh_file('aws-eb-us-west-2').endswith('testDir{}aws-eb-us-west-2'.format(os.pathsep))
@mock.patch('ebcli.operations.sshops.fileoperations.get_ssh_folder')
def test_get_ssh_file__file_present_as_pem(
self,
get_ssh_folder_mock
):
open('aws-eb-us-west-2.pem', 'w').close()
get_ssh_folder_mock.return_value = os.getcwd() + os.path.sep
sshops._get_ssh_file('aws-eb-us-west-2').endswith('testDir{}aws-eb-us-west-2.pem'.format(os.pathsep))
@mock.patch('ebcli.operations.sshops.fileoperations.get_ssh_folder')
def test_get_ssh_file__file_absent(
self,
get_ssh_folder_mock
):
open('aws-eb-us-west-2.pem', 'w').close()
get_ssh_folder_mock.return_value = os.getcwd() + os.path.sep
with self.assertRaises(sshops.NotFoundError) as context_manager:
sshops._get_ssh_file('absent_file').endswith('testDir{}aws-eb-us-west-2.pem'.format(os.pathsep))
self.assertEqual(
'The EB CLI cannot find your SSH key file for keyname "absent_file". '
'Your SSH key file must be located in the .ssh folder in your home directory.',
str(context_manager.exception))
    @mock.patch('ebcli.operations.sshops.ec2.describe_instance')
    def test_ssh_into_instance__no_key_pair(
            self,
            describe_instance_mock
    ):
        # An instance description without a 'KeyName' entry means there is
        # no keypair to SSH with — NoKeypairError is expected.
        describe_instance_mock.return_value = dict()
        with self.assertRaises(sshops.NoKeypairError):
            sshops.ssh_into_instance('some-instance-id')
@mock.patch('ebcli.operations.sshops.ec2.describe_instance')
@mock.patch('ebcli.operations.sshops.ec2.describe_security_group')
@mock.patch('ebcli.operations.sshops.ec2.authorize_ssh')
@mock.patch('ebcli.operations.sshops._get_ssh_file')
@mock.patch('ebcli.operations.sshops.subprocess.call')
def test_ssh_into_instance(self, call_mock, _get_ssh_file_mock, authorize_ssh_mock, describe_security_group_mock, describe_instance_mock):
    """Happy path: ssh_into_instance completes when the ssh subprocess exits 0."""
    instance = mock_responses.DESCRIBE_INSTANCES_RESPONSE['Reservations'][0]['Instances'][0]
    security_group = mock_responses.DESCRIBE_SECURITY_GROUPS_RESPONSE['SecurityGroups'][0]

    describe_instance_mock.return_value = instance
    describe_security_group_mock.return_value = security_group
    _get_ssh_file_mock.return_value = 'aws-eb-us-west-2'
    call_mock.return_value = 0

    sshops.ssh_into_instance('instance-id')
@mock.patch('ebcli.operations.sshops.ec2.describe_instance')
@mock.patch('ebcli.operations.sshops.ec2.describe_security_group')
@mock.patch('ebcli.operations.sshops.ec2.authorize_ssh')
@mock.patch('ebcli.operations.sshops._get_ssh_file')
@mock.patch('ebcli.operations.sshops.subprocess.call')
def test_ssh_into_instance__ssh_fails(self, call_mock, _get_ssh_file_mock, authorize_ssh_mock, describe_security_group_mock, describe_instance_mock):
    """A non-zero exit status from the ssh subprocess surfaces as CommandError."""
    instance = mock_responses.DESCRIBE_INSTANCES_RESPONSE['Reservations'][0]['Instances'][0]
    security_group = mock_responses.DESCRIBE_SECURITY_GROUPS_RESPONSE['SecurityGroups'][0]

    describe_instance_mock.return_value = instance
    describe_security_group_mock.return_value = security_group
    _get_ssh_file_mock.return_value = 'aws-eb-us-west-2'
    call_mock.return_value = 1

    with self.assertRaises(sshops.CommandError) as context_manager:
        sshops.ssh_into_instance('instance-id')

    self.assertEqual(
        'An error occurred while running: ssh.',
        str(context_manager.exception)
    )
@mock.patch('ebcli.operations.sshops.ec2.describe_instance')
@mock.patch('ebcli.operations.sshops._get_ssh_file')
@mock.patch('ebcli.operations.sshops.subprocess.call')
def test_ssh_into_instance__neither_public_nor_private_ip_found(self, call_mock, _get_ssh_file_mock, describe_instance_mock):
    """NotFoundError is raised when the instance exposes no public or private IP."""
    instance = deepcopy(mock_responses.DESCRIBE_INSTANCES_RESPONSE['Reservations'][0]['Instances'][0])
    for address_key in ('PublicIpAddress', 'PrivateIpAddress'):
        del instance[address_key]
    describe_instance_mock.return_value = instance
    _get_ssh_file_mock.return_value = 'aws-eb-us-west-2'
    call_mock.return_value = 0

    with self.assertRaises(sshops.NotFoundError):
        sshops.ssh_into_instance('instance-id')
@mock.patch('ebcli.operations.sshops.ec2.describe_instance')
@mock.patch('ebcli.operations.sshops.ec2.describe_security_group')
@mock.patch('ebcli.operations.sshops.ec2.authorize_ssh')
@mock.patch('ebcli.operations.sshops._get_ssh_file')
@mock.patch('ebcli.operations.sshops.subprocess.call')
def test_ssh_into_instance__uses_private_address(self, call_mock, _get_ssh_file_mock, authorize_ssh_mock, describe_security_group_mock, describe_instance_mock):
    """Without a public IP, ssh_into_instance connects to the private address."""
    instance = deepcopy(mock_responses.DESCRIBE_INSTANCES_RESPONSE['Reservations'][0]['Instances'][0])
    del instance['PublicIpAddress']
    describe_instance_mock.return_value = instance
    describe_security_group_mock.return_value = mock_responses.DESCRIBE_SECURITY_GROUPS_RESPONSE['SecurityGroups'][0]
    _get_ssh_file_mock.return_value = 'aws-eb-us-west-2'
    call_mock.return_value = 0

    sshops.ssh_into_instance('instance-id')

    expected_command = ['ssh', '-i', 'aws-eb-us-west-2', 'ec2-user@172.31.35.210']
    call_mock.assert_called_once_with(expected_command)
@mock.patch('ebcli.operations.sshops.ec2.describe_instance')
@mock.patch('ebcli.operations.sshops.ec2.describe_security_group')
@mock.patch('ebcli.operations.sshops.ec2.revoke_ssh')
@mock.patch('ebcli.operations.sshops.ec2.authorize_ssh')
@mock.patch('ebcli.operations.sshops._get_ssh_file')
@mock.patch('ebcli.operations.sshops.subprocess.call')
def test_ssh_into_instance__ssh_rule_exists(self, call_mock, _get_ssh_file_mock, authorize_ssh_mock, revoke_ssh_mock, describe_security_group_mock, describe_instance_mock):
    """When port 22 is already open, no authorize/revoke round-trip is performed."""
    instance = deepcopy(mock_responses.DESCRIBE_INSTANCES_RESPONSE['Reservations'][0]['Instances'][0])
    describe_instance_mock.return_value = instance
    describe_security_group_mock.return_value = mock_responses.DESCRIBE_SECURITY_GROUPS_RESPONSE['SecurityGroups'][0]
    _get_ssh_file_mock.return_value = 'aws-eb-us-west-2'
    call_mock.return_value = 0

    sshops.ssh_into_instance('instance-id')

    authorize_ssh_mock.assert_not_called()
    revoke_ssh_mock.assert_not_called()
    expected_command = ['ssh', '-i', 'aws-eb-us-west-2', 'ec2-user@54.218.96.238']
    call_mock.assert_called_once_with(expected_command)
@mock.patch('ebcli.operations.sshops.ec2.describe_instance')
@mock.patch('ebcli.operations.sshops.ec2.describe_security_group')
@mock.patch('ebcli.operations.sshops.ec2.revoke_ssh')
@mock.patch('ebcli.operations.sshops.ec2.authorize_ssh')
@mock.patch('ebcli.operations.sshops._get_ssh_file')
@mock.patch('ebcli.operations.sshops.subprocess.call')
def test_ssh_into_instance__no_ssh_rule_exists(self, call_mock, _get_ssh_file_mock, authorize_ssh_mock, revoke_ssh_mock, describe_security_group_mock, describe_instance_mock):
    """When port 22 is closed, SSH access is authorized for the session and then revoked."""
    instance = deepcopy(mock_responses.DESCRIBE_INSTANCES_RESPONSE['Reservations'][0]['Instances'][0])
    describe_instance_mock.return_value = instance
    describe_security_group_mock.return_value = mock_responses.DESCRIBE_SECURITY_GROUPS_RESPONSE['SecurityGroups'][1]
    _get_ssh_file_mock.return_value = 'aws-eb-us-west-2'
    call_mock.return_value = 0

    sshops.ssh_into_instance('instance-id')

    authorize_ssh_mock.assert_called_once_with('sg-12312313')
    revoke_ssh_mock.assert_called_once_with('sg-12312313')
    expected_command = ['ssh', '-i', 'aws-eb-us-west-2', 'ec2-user@54.218.96.238']
    call_mock.assert_called_once_with(expected_command)
@mock.patch('ebcli.operations.sshops.prompt_for_ec2_keyname')
@mock.patch('ebcli.operations.sshops.commonops.update_environment')
def test_setup_ssh(self, update_environment_mock, prompt_for_ec2_keyname_mock):
    """setup_ssh updates the environment's EC2KeyName option with the chosen key."""
    prompt_for_ec2_keyname_mock.return_value = 'aws-eb-us-west-2'

    sshops.setup_ssh('my-environment', 'aws-eb-us-west-2')

    expected_option_settings = [
        {
            'Namespace': 'aws:autoscaling:launchconfiguration',
            'OptionName': 'EC2KeyName',
            'Value': 'aws-eb-us-west-2'
        }
    ]
    update_environment_mock.assert_called_once_with(
        'my-environment',
        expected_option_settings,
        False,
        timeout=5
    )
@mock.patch('ebcli.operations.sshops.prompt_for_ec2_keyname')
@mock.patch('ebcli.operations.sshops.commonops.update_environment')
def test_setup_ssh__keyname_not_entered(self, update_environment_mock, prompt_for_ec2_keyname_mock):
    """If the user declines to pick a keyname, the environment is left untouched."""
    prompt_for_ec2_keyname_mock.return_value = None

    sshops.setup_ssh('my-environment', 'aws-eb-us-west-2')

    update_environment_mock.assert_not_called()
@mock.patch('ebcli.operations.sshops.setup_ssh')
def test_prepare_for_ssh(self, setup_ssh_mock):
    """With setup=True, prepare_for_ssh delegates straight to setup_ssh."""
    sshops.prepare_for_ssh('my-environment', 'instance', False, False, True, None)

    setup_ssh_mock.assert_called_once_with('my-environment', None, timeout=None)
def test_prepare_for_ssh__instance_and_number(self):
    """Passing both an instance id and an instance number is rejected."""
    with self.assertRaises(sshops.InvalidOptionsError) as context_manager:
        sshops.prepare_for_ssh('my-environment', 'instance', False, False, False, 1)

    self.assertEqual(
        'You cannot use the "--instance" and "--number" options together.',
        str(context_manager.exception)
    )
@mock.patch('ebcli.operations.sshops.commonops.get_instance_ids')
@mock.patch('ebcli.operations.sshops.utils.prompt_for_item_in_list')
@mock.patch('ebcli.operations.sshops.ssh_into_instance')
def test_prepare_for_ssh__choose_instance_to_ssh_into(self, ssh_into_instance_mock, prompt_for_item_in_list_mock, get_instance_ids_mock):
    """With several instances and no selector, the user's prompt choice is SSHed into."""
    instance_ids = ['i-123123123123', 'i-234234234424', 'i-353454535434']
    chosen_instance = 'i-353454535434'
    get_instance_ids_mock.return_value = instance_ids
    prompt_for_item_in_list_mock.return_value = chosen_instance

    sshops.prepare_for_ssh('my-environment', None, False, False, False, None)

    ssh_into_instance_mock.assert_called_once_with(
        chosen_instance,
        command=None,
        custom_ssh=None,
        force_open=False,
        keep_open=False
    )
@mock.patch('ebcli.operations.sshops.commonops.get_instance_ids')
@mock.patch('ebcli.operations.sshops.utils.prompt_for_item_in_list')
@mock.patch('ebcli.operations.sshops.ssh_into_instance')
def test_prepare_for_ssh__single_instance_sshes_into_it_directly(
        self,
        ssh_into_instance_mock,
        prompt_for_item_in_list_mock,
        get_instance_ids_mock
):
    """With exactly one instance, it is SSHed into without consulting the prompt result.

    Renamed from `test_prepare_for_ssh__choose_instance_to_ssh_into`, which
    duplicated the name of the preceding test method and therefore shadowed
    it — the earlier test was silently never collected or run.
    """
    get_instance_ids_mock.return_value = [
        'i-123123123123',
    ]
    # Deliberately different from the only instance id: proves the prompt's
    # return value is not what gets used.
    prompt_for_item_in_list_mock.return_value = 'i-353454535434'
    sshops.prepare_for_ssh(
        'my-environment',
        None,
        False,
        False,
        False,
        None
    )
    ssh_into_instance_mock.assert_called_once_with(
        'i-123123123123',
        command=None,
        custom_ssh=None,
        force_open=False,
        keep_open=False
    )
@mock.patch('ebcli.operations.sshops.commonops.get_instance_ids')
@mock.patch('ebcli.operations.sshops.utils.prompt_for_item_in_list')
@mock.patch('ebcli.operations.sshops.ssh_into_instance')
def test_prepare_for_ssh__number_of_instance_specified(self, ssh_into_instance_mock, prompt_for_item_in_list_mock, get_instance_ids_mock):
    """--number selects the instance by 1-based position, bypassing the prompt."""
    get_instance_ids_mock.return_value = ['i-123123123123', 'i-234234234424', 'i-353454535434']
    prompt_for_item_in_list_mock.return_value = 'i-353454535434'

    sshops.prepare_for_ssh('my-environment', None, False, False, False, 2)

    # number=2 maps to the second id, not the prompt's return value
    ssh_into_instance_mock.assert_called_once_with(
        'i-234234234424',
        command=None,
        custom_ssh=None,
        force_open=False,
        keep_open=False
    )
@mock.patch('ebcli.operations.sshops.commonops.get_instance_ids')
@mock.patch('ebcli.operations.sshops.utils.prompt_for_item_in_list')
@mock.patch('ebcli.operations.sshops.ssh_into_instance')
@mock.patch('ebcli.operations.sshops.io.log_error')
def test_prepare_for_ssh__ssh_into_instance_fails(self, log_error_mock, ssh_into_instance_mock, prompt_for_item_in_list_mock, get_instance_ids_mock):
    """A NoKeypairError from ssh_into_instance is logged rather than propagated."""
    get_instance_ids_mock.return_value = ['i-123123123123', 'i-234234234424', 'i-353454535434']
    prompt_for_item_in_list_mock.return_value = 'i-353454535434'
    ssh_into_instance_mock.side_effect = sshops.NoKeypairError

    sshops.prepare_for_ssh('my-environment', None, False, False, False, 2)

    ssh_into_instance_mock.assert_called_once_with(
        'i-234234234424',
        command=None,
        custom_ssh=None,
        force_open=False,
        keep_open=False
    )
    log_error_mock.assert_called_once_with(
        'This environment is not set up for SSH. Use "eb ssh --setup" to set up SSH for the environment.'
    )
| 39.055268
| 124
| 0.661389
| 2,764
| 22,613
| 5.030753
| 0.09081
| 0.085221
| 0.078533
| 0.134628
| 0.872132
| 0.8548
| 0.844444
| 0.83639
| 0.819705
| 0.803524
| 0
| 0.02266
| 0.238889
| 22,613
| 578
| 125
| 39.122837
| 0.785254
| 0.023703
| 0
| 0.757396
| 0
| 0.001972
| 0.262645
| 0.167603
| 0
| 0
| 0
| 0
| 0.078895
| 1
| 0.055227
| false
| 0
| 0.013807
| 0
| 0.071006
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c2160488520b872bdf57d0dd7199295c8482b6a
| 22,373
|
py
|
Python
|
python/Tracker_wrapper/ObjectnessVisualizer.py
|
ibogun/DeepAntrack
|
0b13d363a30c8ad63c0e2c8cbc16aebff90de69a
|
[
"MIT"
] | 1
|
2016-09-13T18:20:17.000Z
|
2016-09-13T18:20:17.000Z
|
python/Tracker_wrapper/ObjectnessVisualizer.py
|
ibogun/DeepAntrack
|
0b13d363a30c8ad63c0e2c8cbc16aebff90de69a
|
[
"MIT"
] | null | null | null |
python/Tracker_wrapper/ObjectnessVisualizer.py
|
ibogun/DeepAntrack
|
0b13d363a30c8ad63c0e2c8cbc16aebff90de69a
|
[
"MIT"
] | null | null | null |
__author__ = 'Ivan'
import objectness_python
import tracker_python
from Dataset import VOT2015Dataset
import numpy as np
import matplotlib.pyplot as plt
import cv2
from matplotlib import gridspec
import re
import os
import time
import math
import copy
class ObjectnessVizualizer(object):
    """Class to perform objectness visualization.

    Renders "objectness" heat maps (straddling / edge-density scores produced
    by the native `objectness_python` binding) side by side with the video
    frame and ground-truth bounding box for sequences from a VOT-style
    dataset. Python 2 code (print statements).
    """

    def __init__(self, dataset, superpixels = 200, inner=0.9):
        """Constructor for ObjectnessVizualizer.

        dataset     -- dataset object exposing video_folders,
                       readGroundTruthAll() and getListOfImages()
        superpixels -- superpixel count passed to the objectness backend
        inner       -- inner-ratio parameter passed to the objectness backend
        """
        self.dataset = dataset
        self.superpixels = superpixels
        self.inner = inner

    @staticmethod
    def combinePlotsWithMean(full_image, H, img, mean, filename = None, axis_str = None):
        """Draw a 3-panel figure: full frame | straddling heat map overlay | mean map.

        If filename is None the figure is drawn interactively (1 s pause);
        otherwise it is saved to filename and closed.
        """
        gs = gridspec.GridSpec(1, 3, width_ratios=[4, 2, 2])
        ax0 = plt.subplot(gs[0])
        ax0.imshow(full_image)
        ax0.axis('off')
        zvals = np.array(H)
        zvals2 = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
        #zvals = np.transpose(zvals)
        zvals = np.flipud(zvals)
        zvals2 = np.flipud(zvals2)
        cmap1 = plt.cm.jet
        cmap2 = plt.cm.gray
        # Private-API use: force colormap LUT creation so alpha can be edited.
        cmap2._init() # create the _lut array, with rgba valuesHH
        alphas = np.linspace(0, 0.6, cmap2.N+3)
        cmap2._lut[:,-1] = alphas
        ax1 = plt.subplot(gs[1])
        ax1.imshow(zvals, interpolation='nearest', cmap=cmap1, origin='lower')
        ax1.imshow(zvals2, interpolation='nearest', cmap=cmap2, origin='lower')
        ax1.axis('off')
        if axis_str is not None:
            ax0.set_title(axis_str)
        ax1.set_title("Straddling")
        ax2=plt.subplot(gs[2])
        ax2.matshow(mean)
        ax2.axis('off')
        ax2.set_title("Mean")
        if filename is None:
            #plt.show()
            plt.draw()
            time.sleep(1)
        else:
            plt.savefig(filename,bbox_inches='tight', dpi = 100)
            plt.close()

    @staticmethod
    def combinePlots(full_image, H, img,filename = None, axis_str = None):
        """Draw a 2-panel figure: full frame | heat map overlaid on the crop.

        Same draw-or-save behavior as combinePlotsWithMean, without the
        third "Mean" panel.
        """
        gs = gridspec.GridSpec(1, 2, width_ratios=[3, 1])
        ax0 = plt.subplot(gs[0])
        ax0.imshow(full_image)
        ax0.axis('off')
        zvals = np.array(H)
        #min_z = np.min(zvals.flatten(1))
        #max_z = np.max(zvals.flatten(1))
        #zvals = (zvals - min_z)/(max_z - min_z)
        zvals2 = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
        #zvals = np.transpose(zvals)
        zvals = np.flipud(zvals)
        zvals2 = np.flipud(zvals2)
        cmap1 = plt.cm.jet
        cmap2 = plt.cm.gray
        # Private-API use: force colormap LUT creation so alpha can be edited.
        cmap2._init() # create the _lut array, with rgba valuesHH
        alphas = np.linspace(0, 0.6, cmap2.N+3)
        cmap2._lut[:,-1] = alphas
        ax1 = plt.subplot(gs[1])
        ax1.imshow(zvals, interpolation='nearest', cmap=cmap1, origin='lower')
        ax1.imshow(zvals2, interpolation='nearest', cmap=cmap2, origin='lower')
        ax1.axis('off')
        if axis_str is not None:
            ax0.set_title(axis_str)
        if filename is None:
            #plt.show()
            plt.draw()
            time.sleep(1)
        else:
            plt.savefig(filename,bbox_inches='tight', dpi = 100)
            plt.close()

    @staticmethod
    def correctDims(box, width, height, R):
        """Expand `box` (x, y, w, h) by margin R, clamped to the image bounds.

        Returns (min_x, min_y, max_x, max_y) in pixel coordinates.
        """
        min_x = max(box[0]-R, 0)
        min_y = max(box[1]-R, 0)
        max_x = min(box[0]+R +box[2], width -1)
        max_y = min(box[1]+R+box[3], height -1)
        return (min_x, min_y, max_x, max_y)

    @staticmethod
    def drawRectangle(image, box, R):
        """Draw a green rectangle of half-extent R around the image center,
        clamped so it never shrinks below half the box size. Mutates and
        returns `image`.

        NOTE(review): pt1/pt2 mix row (c_x from shape[0]) and column (c_y
        from shape[1]) conventions — confirm axis order is intended.
        """
        n = image.shape[0]
        m = image.shape[1]
        c_x = n/2
        c_y = m/2
        pt1 = (max(c_y - R, box[2]/2), max(c_x - R, box[3]/2))
        pt2 = (min(c_y + R, m - box[2]/2), min(c_x + R, n - box[3]/2))
        cv2.rectangle(image, pt1, pt2, (0,255,100), 2)
        return image

    def evaluateImageAverageStraddling(self, video_number, frame_number = 0, saveFolder = None):
        """Visualize a moving-average straddling map over scales for one frame.

        Computes straddling maps across a scale pyramid for a single frame,
        maintains a delayed moving average over the scale sequence, and
        renders each scale with combinePlotsWithMean. Figures are saved
        under saveFolder/<video_name>/ when saveFolder is given.
        """
        video = self.dataset.video_folders[video_number]
        boxes = self.dataset.readGroundTruthAll(video)
        print video
        print len(boxes)
        images = self.dataset.getListOfImages(video)
        R = 60
        scale_R = 60
        min_size_half = 10
        min_scales=-15
        max_scales =8
        downsample=1.03
        shrink_one_size = 0
        s=re.split('/',video)
        video_name = s[len(s)-1]
        fig = plt.figure(figsize=(8, 6))
        plt.ion()
        plt.show()
        i = frame_number
        obj = objectness_python.Objectness()
        box=boxes[i]
        im_name = images[i]
        img = cv2.imread(im_name,1)
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        height = img.shape[0]
        width = img.shape[1]
        (min_x, min_y, max_x, max_y) = self.correctDims(box, width, height, R)
        small_image = img[min_y:max_y, min_x :max_x]
        obj.readImage(im_name)
        obj.smallImage(R, box[0], box[1], box[2], box[3])
        a = obj.process(self.superpixels, self.inner, 0, R, scale_R, min_size_half, min_scales, max_scales,
                        downsample, shrink_one_size,
                        box[0], box[1], box[2], box[3])
        #obj.plot()
        c_x = box[0] - min_x + int(box[2]/2.0)
        c_y = box[1] - min_y + int(box[3]/2.0)
        counter = 1
        # reshuffle the list a bit
        a = a[1:min_scales] + [a[0]] + a[min_scales:len(a)]
        sums =np.zeros((len(a[0]),len(a[0][0])))
        counts = np.zeros((len(a[0]),len(a[0][0])))
        normalized=list()
        delay = 5
        # First pass: per-cell moving average over the last `delay` scale maps.
        for H,i in zip(a, range(0,len(a))):
            prevExists = (i-delay>=0)
            if (prevExists):
                objs_delay = np.array(a[i - delay])
            objs = np.array(H)
            mat = np.zeros((len(a[0]),len(a[0][0])))
            print np.max(H)
            for x in range(0,objs.shape[0]):
                for y in range(0, objs.shape[1]):
                    # get the new data
                    if objs[x,y]!=0:
                        counts[x,y]= counts[x,y]+1
                        sums[x,y] = sums[x,y] +objs[x,y]
                    # keep the moving average moving
                    if prevExists:
                        sums[x,y] = sums[x,y] - objs_delay[x,y]
                        if (objs_delay[x,y]!=0):
                            counts[x,y] = counts[x,y] -1
                    if counts[x,y]!= 0:
                        mat[x,y] = sums[x,y] / float(counts[x,y])
            normalized.append(mat)
        # Second pass: render each averaged map next to its raw map.
        for H,h in zip(normalized,a):
            h=np.array(h)
            image_full = copy.deepcopy(img)
            small_image_copy = image_full[min_y:max_y, min_x :max_x]
            if ( counter == 1):
                half_width = box[2]/2.0
                half_height = box[3]/2.0
                width = box[2]
                height = box[3]
            else:
                half_width = ((box[2]/2)*math.pow(downsample, min_scales + counter - 1))
                half_height = ((box[3]/2)*math.pow(downsample, min_scales + counter - 1))
                width = int(half_width*2)
                height = int(half_height*2)
            pt1=(int(c_x - half_width), int(c_y - half_height))
            pt2=(int(c_x + half_width), int(c_y + half_height))
            cv2.rectangle(image_full, (pt1[0]+min_x, pt1[1]+min_y),(pt2[0]+min_x, pt2[1]+min_y), (100,0,150), 2)
            cv2.rectangle(image_full, (min_x, min_y), (max_x, max_y), (0,255,200),2)
            small_image_copy = self.drawRectangle(small_image_copy, (0,0,width, height) , R)
            print "processing image: ", " " , counter ,"/", len(a)
            if saveFolder is not None:
                directory = saveFolder + "/" + video_name+"/"
                if not os.path.exists(directory):
                    os.makedirs(directory)
                saveImage = directory+ str(1000 + counter) + ".png"
                # Skip frames already rendered in a previous run.
                if(os.path.isfile(saveImage)):
                    counter = counter + 1
                    continue
            else:
                saveImage = None
            axis_str = str(round(width/float(box[2])*100,2)) +"%"
            self.combinePlotsWithMean(image_full, H, small_image_copy,h,filename = saveImage, axis_str=axis_str)
            counter = counter + 1
        plt.close()

    def evaluateImage(self, video_number, frame_number = 0, saveFolder = None):
        """Visualize straddling maps over scales for one frame, against their mean.

        Like evaluateImageAverageStraddling but with a single global mean map
        (averaged over all scales) instead of a delayed moving average.

        NOTE(review): obj.process is called with (..., R, 0, scale_R, ...)
        here but (..., 0, R, scale_R, ...) in evaluateImageAverageStraddling
        — confirm which argument order is correct.
        """
        video = self.dataset.video_folders[video_number]
        boxes = self.dataset.readGroundTruthAll(video)
        print video
        print len(boxes)
        images = self.dataset.getListOfImages(video)
        R = 60
        scale_R = 60
        min_size_half = 10
        min_scales=-15
        max_scales =8
        downsample=1.03
        shrink_one_size = 0
        s=re.split('/',video)
        video_name = s[len(s)-1]
        fig = plt.figure(figsize=(8, 6))
        plt.ion()
        plt.show()
        i = frame_number
        obj = objectness_python.Objectness()
        box=boxes[i]
        im_name = images[i]
        img = cv2.imread(im_name,1)
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        height = img.shape[0]
        width = img.shape[1]
        (min_x, min_y, max_x, max_y) = self.correctDims(box, width, height, R)
        small_image = img[min_y:max_y, min_x :max_x]
        obj.readImage(im_name)
        obj.smallImage(R, box[0], box[1], box[2], box[3])
        a = obj.process(self.superpixels, self.inner, R, 0, scale_R, min_size_half, min_scales, max_scales,
                        downsample, shrink_one_size,
                        box[0], box[1], box[2], box[3])
        #obj.plot()
        c_x = box[0] - min_x + int(box[2]/2.0)
        c_y = box[1] - min_y + int(box[3]/2.0)
        counter = 1
        mean =np.zeros((len(a[0]),len(a[0][0])))
        counts = np.zeros((len(a[0]),len(a[0][0])))
        # Accumulate per-cell sums and nonzero counts over all scale maps.
        for H in a:
            objs = np.array(H)
            for x in range(0,objs.shape[0]):
                for y in range(0, objs.shape[1]):
                    if objs[x,y]!=0:
                        counts[x,y]= counts[x,y]+1
                        mean[x,y] = mean[x,y] +objs[x,y]
            for x in range(0,objs.shape[0]):
                for y in range(0, objs.shape[1]):
                    if counts[x,y]!=0:
                        mean[x,y] = mean[x,y]/float(counts[x,y])
        for H in a:
            image_full = copy.deepcopy(img)
            small_image_copy = image_full[min_y:max_y, min_x :max_x]
            if ( counter == 1):
                half_width = box[2]/2.0
                half_height = box[3]/2.0
                width = box[2]
                height = box[3]
            else:
                half_width = ((box[2]/2)*math.pow(downsample, min_scales + counter - 1))
                half_height = ((box[3]/2)*math.pow(downsample, min_scales + counter - 1))
                width = int(half_width*2)
                height = int(half_height*2)
            pt1=(int(c_x - half_width), int(c_y - half_height))
            pt2=(int(c_x + half_width), int(c_y + half_height))
            cv2.rectangle(image_full, (pt1[0]+min_x, pt1[1]+min_y),(pt2[0]+min_x, pt2[1]+min_y), (100,0,150), 2)
            cv2.rectangle(image_full, (min_x, min_y), (max_x, max_y), (0,255,200),2)
            small_image_copy = self.drawRectangle(small_image_copy, (0,0,width, height) , R)
            print "processing image: ", " " , counter ,"/", len(a)
            if saveFolder is not None:
                directory = saveFolder + "/" + video_name+"/"
                if not os.path.exists(directory):
                    os.makedirs(directory)
                saveImage = directory+ str(1000 + counter) + ".png"
                if(os.path.isfile(saveImage)):
                    counter = counter + 1
                    continue
            else:
                saveImage = None
            axis_str = str(round(width/float(box[2])*100,2)) +"%"
            self.combinePlotsWithMean(image_full, H, small_image_copy, mean,filename = saveImage, axis_str=axis_str)
            counter = counter + 1
        plt.close()

    def evaluateDiscriminativeFunction(self, video_number, together=False, saveFolder=None):
        """Run the tracker over a video and visualize its discriminative function.

        When `together` is True the discriminative map is normalized and
        combined with objectness (straddling/edge) maps.

        NOTE(review): a_s and a_e are produced by two identical processEdge
        calls — one of them was presumably meant to be the straddling
        `process` call; confirm.
        """
        video = self.dataset.video_folders[video_number]
        boxes = self.dataset.readGroundTruthAll(video)
        print video
        print len(boxes)
        images = self.dataset.getListOfImages(video)
        bbox = boxes[0]
        R = 60
        scale_R = 60
        min_size_half = 10
        min_scales=0
        max_scales =0
        downsample=1.05
        shrink_one_size = 0
        s=re.split('/',video)
        video_name = s[len(s)-1]
        tracker = tracker_python.Antrack()
        tracker.initializeTracker()
        print images[0], bbox
        tracker.initialize(images[0], bbox[0], bbox[1], bbox[2], bbox[3])
        fig = plt.figure(figsize=(8, 6))
        plt.ion()
        plt.show()
        for i in range(1, len(images)):
            print "processing image: ", " " , i ,"/", len(images)
            if saveFolder is not None:
                directory = saveFolder + "/" + video_name+"/"
                if not os.path.exists(directory):
                    os.makedirs(directory)
                saveImage = directory+ str(1000 + i) + ".png"
                if(os.path.isfile(saveImage)):
                    continue
            else:
                saveImage = None
            box=boxes[i]
            im_name = images[i]
            img = cv2.imread(im_name,1)
            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
            height = img.shape[0]
            width = img.shape[1]
            (min_x, min_y, max_x, max_y) = self.correctDims(box, width, height, R)
            small_image = img[min_y:max_y, min_x :max_x]
            im_name = images[i]
            out = tracker.track(im_name)
            # Visualization is only produced for frame 100.
            if i == 100:
                if together:
                    obj = objectness_python.Objectness()
                    obj.readImage(im_name)
                    obj.smallImage(R, box[0], box[1], box[2], box[3])
                    a_s = obj.processEdge(self.superpixels,self.inner, 0,
                                          R, scale_R, min_size_half, min_scales, max_scales,
                                          downsample, shrink_one_size,
                                          box[0], box[1], box[2], box[3])
                    a_e = obj.processEdge(self.superpixels,self.inner, 0,
                                          R, scale_R, min_size_half, min_scales, max_scales,
                                          downsample, shrink_one_size,
                                          box[0], box[1], box[2], box[3])
                    H_s=np.array(a_s[0])
                    H_e=np.array(a_e[0])
                a = tracker.calculateDiscriminativeFunction(im_name)
                H=np.array(a)
                H=H[min_x:max_x, min_y :max_y]
                H = np.transpose(H)
                if together:
                    # Min-max normalize before mixing in the objectness maps.
                    min_z = np.min(H.flatten(1))
                    max_z = np.max(H.flatten(1))
                    H = (H - min_z)/(max_z - min_z)
                    H = H + 0.3* H_s + 0.3 * H_e
                print H.shape
                self.combinePlots(img, H, small_image, saveImage)

    def evaluateVideoEdge(self, video_number, saveFolder=None):
        """Visualize the edge-density objectness map for every frame of a video."""
        video = self.dataset.video_folders[video_number]
        boxes = self.dataset.readGroundTruthAll(video)
        print video
        print len(boxes)
        images = self.dataset.getListOfImages(video)
        R = 60
        scale_R = 60
        min_size_half = 10
        min_scales=0
        max_scales =0
        downsample=1.05
        shrink_one_size = 0
        s=re.split('/',video)
        video_name = s[len(s)-1]
        fig = plt.figure(figsize=(8, 6))
        plt.ion()
        plt.show()
        for i in range(0, len(images)):
            print "processing image: ", " " , i ,"/", len(images)
            if saveFolder is not None:
                directory = saveFolder + "/" + video_name+"/"
                if not os.path.exists(directory):
                    os.makedirs(directory)
                saveImage = directory+ str(1000 + i) + ".png"
                if(os.path.isfile(saveImage)):
                    continue
            else:
                saveImage = None
            obj = objectness_python.Objectness()
            box=boxes[i]
            im_name = images[i]
            img = cv2.imread(im_name,1)
            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
            height = img.shape[0]
            width = img.shape[1]
            (min_x, min_y, max_x, max_y) = self.correctDims(box, width, height, R)
            small_image = img[min_y:max_y, min_x :max_x]
            obj.readImage(im_name)
            pt1=(box[0] - min_x, box[1] - min_y)
            pt2=(box[0] - min_x + box[2], box[1] -min_y + box[3])
            cv2.rectangle(small_image, pt1,pt2, (100,0,150), 2)
            cv2.rectangle(img, (min_x, min_y), (max_x, max_y), (0,255,200),2)
            small_image = self.drawRectangle(small_image, box , R)
            obj.smallImage(R, box[0], box[1], box[2], box[3])
            a = obj.processEdge(self.superpixels,self.inner, 0,
                                R, scale_R, min_size_half, min_scales, max_scales,
                                downsample, shrink_one_size,
                                box[0], box[1], box[2], box[3])
            #obj.plot()
            H = a[0]
            print len(H), len(H[0])
            self.combinePlots(img, H, small_image, saveImage)

    def evaluateVideo(self, video_number, saveFolder=None):
        """Visualize the straddling objectness map for every frame of a video."""
        video = self.dataset.video_folders[video_number]
        boxes = self.dataset.readGroundTruthAll(video)
        print video
        print len(boxes)
        images = self.dataset.getListOfImages(video)
        R = 60
        scale_R = 60
        min_size_half = 10
        min_scales=0
        max_scales =0
        downsample=1.05
        shrink_one_size = 0
        s=re.split('/',video)
        video_name = s[len(s)-1]
        fig = plt.figure(figsize=(8, 6))
        plt.ion()
        plt.show()
        for i in range(0, len(images)):
            print "processing image: ", " " , i ,"/", len(images)
            if saveFolder is not None:
                directory = saveFolder + "/" + video_name+"/"
                if not os.path.exists(directory):
                    os.makedirs(directory)
                saveImage = directory+ str(1000 + i) + ".png"
                if(os.path.isfile(saveImage)):
                    continue
            else:
                saveImage = None
            obj = objectness_python.Objectness()
            box=boxes[i]
            im_name = images[i]
            img = cv2.imread(im_name,1)
            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
            height = img.shape[0]
            width = img.shape[1]
            (min_x, min_y, max_x, max_y) = self.correctDims(box, width, height, R)
            small_image = img[min_y:max_y, min_x :max_x]
            obj.readImage(im_name)
            pt1=(box[0] - min_x, box[1] - min_y)
            pt2=(box[0] - min_x + box[2], box[1] -min_y + box[3])
            cv2.rectangle(small_image, pt1,pt2, (100,0,150), 2)
            cv2.rectangle(img, (min_x, min_y), (max_x, max_y), (0,255,200),2)
            #small_image = self.drawRectangle(small_image, box , R)
            obj.smallImage(R, box[0], box[1], box[2], box[3])
            a = obj.process(self.superpixels,self.inner, 0,
                            R, scale_R, min_size_half, min_scales, max_scales,
                            downsample, shrink_one_size,
                            box[0], box[1], box[2], box[3])
            #obj.plot()
            H = a[0]
            self.combinePlots(img, H, small_image, saveImage)
def straddlingInTime(save = False):
    """Render per-frame straddling maps for selected VOT2015 videos.

    save -- when True, figures are written under a results folder instead of
            being shown interactively.

    Fix: `superpixels` was computed but never passed to the constructor
    (unlike straddlingInSpace); it is now passed. Behavior is unchanged
    because 200 is also the constructor default.
    """
    root_folder = '/Users/Ivan/Code/Tracking/Antrack/matlab/vot-toolkit/vot2015/sequences'
    vot = VOT2015Dataset(root_folder)
    superpixels = 200
    obj = ObjectnessVizualizer(vot, superpixels)
    #videos = [3, 30, 25]
    videos = [3]
    if save:
        saveOutputFolder = '/Users/Ivan/Files/Results/Tracking/VOT2015_straddling_in_time'
    else:
        saveOutputFolder = None
    for v in videos:
        obj.evaluateVideo(v, saveOutputFolder)
def edgeDensityInTime(save = False):
    """Render per-frame edge-density maps for selected VOT2015 videos.

    save -- when True, figures are written to disk instead of shown.
    """
    root_folder = '/Users/Ivan/Code/Tracking/Antrack/matlab/vot-toolkit/vot2015/sequences'
    vot = VOT2015Dataset(root_folder)
    obj = ObjectnessVizualizer(vot)
    #videos = [3, 30, 25]
    videos = [3]
    saveOutputFolder = None
    if save:
        saveOutputFolder = '/Users/Ivan/Files/Results/Tracking/VOT2015_edgeDensity_in_time'
    for video_index in videos:
        obj.evaluateVideoEdge(video_index, saveOutputFolder)
def discriminativeFunctionInTime(together = True, save = False):
    """Run the tracker on selected VOT2015 videos and plot its discriminative map.

    together -- combine the discriminative map with objectness maps
    save     -- when True, figures are written to disk instead of shown
    """
    root_folder = '/Users/Ivan/Code/Tracking/Antrack/matlab/vot-toolkit/vot2015/sequences'
    vot = VOT2015Dataset(root_folder)
    obj = ObjectnessVizualizer(vot)
    #videos = [3, 30, 25]
    videos = [3]
    saveOutputFolder = None
    if save:
        saveOutputFolder = '/Users/Ivan/Files/Results/Tracking/VOT2015_discriminative_in_time'
    for video_index in videos:
        obj.evaluateDiscriminativeFunction(video_index, together=together, saveFolder=saveOutputFolder)
def straddlingInSpace( save = False):
    """Render straddling maps across scales for single frames of selected videos.

    save -- when True, figures are written to disk instead of shown.
    """
    root_folder = '/Users/Ivan/Code/Tracking/Antrack/matlab/vot-toolkit/vot2015/sequences'
    vot = VOT2015Dataset(root_folder)
    superpixels = 200
    obj = ObjectnessVizualizer(vot, superpixels)
    videos = [3, 30, 25]
    #videos = [30]
    saveOutputFolder = None
    if save:
        saveOutputFolder = '/Users/Ivan/Files/Results/Tracking/VOT2015_straddling_in_space'
    for video_index in videos:
        obj.evaluateImage(video_index, saveFolder=saveOutputFolder)
def straddelingAverageInSpace(save = False):
    """Render moving-average straddling maps across scales for selected videos.

    save -- when True, figures are written to disk instead of shown.

    Fix: removed the dead assignment `videos = [3, 30, 25]` that was
    immediately overwritten by `videos = [30]` (kept as a comment, matching
    the commented-out alternatives in the sibling functions).
    """
    root_folder = '/Users/Ivan/Code/Tracking/Antrack/matlab/vot-toolkit/vot2015/sequences'
    vot = VOT2015Dataset(root_folder)
    superpixels = 200
    obj = ObjectnessVizualizer(vot, superpixels)
    #videos = [3, 30, 25]
    videos = [30]
    if save:
        saveOutputFolder = '/Users/Ivan/Files/Results/Tracking/VOT2015_straddling_in_space_average'
    else:
        saveOutputFolder = None
    for v in videos:
        obj.evaluateImageAverageStraddling(v, saveFolder=saveOutputFolder)
if __name__ == "__main__":
    # Entry point: run the tracker-based discriminative-function visualization
    # and save the figures. The commented alternatives below run the other
    # visualizations instead.
    discriminativeFunctionInTime(together=True, save=True)
    #straddlingInTime(True)
    #edgeDensityInTime(False)
| 32.709064
| 116
| 0.538998
| 2,887
| 22,373
| 4.034292
| 0.087288
| 0.00996
| 0.011419
| 0.007556
| 0.811454
| 0.808019
| 0.796858
| 0.79411
| 0.782433
| 0.766292
| 0
| 0.04427
| 0.334644
| 22,373
| 684
| 117
| 32.709064
| 0.738143
| 0.024449
| 0
| 0.769384
| 0
| 0
| 0.041722
| 0.030854
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.023857
| null | null | 0.037773
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7c3865b126a3a4d6d2190aace430df22e1463b4c
| 147
|
py
|
Python
|
environments/assets/gym_dummy/envs/__init__.py
|
GPaolo/SERENE
|
83bc38a37ad8f1be9695d2483fd463428d4dae23
|
[
"MIT"
] | 3
|
2021-04-19T21:55:00.000Z
|
2021-12-20T15:26:12.000Z
|
environments/assets/gym_dummy/envs/__init__.py
|
GPaolo/SERENE
|
83bc38a37ad8f1be9695d2483fd463428d4dae23
|
[
"MIT"
] | null | null | null |
environments/assets/gym_dummy/envs/__init__.py
|
GPaolo/SERENE
|
83bc38a37ad8f1be9695d2483fd463428d4dae23
|
[
"MIT"
] | null | null | null |
# Created by Giuseppe Paolo
# Date: 13/03/2020
from gym_dummy.envs.dummy_env import DummyEnv
from gym_dummy.envs.walker_2d_env import Walker2DEnv
| 29.4
| 52
| 0.823129
| 25
| 147
| 4.64
| 0.72
| 0.12069
| 0.206897
| 0.275862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.115646
| 147
| 5
| 52
| 29.4
| 0.815385
| 0.292517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7c40e1014c91b4f27ea4e52c1bae71cf398004d6
| 5,114
|
py
|
Python
|
app/gws/gis/ows/_test/gml_writer_test.py
|
ewie/gbd-websuite
|
6f2814c7bb64d11cb5a0deec712df751718fb3e1
|
[
"Apache-2.0"
] | null | null | null |
app/gws/gis/ows/_test/gml_writer_test.py
|
ewie/gbd-websuite
|
6f2814c7bb64d11cb5a0deec712df751718fb3e1
|
[
"Apache-2.0"
] | null | null | null |
app/gws/gis/ows/_test/gml_writer_test.py
|
ewie/gbd-websuite
|
6f2814c7bb64d11cb5a0deec712df751718fb3e1
|
[
"Apache-2.0"
] | null | null | null |
import gws.gis.gml
import gws.gis.shape
import gws.tools.xml2
"""
drop table if exists tt;
create temporary table tt (w text);
insert into tt values
('POINT (30 10)'),
('LINESTRING (30 10, 10 30, 40 40)'),
('POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))'),
('POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10),(20 30, 35 35, 30 20, 20 30))'),
('MULTIPOINT ((10 40), (40 30), (20 20), (30 10))'),
('MULTIPOINT (10 40, 40 30, 20 20, 30 10)'),
('MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))'),
('MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))'),
('MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 10 30, 10 10, 30 5, 45 20, 20 35),(30 20, 20 15, 20 25, 30 20)))'
)
select w, st_asgml(3, st_geomfromewkt('SRID=25832;'||w), 1, 4+1) as g from tt
"""
test = [
{
"w": "POINT (30 10)",
"g": "<gml:Point srsName=\"urn:ogc:def:crs:EPSG::25832\"><gml:pos srsDimension=\"2\">30 10</gml:pos></gml:Point>"
},
{
"w": "LINESTRING (30 10, 10 30, 40 40)",
"g": "<gml:LineString srsName=\"urn:ogc:def:crs:EPSG::25832\"><gml:posList srsDimension=\"2\">30 10 10 30 40 40</gml:posList></gml:LineString>"
},
{
"w": "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))",
"g": "<gml:Polygon srsName=\"urn:ogc:def:crs:EPSG::25832\"><gml:exterior><gml:LinearRing><gml:posList srsDimension=\"2\">30 10 40 40 20 40 10 20 30 10</gml:posList></gml:LinearRing></gml:exterior></gml:Polygon>"
},
{
"w": "POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10),(20 30, 35 35, 30 20, 20 30))",
"g": "<gml:Polygon srsName=\"urn:ogc:def:crs:EPSG::25832\"><gml:exterior><gml:LinearRing><gml:posList srsDimension=\"2\">35 10 45 45 15 40 10 20 35 10</gml:posList></gml:LinearRing></gml:exterior><gml:interior><gml:LinearRing><gml:posList srsDimension=\"2\">20 30 35 35 30 20 20 30</gml:posList></gml:LinearRing></gml:interior></gml:Polygon>"
},
{
"w": "MULTIPOINT ((10 40), (40 30), (20 20), (30 10))",
"g": "<gml:MultiPoint srsName=\"urn:ogc:def:crs:EPSG::25832\"><gml:pointMember><gml:Point><gml:pos srsDimension=\"2\">10 40</gml:pos></gml:Point></gml:pointMember><gml:pointMember><gml:Point><gml:pos srsDimension=\"2\">40 30</gml:pos></gml:Point></gml:pointMember><gml:pointMember><gml:Point><gml:pos srsDimension=\"2\">20 20</gml:pos></gml:Point></gml:pointMember><gml:pointMember><gml:Point><gml:pos srsDimension=\"2\">30 10</gml:pos></gml:Point></gml:pointMember></gml:MultiPoint>"
},
{
"w": "MULTIPOINT (10 40, 40 30, 20 20, 30 10)",
"g": "<gml:MultiPoint srsName=\"urn:ogc:def:crs:EPSG::25832\"><gml:pointMember><gml:Point><gml:pos srsDimension=\"2\">10 40</gml:pos></gml:Point></gml:pointMember><gml:pointMember><gml:Point><gml:pos srsDimension=\"2\">40 30</gml:pos></gml:Point></gml:pointMember><gml:pointMember><gml:Point><gml:pos srsDimension=\"2\">20 20</gml:pos></gml:Point></gml:pointMember><gml:pointMember><gml:Point><gml:pos srsDimension=\"2\">30 10</gml:pos></gml:Point></gml:pointMember></gml:MultiPoint>"
},
{
"w": "MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))",
"g": "<gml:MultiCurve srsName=\"urn:ogc:def:crs:EPSG::25832\"><gml:curveMember><gml:LineString><gml:posList srsDimension=\"2\">10 10 20 20 10 40</gml:posList></gml:LineString></gml:curveMember><gml:curveMember><gml:LineString><gml:posList srsDimension=\"2\">40 40 30 30 40 20 30 10</gml:posList></gml:LineString></gml:curveMember></gml:MultiCurve>"
},
{
"w": "MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))",
"g": "<gml:MultiSurface srsName=\"urn:ogc:def:crs:EPSG::25832\"><gml:surfaceMember><gml:Polygon><gml:exterior><gml:LinearRing><gml:posList srsDimension=\"2\">30 20 45 40 10 40 30 20</gml:posList></gml:LinearRing></gml:exterior></gml:Polygon></gml:surfaceMember><gml:surfaceMember><gml:Polygon><gml:exterior><gml:LinearRing><gml:posList srsDimension=\"2\">15 5 40 10 10 20 5 10 15 5</gml:posList></gml:LinearRing></gml:exterior></gml:Polygon></gml:surfaceMember></gml:MultiSurface>"
},
{
"w": "MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 10 30, 10 10, 30 5, 45 20, 20 35),(30 20, 20 15, 20 25, 30 20)))",
"g": "<gml:MultiSurface srsName=\"urn:ogc:def:crs:EPSG::25832\"><gml:surfaceMember><gml:Polygon><gml:exterior><gml:LinearRing><gml:posList srsDimension=\"2\">40 40 20 45 45 30 40 40</gml:posList></gml:LinearRing></gml:exterior></gml:Polygon></gml:surfaceMember><gml:surfaceMember><gml:Polygon><gml:exterior><gml:LinearRing><gml:posList srsDimension=\"2\">20 35 10 30 10 10 30 5 45 20 20 35</gml:posList></gml:LinearRing></gml:exterior><gml:interior><gml:LinearRing><gml:posList srsDimension=\"2\">30 20 20 15 20 25 30 20</gml:posList></gml:LinearRing></gml:interior></gml:Polygon></gml:surfaceMember></gml:MultiSurface>"
}
]
def test_gml_writer():
    """Round-trip every WKT fixture through the GML writer and compare markup."""
    for case in test:
        shape = gws.gis.shape.from_wkt(case['w'], 'EPSG:25832')
        tag = gws.gis.gml.shape_to_tag(shape)
        rendered = gws.tools.xml2._string(tag)
        assert case['g'] == rendered
| 73.057143
| 628
| 0.635315
| 848
| 5,114
| 3.821934
| 0.089623
| 0.02962
| 0.078988
| 0.078062
| 0.894477
| 0.890157
| 0.859303
| 0.821969
| 0.699475
| 0.682814
| 0
| 0.168409
| 0.139617
| 5,114
| 69
| 629
| 74.115942
| 0.568182
| 0
| 0
| 0.042553
| 0
| 0.468085
| 0.768123
| 0.452778
| 0
| 0
| 0
| 0
| 0.021277
| 1
| 0.021277
| false
| 0
| 0.06383
| 0
| 0.085106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7c78321489bb69c30962b38d73e6663adad1bfe6
| 4,826
|
py
|
Python
|
nakamoto/sector/sector.py
|
YazzyYaz/nakamoto-coefficient
|
e6764e7b797dcc3dd19fbef51fd71f703a2097e7
|
[
"MIT"
] | 6
|
2019-01-17T03:40:15.000Z
|
2020-08-25T10:30:00.000Z
|
nakamoto/sector/sector.py
|
ForkWatch/nakamoto-coefficient
|
e6764e7b797dcc3dd19fbef51fd71f703a2097e7
|
[
"MIT"
] | 2
|
2019-07-09T20:05:36.000Z
|
2019-12-17T15:57:22.000Z
|
nakamoto/sector/sector.py
|
ForkWatch/nakamoto-coefficient
|
e6764e7b797dcc3dd19fbef51fd71f703a2097e7
|
[
"MIT"
] | 2
|
2019-07-09T20:02:57.000Z
|
2020-08-25T10:30:10.000Z
|
from .analysis import Gini, LorenzPlot
from nakamoto.coefficient import SectorNakamoto
import uuid
import numpy as np
class Sector(object):
    """Base class computing inequality metrics (Gini, Nakamoto) for one sector.

    Subclasses are expected to populate ``self.data`` (a numpy array of
    per-participant quantities — TODO confirm against subclasses) and
    ``self.type`` before the ``generate_*`` methods are used.

    Keyword args:
        plot_notebook: notebook-plotting flag forwarded to LorenzPlot.
        plot_image_path: output path for the rendered Lorenz curve image.
    """

    def __init__(self, currency, **kwargs):
        self.uuid = uuid.uuid4()
        self.data = None
        self.type = None
        self.plot_notebook = kwargs.get('plot_notebook')
        self.plot_image_path = kwargs.get('plot_image_path')
        self.currency = currency
        self.lorenz_data = None
        self.lorenz_object = None
        self.nakamoto = None
        self.title = None
        self.gini = None

    def generate_gini_coefficient(self):
        """Compute the Gini coefficient from ``self.data``.

        Raises:
            Exception: if no data has been loaded yet.
        """
        if self.data is not None:
            gini_object = Gini(self.data)
            gini = gini_object.get_gini()
            return gini
        else:
            raise Exception('Cannot generate gini. No data')

    def generate_nakamoto_coefficient(self):
        """Compute the Nakamoto coefficient from the (cached) Lorenz data."""
        # `is None` rather than truthiness: the Lorenz data may be an
        # array-like for which `not` is ambiguous or falsy-when-valid.
        if self.lorenz_data is None:
            self.lorenz_data = self.generate_lorenz_data()
        nakamoto_object = SectorNakamoto(self.lorenz_data)
        nakamoto = nakamoto_object.get_nakamoto_coefficient()
        return nakamoto

    def generate_lorenz_object(self):
        """Build the LorenzPlot helper for this sector's data."""
        file_name = f'{self.currency}_{self.type}_gini_{self.uuid}'
        title_name = f'{self.currency} {self.type.capitalize()} Lorenz Curve'
        lorenz_object = LorenzPlot(self.plot_notebook, self.plot_image_path, self.data, file_name, title_name)
        return lorenz_object

    def generate_lorenz_data(self):
        """Return the Lorenz curve data, creating the plot helper on demand."""
        if self.lorenz_object is None:
            self.lorenz_object = self.generate_lorenz_object()
        lorenz_data = self.lorenz_object.get_lorenz_data()
        return lorenz_data

    def generate_lorenz_curve(self):
        """Render the Lorenz curve plot via the plot helper."""
        if self.lorenz_object is None:
            self.lorenz_object = self.generate_lorenz_object()
        self.lorenz_object.get_plot()

    def get_lorenz_data(self):
        """Cached accessor for the Lorenz data."""
        if self.lorenz_data is None:
            self.lorenz_data = self.generate_lorenz_data()
        return self.lorenz_data

    def get_gini_coefficient(self):
        """Cached accessor for the Gini coefficient."""
        # `is None` so a legitimate coefficient of 0.0 is not recomputed
        # on every call (0.0 is falsy).
        if self.gini is None:
            self.gini = self.generate_gini_coefficient()
        return self.gini

    def get_plot(self):
        """Public entry point for plotting the Lorenz curve."""
        self.generate_lorenz_curve()

    def get_nakamoto_coefficient(self):
        """Cached accessor for the Nakamoto coefficient."""
        if self.nakamoto is None:
            self.nakamoto = self.generate_nakamoto_coefficient()
        return self.nakamoto
class CustomSector(object):
    """Sector over a caller-supplied numpy array of per-participant data.

    Args:
        data: non-empty ``numpy.ndarray`` of per-participant quantities.
        currency: currency symbol used in file and plot titles.
        sector_type: sector label used in file and plot titles.

    Keyword args:
        plot_notebook: notebook-plotting flag forwarded to LorenzPlot.
        plot_image_path: output path for the rendered Lorenz curve image.

    Raises:
        Exception: if ``data`` is not a numpy array or is empty.
    """

    def __init__(self, data, currency, sector_type, **kwargs):
        self.uuid = uuid.uuid4()
        self.data = data
        # isinstance (rather than an exact type() check) also accepts
        # numpy.ndarray subclasses.
        if not isinstance(self.data, np.ndarray):
            raise Exception('Sector data must be a numpy array')
        if len(self.data) == 0:
            raise Exception('Cannot pass empty data numpy array')
        self.plot_notebook = kwargs.get('plot_notebook')
        self.plot_image_path = kwargs.get('plot_image_path')
        self.gini = None
        self.currency = currency
        self.type = sector_type
        self.nakamoto = None
        self.lorenz_data = None
        self.lorenz_object = None

    def generate_gini_coefficient(self):
        """Compute the Gini coefficient from ``self.data``.

        Raises:
            Exception: if no data has been loaded yet.
        """
        if self.data is not None:
            gini_object = Gini(self.data)
            gini = gini_object.get_gini()
            return gini
        else:
            raise Exception('Cannot generate gini. No data')

    def generate_nakamoto_coefficient(self):
        """Compute the Nakamoto coefficient from the (cached) Lorenz data."""
        # `is None` rather than truthiness: the Lorenz data may be an
        # array-like for which `not` is ambiguous or falsy-when-valid.
        if self.lorenz_data is None:
            self.lorenz_data = self.generate_lorenz_data()
        nakamoto_object = SectorNakamoto(self.lorenz_data)
        nakamoto = nakamoto_object.get_nakamoto_coefficient()
        return nakamoto

    def generate_lorenz_object(self):
        """Build the LorenzPlot helper for this sector's data."""
        file_name = f'{self.currency}_{self.type}_gini_{self.uuid}'
        title_name = f'{self.currency} {self.type.capitalize()} Lorenz Curve'
        lorenz_object = LorenzPlot(self.plot_notebook, self.plot_image_path, self.data, file_name, title_name)
        return lorenz_object

    def generate_lorenz_data(self):
        """Return the Lorenz curve data, creating the plot helper on demand."""
        if self.lorenz_object is None:
            self.lorenz_object = self.generate_lorenz_object()
        lorenz_data = self.lorenz_object.get_lorenz_data()
        return lorenz_data

    def generate_lorenz_curve(self):
        """Render the Lorenz curve plot via the plot helper."""
        if self.lorenz_object is None:
            self.lorenz_object = self.generate_lorenz_object()
        self.lorenz_object.get_plot()

    def get_lorenz_data(self):
        """Cached accessor for the Lorenz data."""
        if self.lorenz_data is None:
            self.lorenz_data = self.generate_lorenz_data()
        return self.lorenz_data

    def get_gini_coefficient(self):
        """Cached accessor for the Gini coefficient."""
        # `is None` so a legitimate coefficient of 0.0 is not recomputed.
        if self.gini is None:
            self.gini = self.generate_gini_coefficient()
        return self.gini

    def get_plot(self):
        """Public entry point for plotting the Lorenz curve."""
        self.generate_lorenz_curve()

    def get_nakamoto_coefficient(self):
        """Cached accessor for the Nakamoto coefficient."""
        if self.nakamoto is None:
            self.nakamoto = self.generate_nakamoto_coefficient()
        return self.nakamoto
| 34.971014
| 110
| 0.656651
| 597
| 4,826
| 5.040201
| 0.093802
| 0.093054
| 0.065138
| 0.051844
| 0.849452
| 0.849452
| 0.849452
| 0.828847
| 0.804919
| 0.804919
| 0
| 0.000842
| 0.261293
| 4,826
| 137
| 111
| 35.226277
| 0.843198
| 0
| 0
| 0.852174
| 0
| 0
| 0.077704
| 0.028181
| 0
| 0
| 0
| 0
| 0
| 1
| 0.173913
| false
| 0.008696
| 0.034783
| 0
| 0.347826
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c9afbb917ef2178c7273c4e1a8d4399d1993823
| 202
|
py
|
Python
|
graspy/plot/__init__.py
|
dfrancisco1998/graspy
|
241c2f4586d9d44bc7f2e6a7451c9383ad8ff841
|
[
"Apache-2.0"
] | null | null | null |
graspy/plot/__init__.py
|
dfrancisco1998/graspy
|
241c2f4586d9d44bc7f2e6a7451c9383ad8ff841
|
[
"Apache-2.0"
] | null | null | null |
graspy/plot/__init__.py
|
dfrancisco1998/graspy
|
241c2f4586d9d44bc7f2e6a7451c9383ad8ff841
|
[
"Apache-2.0"
] | 1
|
2020-03-31T22:02:24.000Z
|
2020-03-31T22:02:24.000Z
|
import sys
import matplotlib as mpl
from .plot import heatmap, gridplot, pairplot, degreeplot, edgeplot, screeplot
__all__ = ["heatmap", "gridplot", "pairplot", "degreeplot", "edgeplot", "screeplot"]
| 28.857143
| 84
| 0.747525
| 22
| 202
| 6.681818
| 0.636364
| 0.204082
| 0.312925
| 0.44898
| 0.680272
| 0.680272
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123762
| 202
| 6
| 85
| 33.666667
| 0.830508
| 0
| 0
| 0
| 0
| 0
| 0.247525
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6b182c02cf77c48634a782704b6561d9ab805eab
| 7,811
|
py
|
Python
|
model/resnet34.py
|
FAHAI-1/tensorflow-cifar100
|
4b5b76d0a310e007620e9a4f7d14da4e5db939e3
|
[
"Apache-2.0"
] | 109
|
2019-05-12T13:26:10.000Z
|
2022-03-09T01:45:20.000Z
|
model/resnet34.py
|
FAHAI-1/tensorflow-cifar100
|
4b5b76d0a310e007620e9a4f7d14da4e5db939e3
|
[
"Apache-2.0"
] | 10
|
2019-05-16T08:26:01.000Z
|
2020-07-27T05:56:35.000Z
|
model/resnet34.py
|
FAHAI-1/tensorflow-cifar100
|
4b5b76d0a310e007620e9a4f7d14da4e5db939e3
|
[
"Apache-2.0"
] | 40
|
2019-05-19T14:38:00.000Z
|
2022-02-25T16:18:39.000Z
|
import tensorflow as tf
def identity_block2d(input_tensor, kernel_size, filters, stage, block, is_training, reuse, kernel_initializer=tf.contrib.layers.variance_scaling_initializer()):
    """Residual identity block: conv-bn-relu, conv-bn, plus an unprojected shortcut.

    Args:
        input_tensor: input feature map; it is added directly to the block
            output, so it must already have filters[2] channels.
        kernel_size: spatial size of both convolution kernels.
        filters: 3-tuple of channel counts; only the last two are used here
            (kept as a 3-tuple for signature parity with conv_block_2d).
        stage, block: identifiers baked into the layer names.
        is_training: batch-norm training-mode flag.
        reuse: variable-reuse flag forwarded to every layer.
        kernel_initializer: initializer for the convolution kernels.

    Returns:
        The activated output tensor, same shape as input_tensor.
    """
    # filters[0] is unused by an identity block; discard it explicitly.
    _, filters2, filters3 = filters
    conv_name_2 = 'conv' + str(stage) + '_' + str(block) + '_3x3'
    bn_name_2 = 'bn' + str(stage) + '_' + str(block) + '_3x3'
    x = tf.layers.conv2d(input_tensor, filters2, kernel_size, use_bias=False, padding='SAME', kernel_initializer=kernel_initializer, name=conv_name_2, reuse=reuse)
    x = tf.layers.batch_normalization(x, training=is_training, name=bn_name_2, reuse=reuse)
    x = tf.nn.relu(x)
    conv_name_3 = 'conv' + str(stage) + '_' + str(block) + '_1x1_increase'
    bn_name_3 = 'bn' + str(stage) + '_' + str(block) + '_1x1_increase'
    x = tf.layers.conv2d(x, filters3, (kernel_size, kernel_size), use_bias=False, padding='SAME', kernel_initializer=kernel_initializer, name=conv_name_3, reuse=reuse)
    x = tf.layers.batch_normalization(x, training=is_training, name=bn_name_3, reuse=reuse)
    # Residual connection: add the untransformed input, then activate.
    x = tf.add(input_tensor, x)
    x = tf.nn.relu(x)
    return x
def conv_block_2d(input_tensor, kernel_size, filters, stage, block, is_training, reuse, strides=(2, 2), kernel_initializer=tf.contrib.layers.variance_scaling_initializer()):
    """Residual downsampling block with a strided convolutional shortcut.

    Unlike identity_block2d, both the main path and the shortcut apply a
    strided convolution, so the output may change spatial size and channels.

    Args:
        input_tensor: input feature map.
        kernel_size: spatial size of every convolution kernel in the block.
        filters: 3-tuple of channel counts; only the last two are used here
            (kept as a 3-tuple for signature parity with identity_block2d).
        stage, block: identifiers baked into the layer names.
        is_training: batch-norm training-mode flag.
        reuse: variable-reuse flag forwarded to every layer.
        strides: strides of the first conv and of the shortcut conv.
        kernel_initializer: initializer for the convolution kernels.

    Returns:
        The activated output tensor with filters[2] channels.
    """
    # filters[0] is unused by this block; discard it explicitly.
    _, filters2, filters3 = filters
    conv_name_2 = 'conv' + str(stage) + '_' + str(block) + '_3x3'
    bn_name_2 = 'bn' + str(stage) + '_' + str(block) + '_3x3'
    x = tf.layers.conv2d(input_tensor, filters2, (kernel_size, kernel_size), use_bias=False, strides=strides, padding='SAME', kernel_initializer=kernel_initializer, name=conv_name_2, reuse=reuse)
    x = tf.layers.batch_normalization(x, training=is_training, name=bn_name_2, reuse=reuse)
    x = tf.nn.relu(x)
    conv_name_3 = 'conv' + str(stage) + '_' + str(block) + '_1x1_increase'
    bn_name_3 = 'bn' + str(stage) + '_' + str(block) + '_1x1_increase'
    x = tf.layers.conv2d(x, filters3, (kernel_size, kernel_size), use_bias=False, padding='SAME', kernel_initializer=kernel_initializer, name=conv_name_3, reuse=reuse)
    x = tf.layers.batch_normalization(x, training=is_training, name=bn_name_3, reuse=reuse)
    # Projection shortcut: match the main path's stride and channel count.
    conv_name_4 = 'conv' + str(stage) + '_' + str(block) + '_1x1_shortcut'
    bn_name_4 = 'bn' + str(stage) + '_' + str(block) + '_1x1_shortcut'
    shortcut = tf.layers.conv2d(input_tensor, filters3, (kernel_size, kernel_size), use_bias=False, strides=strides, padding='SAME', kernel_initializer=kernel_initializer, name=conv_name_4, reuse=reuse)
    shortcut = tf.layers.batch_normalization(shortcut, training=is_training, name=bn_name_4, reuse=reuse)
    x = tf.add(shortcut, x)
    x = tf.nn.relu(x)
    return x
def resnet18(input_tensor, is_training=True, pooling_and_fc=True, reuse=False, kernel_initializer=tf.contrib.layers.variance_scaling_initializer()):
    """Small ResNet-style classifier; returns 100-way logits for CIFAR-100."""
    # Stem: 3x3 conv + batch norm + ReLU.
    net = tf.layers.conv2d(input_tensor, 64, (3,3), strides=(1,1), kernel_initializer=kernel_initializer, use_bias=False, padding='SAME', name='conv1_1/3x3_s1', reuse=reuse)
    net = tf.layers.batch_normalization(net, training=is_training, name='bn1_1/3x3_s1', reuse=reuse)
    net = tf.nn.relu(net)
    # Two identity blocks at 64 channels.
    net = identity_block2d(net, 3, [48, 64, 64], stage=2, block='1b', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [48, 64, 64], stage=3, block='1c', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    # Downsample to 128 channels, then one identity block.
    net = conv_block_2d(net, 3, [96, 128, 128], stage=3, block='2a', strides=(2,2), is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [96, 128, 128], stage=3, block='2b', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    # Downsample to 256 channels, then one identity block.
    net = conv_block_2d(net, 3, [128, 256, 256], stage=4, block='3a', strides=(2,2), is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [128, 256, 256], stage=4, block='3b', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    # Downsample to 512 channels, then one identity block.
    net = conv_block_2d(net, 3, [256, 512, 512], stage=5, block='4a', strides=(2,2), is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [256, 512, 512], stage=5, block='4b', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    # Global average pooling over the spatial axes, then the classifier head.
    net = tf.reduce_mean(net, [1,2])
    prob = tf.layers.dense(net, 100, reuse=reuse, kernel_initializer=tf.contrib.layers.xavier_initializer())
    return prob
def resnet34(input_tensor, is_training=True, pooling_and_fc=True, reuse=False, kernel_initializer=tf.contrib.layers.variance_scaling_initializer()):
    """ResNet-34-style classifier; returns 100-way logits for CIFAR-100."""
    # Stem: 3x3 conv + batch norm + ReLU.
    net = tf.layers.conv2d(input_tensor, 64, (3,3), strides=(1,1), kernel_initializer=kernel_initializer, use_bias=False, padding='SAME', name='conv1_1/3x3_s1', reuse=reuse)
    net = tf.layers.batch_normalization(net, training=is_training, name='bn1_1/3x3_s1', reuse=reuse)
    net = tf.nn.relu(net)
    # Stage 1: three identity blocks at 64 channels.
    net = identity_block2d(net, 3, [48, 64, 64], stage=1, block='1a', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [48, 64, 64], stage=1, block='1b', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [48, 64, 64], stage=1, block='1c', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    # Stage 2: downsample to 128 channels, then three identity blocks.
    net = conv_block_2d(net, 3, [96, 128, 128], stage=2, block='2a', strides=(2,2), is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [96, 128, 128], stage=2, block='2b', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [96, 128, 128], stage=2, block='2c', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [96, 128, 128], stage=2, block='2d', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    # Stage 3: downsample to 256 channels, then five identity blocks.
    net = conv_block_2d(net, 3, [128, 256, 256], stage=3, block='3a', strides=(2,2), is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [128, 256, 256], stage=3, block='3b', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [128, 256, 256], stage=3, block='3c', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [128, 256, 256], stage=3, block='3d', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [128, 256, 256], stage=3, block='3e', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [128, 256, 256], stage=3, block='3f', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    # Stage 4: downsample to 512 channels, then two identity blocks.
    net = conv_block_2d(net, 3, [256, 512, 512], stage=4, block='4a', strides=(2,2), is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [256, 512, 512], stage=4, block='4b', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    net = identity_block2d(net, 3, [256, 512, 512], stage=4, block='4c', is_training=is_training, reuse=reuse, kernel_initializer=kernel_initializer)
    # Global average pooling over the spatial axes, then the classifier head.
    net = tf.reduce_mean(net, [1,2])
    prob = tf.layers.dense(net, 100, reuse=reuse, kernel_initializer=tf.contrib.layers.xavier_initializer(), bias_initializer=tf.zeros_initializer())
    return prob
| 71.009091
| 199
| 0.745743
| 1,199
| 7,811
| 4.615513
| 0.083403
| 0.208891
| 0.107336
| 0.190459
| 0.96133
| 0.953379
| 0.936935
| 0.934225
| 0.932779
| 0.92519
| 0
| 0.066895
| 0.10242
| 7,811
| 110
| 200
| 71.009091
| 0.722436
| 0.051338
| 0
| 0.450704
| 0
| 0
| 0.035405
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.056338
| false
| 0
| 0.014085
| 0
| 0.126761
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6b1f7b9a1ebaac5dd02bf19d01af7e8f19b5b095
| 460
|
py
|
Python
|
lastpass/blob.py
|
guilt/lastpass-python
|
cb4cf24aa2b73b1ab72e2c14c1a61abeeff10597
|
[
"MIT"
] | 1
|
2020-10-12T22:49:22.000Z
|
2020-10-12T22:49:22.000Z
|
lastpass/blob.py
|
guilt/lastpass-python
|
cb4cf24aa2b73b1ab72e2c14c1a61abeeff10597
|
[
"MIT"
] | null | null | null |
lastpass/blob.py
|
guilt/lastpass-python
|
cb4cf24aa2b73b1ab72e2c14c1a61abeeff10597
|
[
"MIT"
] | null | null | null |
# coding: utf-8
class Blob(object):
    """An encrypted LastPass vault blob paired with its KDF iteration count.

    Attributes:
        bytes: the raw blob payload.
        key_iteration_count: PBKDF iteration count used to derive the key.
    """

    def __init__(self, bytes_, key_iteration_count):
        self.bytes = bytes_
        self.key_iteration_count = key_iteration_count

    def encryption_key(self, username, password):
        """Derive the vault encryption key for this blob's iteration count."""
        # Imported lazily to avoid a circular import with the fetcher module.
        from . import fetcher
        return fetcher.make_key(username, password, self.key_iteration_count)

    def __eq__(self, other):
        # Return NotImplemented for foreign types instead of raising
        # AttributeError on their missing attributes.
        if not isinstance(other, Blob):
            return NotImplemented
        return self.bytes == other.bytes and self.key_iteration_count == other.key_iteration_count

    def __hash__(self):
        # Defining __eq__ alone would set __hash__ to None (Python 3),
        # making Blob instances unusable as dict keys or set members.
        return hash((self.bytes, self.key_iteration_count))
| 35.384615
| 98
| 0.715217
| 60
| 460
| 5.083333
| 0.4
| 0.236066
| 0.334426
| 0.206557
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002717
| 0.2
| 460
| 12
| 99
| 38.333333
| 0.826087
| 0.028261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.222222
| 0.111111
| 0.111111
| 0.777778
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
860a209e88a855aa7c9f4457529521f75d9be34f
| 1,269
|
py
|
Python
|
graphql/pyutils/tests/test_pair_set.py
|
ThanksBoomerang/graphql-core-legacy
|
6e2fbccdec655ce9122b84d3808c14242c4e6b96
|
[
"MIT"
] | 8
|
2020-03-23T21:34:02.000Z
|
2021-11-12T11:27:45.000Z
|
graphql/pyutils/tests/test_pair_set.py
|
ThanksBoomerang/graphql-core-legacy
|
6e2fbccdec655ce9122b84d3808c14242c4e6b96
|
[
"MIT"
] | 17
|
2020-03-14T22:22:29.000Z
|
2022-03-16T19:26:37.000Z
|
graphql/pyutils/tests/test_pair_set.py
|
ThanksBoomerang/graphql-core-legacy
|
6e2fbccdec655ce9122b84d3808c14242c4e6b96
|
[
"MIT"
] | 17
|
2020-03-23T12:06:23.000Z
|
2022-02-13T05:33:32.000Z
|
from graphql.pyutils.pair_set import PairSet
def test_pair_set():
    """PairSet stores unordered pairs keyed by the mutual-exclusion flag."""
    pairs = PairSet()
    exclusive = True
    pairs.add(1, 2, exclusive)
    pairs.add(2, 4, exclusive)
    # Membership is symmetric in the pair but sensitive to the flag.
    assert pairs.has(1, 2, exclusive)
    assert pairs.has(2, 1, exclusive)
    assert not pairs.has(1, 2, not exclusive)
    assert not pairs.has(2, 1, not exclusive)
    # The `in` operator mirrors has().
    assert (1, 2, exclusive) in pairs
    assert (2, 1, exclusive) in pairs
    assert (1, 2, (not exclusive)) not in pairs
    assert (2, 1, (not exclusive)) not in pairs
    # The second stored pair behaves the same way; absent pairs miss.
    assert pairs.has(4, 2, exclusive)
    assert pairs.has(2, 4, exclusive)
    assert not pairs.has(2, 3, exclusive)
    assert not pairs.has(1, 3, exclusive)
    assert pairs.has(4, 2, exclusive)
    assert pairs.has(2, 4, exclusive)
def test_pair_set_not_mutually_exclusive():
    """A pair stored as non-exclusive is found under either flag value."""
    pairs = PairSet()
    exclusive = False
    pairs.add(1, 2, exclusive)
    assert pairs.has(1, 2, exclusive)
    assert pairs.has(2, 1, exclusive)
    assert pairs.has(1, 2, not exclusive)
    assert pairs.has(2, 1, not exclusive)
| 30.214286
| 57
| 0.720252
| 207
| 1,269
| 4.154589
| 0.115942
| 0.474419
| 0.534884
| 0.423256
| 0.883721
| 0.802326
| 0.702326
| 0.665116
| 0.451163
| 0.367442
| 0
| 0.04058
| 0.184397
| 1,269
| 41
| 58
| 30.95122
| 0.790338
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.642857
| 1
| 0.071429
| false
| 0
| 0.035714
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86134b6b61ea921dd8bee67243e1897bd71043af
| 27,205
|
py
|
Python
|
tickets/tests.py
|
IreneG5/spss_online
|
760abcdd3304a141396ab982ca17cb992afbcecd
|
[
"CNRI-Python",
"blessing"
] | 14
|
2017-09-28T12:15:42.000Z
|
2022-01-27T20:27:01.000Z
|
tickets/tests.py
|
IreneG5/spss_online
|
760abcdd3304a141396ab982ca17cb992afbcecd
|
[
"CNRI-Python",
"blessing"
] | null | null | null |
tickets/tests.py
|
IreneG5/spss_online
|
760abcdd3304a141396ab982ca17cb992afbcecd
|
[
"CNRI-Python",
"blessing"
] | 9
|
2017-10-25T16:49:02.000Z
|
2022-01-22T10:48:18.000Z
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.core.urlresolvers import reverse
from accounts.models import User
from products.models import Product, Purchase
from tickets.models import Ticket, Comment
class TicketsPageVisitorTest(TestCase):
    """ Test tickets page for visitors (not logged in) users """
    def test_redirect_to_login_page_when_not_logged_in(self):
        # Anonymous requests to the tickets list must redirect to the login
        # page with the original destination preserved in ?next=.
        response = self.client.get(reverse('tickets-list'))
        self.assertRedirects(response, '/login/?next=/tickets/')
class TicketsPageTest(TestCase):
    """ Test Tickets Page general functionality """
    def setUp(self):
        # Create and log in a staff user so the tickets page is reachable.
        # NOTE(review): is_staff is passed as the string 'True' — Django's
        # BooleanField coerces it, but a real bool would be clearer.
        super(TicketsPageTest, self).setUp()
        self.user = User.objects.create_user(username='staff@test.com',
                                             email='staff@test.com',
                                             password='letmein1',
                                             first_name='test',
                                             last_name='test',
                                             company='test',
                                             is_staff='True')
        self.user.save()
        self.client.login(username='staff@test.com',
                          password='letmein1')
    def test_tickets_menu_item_has_class_active(self):
        # The navbar should highlight the Tickets entry on /tickets/.
        tickets_page = self.client.get('/tickets/')
        self.assertIn('id="nav-tickets" class="active"',
                      tickets_page.content)
    def tearDown(self):
        self.user.delete()
class TicketDetailPageTest(TestCase):
    """ Test Ticket Detail Page general functionality """
    def setUp(self):
        # Fixtures: a logged-in staff user, a customer, one product, one
        # ticket owned by the customer, and one customer comment on it.
        super(TicketDetailPageTest, self).setUp()
        self.user_staff = User.objects.create_user(username='staff@test.com',
                                                   email='staff@test.com',
                                                   password='letmein1',
                                                   first_name='test',
                                                   last_name='test',
                                                   company='test',
                                                   is_staff='True')
        self.user_staff.save()
        self.client.login(username='staff@test.com',
                          password='letmein1')
        self.user_active = User.objects.create_user(username='active@test.com',
                                                    email='active@test.com',
                                                    password='letmein1',
                                                    first_name='customer',
                                                    last_name='test',
                                                    company='test',
                                                    is_staff='False')
        self.user_active.save()
        self.product = Product.objects.create(name="test")
        self.product.save()
        self.ticket = Ticket.objects.create(subject='test',
                                            user=self.user_active,
                                            product=self.product)
        self.ticket.save()
        self.comment_active = Comment.objects.create(user=self.user_active,
                                                     ticket=self.ticket,
                                                     comment='test')
        self.comment_active.save()
    def test_tickets_menu_item_has_class_active(self):
        # The navbar should highlight the Tickets entry on the detail page too.
        ticket_detail = self.client.get('/tickets/1/')
        self.assertIn('id="nav-tickets" class="active"',
                      ticket_detail.content)
    def tearDown(self):
        self.user_staff.delete()
        self.user_active.delete()
        self.product.delete()
        self.comment_active.delete()
        self.ticket.delete()
class TicketsPageInactiveUserTest(TestCase):
    """ Test tickets page for logged users that are not active customers """
    def setUp(self):
        # A logged-in non-staff user with no purchase: "inactive" here means
        # no active license, so ticket creation should be disabled.
        super(TicketsPageInactiveUserTest, self).setUp()
        self.user = User.objects.create_user(username='inactive@test.com',
                                             email='inactive@test.com',
                                             password='letmein1',
                                             first_name='test',
                                             last_name='test',
                                             company='test',
                                             is_staff='False')
        self.user.save()
        self.client.login(username='inactive@test.com',
                          password='letmein1')
        self.product = Product.objects.create(name="test")
        self.product.save()
    def test_message_shown_for_inactive_user(self):
        """
        Test that a message is shown for inactive users indicating
        they are not allowed to open/edit tickets
        """
        tickets_page = self.client.get('/tickets/')
        self.assertIn('id="tickets-inactive-msg"', tickets_page.content)
    def test_open_ticket_button_not_shown_for_inactive_user(self):
        # Inactive customers must not see the "open ticket" button.
        tickets_page = self.client.get('/tickets/')
        self.assertNotIn('id="open-ticket"', tickets_page.content)
    def tearDown(self):
        self.user.delete()
class TicketsPageActiveCustomerUserTest(TestCase):
    """ Test tickets page for active customer users """
    def setUp(self):
        # A logged-in customer with a purchase whose license runs to 2999,
        # i.e. an active license for the duration of the test.
        super(TicketsPageActiveCustomerUserTest, self).setUp()
        self.user = User.objects.create_user(username='active@test.com',
                                             email='active@test.com',
                                             password='letmein1',
                                             first_name='test',
                                             last_name='test',
                                             company='test',
                                             is_staff='False')
        self.user.save()
        self.client.login(username='active@test.com',
                          password='letmein1')
        self.product = Product.objects.create(name="test")
        self.product.save()
        self.purchase = \
            Purchase.objects.create(user=self.user, product=self.product,
                                    license_end="2999-01-01T00:00:00Z")
        self.purchase.save()
    def test_message_not_shown_for_active_user(self):
        # Active customers should not see the "inactive" warning banner.
        tickets_page = self.client.get('/tickets/')
        self.assertNotIn('id="tickets-inactive-msg"',
                         tickets_page.content)
    def test_open_ticket_button_shown_for_active_user(self):
        tickets_page = self.client.get('/tickets/')
        self.assertIn('id="open-ticket"', tickets_page.content)
    def tearDown(self):
        self.user.delete()
        self.product.delete()
        self.purchase.delete()
class TicketsPageStaffUserTest(TestCase):
    """ Test tickets page for staff users """
    def setUp(self):
        # Staff users manage tickets but do not open them themselves.
        super(TicketsPageStaffUserTest, self).setUp()
        self.user_staff = User.objects.create_user(username='staff@test.com',
                                                   email='staff@test.com',
                                                   password='letmein1',
                                                   first_name='test',
                                                   last_name='test',
                                                   company='test',
                                                   is_staff='True')
        self.user_staff.save()
        self.client.login(username='staff@test.com',
                          password='letmein1')
    def test_message_not_shown_for_staff_user(self):
        tickets_page = self.client.get('/tickets/')
        self.assertNotIn('id="tickets-inactive-msg"', tickets_page.content)
    def test_open_ticket_button_not_shown_for_staff_user(self):
        # Staff should not see the customer-facing "open ticket" button.
        tickets_page = self.client.get('/tickets/')
        self.assertNotIn('id="open-ticket"', tickets_page.content)
    def tearDown(self):
        self.user_staff.delete()
class TicketsPageStatusNewTest(TestCase):
    """
    Test that tickets are created with status New (staff view)
    For this test is needed a staff user to login, a customer user
    (with an active license) to create a ticket, a ticket and a comment
    """
    def setUp(self):
        super(TicketsPageStatusNewTest, self).setUp()
        # Staff account used for the logged-in session.
        self.user_staff = User.objects.create_user(username='staff@test.com',
                                                   email='staff@test.com',
                                                   password='letmein1',
                                                   first_name='test',
                                                   last_name='test',
                                                   company='test',
                                                   is_staff='True')
        self.user_staff.save()
        self.client.login(username='staff@test.com',
                          password='letmein1')
        # Active customer who owns the ticket under test.
        self.user_active = User.objects.create_user(username='active@test.com',
                                                    email='active@test.com',
                                                    password='letmein1',
                                                    first_name='customer',
                                                    last_name='test',
                                                    company='test',
                                                    is_staff='False')
        self.user_active.save()
        self.product = Product.objects.create(name="test")
        self.product.save()
        self.purchase = \
            Purchase.objects.create(user=self.user_active,
                                    product=self.product,
                                    license_end="2999-01-01T00:00:00Z")
        self.purchase.save()
        self.ticket = Ticket.objects.create(subject='test',
                                            user=self.user_active,
                                            product=self.product)
        self.ticket.save()
        self.comment_active = Comment.objects.create(user=self.user_active,
                                                     ticket=self.ticket,
                                                     comment='test')
        self.comment_active.save()
    def test_ticket_status_new_for_new_ticket(self):
        # A ticket carrying only its opening customer comment renders as NEW.
        tickets_page = self.client.get('/tickets/')
        self.assertIn('<td id="status-1">NEW', tickets_page.content)
    def tearDown(self):
        self.user_staff.delete()
        self.user_active.delete()
        self.product.delete()
        self.purchase.delete()
        self.comment_active.delete()
        self.ticket.delete()
class TicketsPageStatusPCRTest(TestCase):
    """
    Test that the ticket status changes to PCR
    (Pending Customer Response) (staff view)
    when the last comment was added by a staff user
    For this test is needed a staff user to login, a customer user
    (with an active license) to create a ticket, a ticket
    and two comments, first one added by
    the customer and last one added by a staff user.
    """
    def setUp(self):
        super(TicketsPageStatusPCRTest, self).setUp()
        self.user_staff = User.objects.create_user(username='staff@test.com',
                                                   email='staff@test.com',
                                                   password='letmein1',
                                                   first_name='test',
                                                   last_name='test',
                                                   company='test',
                                                   is_staff='True')
        self.user_staff.save()
        self.client.login(username='staff@test.com',
                          password='letmein1')
        self.user_active = User.objects.create_user(username='active@test.com',
                                                    email='active@test.com',
                                                    password='letmein1',
                                                    first_name='customer',
                                                    last_name='test',
                                                    company='test',
                                                    is_staff='False')
        self.user_active.save()
        self.product = Product.objects.create(name="test")
        self.product.save()
        self.purchase = \
            Purchase.objects.create(user=self.user_active,
                                    product=self.product,
                                    license_end="2999-01-01T00:00:00Z")
        self.purchase.save()
        self.ticket = Ticket.objects.create(subject='test',
                                            user=self.user_active,
                                            product=self.product)
        self.ticket.save()
        # Explicit created_date values order the comments: customer first,
        # staff last, which is what should flip the status to PCR.
        self.comment_active = Comment.objects\
            .create(user=self.user_active, ticket=self.ticket,
                    comment='test-active',
                    created_date="2017-10-01T00:00Z")
        self.comment_active.save()
        self.comment_staff = Comment.objects\
            .create(user=self.user_staff, ticket=self.ticket,
                    comment='test-staff',
                    created_date="2017-10-02T00:00Z")
        self.comment_staff.save()
    def test_ticket_status_PCR_when_last_comment_by_staff(self):
        tickets_page = self.client.get('/tickets/')
        self.assertIn('<td id="status-1">PCR', tickets_page.content)
    def tearDown(self):
        self.user_staff.delete()
        self.user_active.delete()
        self.product.delete()
        self.purchase.delete()
        self.comment_active.delete()
        self.comment_staff.delete()
        self.ticket.delete()
class TicketsPageStatusPERTest(TestCase):
    """
    Test that the ticket status changes to PER
    (Pending easySPSS Response) (staff view)
    when the last comment was added by a customer.
    For this test is needed a staff user to login, a customer user
    (with an active license) to create a ticket, a ticket
    and two comments, first one added by
    the customer when opening the ticket and last one
    added also by the customer.
    """
    def setUp(self):
        super(TicketsPageStatusPERTest, self).setUp()
        self.user_staff = User.objects.create_user(username='staff@test.com',
                                                   email='staff@test.com',
                                                   password='letmein1',
                                                   first_name='test',
                                                   last_name='test',
                                                   company='test',
                                                   is_staff='True')
        self.user_staff.save()
        self.client.login(username='staff@test.com',
                          password='letmein1')
        self.user_active = User.objects.create_user(username='active@test.com',
                                                    email='active@test.com',
                                                    password='letmein1',
                                                    first_name='customer',
                                                    last_name='test',
                                                    company='test',
                                                    is_staff='False')
        self.user_active.save()
        self.product = Product.objects.create(name="test")
        self.product.save()
        self.purchase = \
            Purchase.objects.create(user=self.user_active,
                                    product=self.product,
                                    license_end="2999-01-01T00:00:00Z")
        self.purchase.save()
        self.ticket = Ticket.objects.create(subject='test',
                                            user=self.user_active,
                                            product=self.product)
        self.ticket.save()
        # Both comments come from the customer; the later created_date keeps
        # the customer as the last commenter, which should yield PER.
        self.comment_active = Comment.objects\
            .create(user=self.user_active, ticket=self.ticket,
                    comment='test-active',
                    created_date="2017-10-01T00:00Z")
        self.comment_active.save()
        self.comment_active2 = Comment.objects\
            .create(user=self.user_active, ticket=self.ticket,
                    comment='test-active',
                    created_date="2017-10-02T00:00Z")
        self.comment_active2.save()
    def test_ticket_status_PER_when_last_comment_by_customer(self):
        tickets_page = self.client.get('/tickets/')
        self.assertIn('<td id="status-1">PER', tickets_page.content)
    def tearDown(self):
        self.user_staff.delete()
        self.user_active.delete()
        self.product.delete()
        self.purchase.delete()
        self.comment_active.delete()
        self.comment_active2.delete()
        self.ticket.delete()
class TicketDetailPageDeleteCommentsTest(TestCase):
    """ Test Tickets Detail Page delete comments """
    def setUp(self):
        # Two comments on the ticket (customer + staff): with more than one
        # comment present, staff should get delete controls.
        super(TicketDetailPageDeleteCommentsTest, self).setUp()
        self.user_staff = User.objects.create_user(username='staff@test.com',
                                                   email='staff@test.com',
                                                   password='letmein1',
                                                   first_name='test',
                                                   last_name='test',
                                                   company='test',
                                                   is_staff='True')
        self.user_staff.save()
        self.client.login(username='staff@test.com',
                          password='letmein1')
        self.user_active = User.objects.create_user(username='active@test.com',
                                                    email='active@test.com',
                                                    password='letmein1',
                                                    first_name='customer',
                                                    last_name='test',
                                                    company='test',
                                                    is_staff='False')
        self.user_active.save()
        self.product = Product.objects.create(name="test")
        self.product.save()
        self.purchase = \
            Purchase.objects.create(user=self.user_active,
                                    product=self.product,
                                    license_end="2999-01-01T00:00:00Z")
        self.purchase.save()
        self.ticket = Ticket.objects.create(subject='test',
                                            user=self.user_active,
                                            product=self.product)
        self.ticket.save()
        self.comment_active = Comment.objects\
            .create(user=self.user_active, ticket=self.ticket,
                    comment='test-active',
                    created_date="2017-10-01T00:00Z")
        self.comment_active.save()
        self.comment_staff = Comment.objects\
            .create(user=self.user_staff, ticket=self.ticket,
                    comment='test-staff',
                    created_date="2017-10-02T00:00Z")
        self.comment_staff.save()
    def test_staff_can_delete_comments_if_more_than_one(self):
        tickets_page = self.client.get('/tickets/1/')
        self.assertIn('id="ticket-delete-comment', tickets_page.content)
    def tearDown(self):
        self.user_staff.delete()
        self.user_active.delete()
        self.product.delete()
        self.purchase.delete()
        self.comment_active.delete()
        self.comment_staff.delete()
        self.ticket.delete()
class TicketDetailPageCantDeleteCommentsTest(TestCase):
    """ Test Tickets Detail Page can't delete comments """

    def setUp(self):
        """Same fixtures as the delete-comments test, but with only ONE
        comment on the ticket -- the delete control must not render.

        NOTE(review): ``is_staff`` is passed as the string 'True'/'False';
        see the companion test class -- real booleans would be safer.
        """
        super(TicketDetailPageCantDeleteCommentsTest, self).setUp()
        self.user_staff = User.objects.create_user(username='staff@test.com',
                                                   email='staff@test.com',
                                                   password='letmein1',
                                                   first_name='test',
                                                   last_name='test',
                                                   company='test',
                                                   is_staff='True')
        self.user_staff.save()
        self.client.login(username='staff@test.com',
                          password='letmein1')
        self.user_active = User.objects.create_user(username='active@test.com',
                                                    email='active@test.com',
                                                    password='letmein1',
                                                    first_name='customer',
                                                    last_name='test',
                                                    company='test',
                                                    is_staff='False')
        self.user_active.save()
        self.product = Product.objects.create(name="test")
        self.product.save()
        self.purchase = \
            Purchase.objects.create(user=self.user_active,
                                    product=self.product,
                                    license_end="2999-01-01T00:00:00Z")
        self.purchase.save()
        self.ticket = Ticket.objects.create(subject='test',
                                            user=self.user_active,
                                            product=self.product)
        self.ticket.save()
        # Exactly one comment -- below the threshold for deletion.
        self.comment_active = Comment.objects.create(user=self.user_active,
                                                     ticket=self.ticket,
                                                     comment='test-active',
                                                     created_date="2017-10-01T00:00Z")
        self.comment_active.save()

    def test_staff_cant_delete_comments_if_less_than_two(self):
        """No delete-comment control when the ticket has a single comment.

        The trailing space inside the needle ('...comment" ') is
        deliberate: it matches the full closed attribute, distinguishing
        it from ids that merely start with the same prefix.
        """
        tickets_page = self.client.get('/tickets/1/')
        self.assertNotIn('id="ticket-delete-comment" ', tickets_page.content)

    def tearDown(self):
        """Remove all fixture objects (comment before the ticket)."""
        self.user_staff.delete()
        self.user_active.delete()
        self.product.delete()
        self.purchase.delete()
        self.comment_active.delete()
        self.ticket.delete()
class TicketDetailPageInactiveUserTest(TestCase):
    """ Test tickets page for logged users that are not active customers """

    def setUp(self):
        """Create a logged-in user with NO purchase (hence "inactive"),
        plus a product, a ticket and one comment owned by that user."""
        super(TicketDetailPageInactiveUserTest, self).setUp()
        self.user = User.objects.create_user(username='inactive@test.com',
                                             email='inactive@test.com',
                                             password='letmein1',
                                             first_name='test',
                                             last_name='test',
                                             company='test',
                                             is_staff='False')
        self.user.save()
        self.client.login(username='inactive@test.com',
                          password='letmein1')
        self.product = Product.objects.create(name="test")
        self.product.save()
        # Deliberately no Purchase: the user has no active license.
        self.ticket = Ticket.objects.create(subject='test', user=self.user,
                                            product=self.product)
        self.ticket.save()
        self.comment = Comment.objects.create(user=self.user,
                                              ticket=self.ticket,
                                              comment='test')
        self.comment.save()

    def test_message_shown_for_inactive_user(self):
        """
        Test that a message is shown for inactive users indicating
        they are not allowed to open/edit tickets
        """
        tickets_page = self.client.get('/tickets/1/')
        self.assertIn('id="ticket-inactive-msg"', tickets_page.content)

    def test_buttons_not_shown_for_inactive_user(self):
        """Open/close/comment controls must be hidden for inactive users."""
        tickets_page = self.client.get('/tickets/1/')
        self.assertNotIn('id="open-ticket"', tickets_page.content)
        self.assertNotIn('id="ticket-close-ticket"', tickets_page.content)
        self.assertNotIn('id="ticket-add-comment"', tickets_page.content)

    def tearDown(self):
        """Remove all fixture objects."""
        self.user.delete()
        self.product.delete()
        self.ticket.delete()
        self.comment.delete()
class TicketDetailPageClosedTicketTest(TestCase):
    """ Test tickets page when a ticket is closed """

    def setUp(self):
        """Create an active (purchasing) logged-in user and a ticket that
        is already closed (status 'CLS' with a closed_date), plus one
        comment."""
        super(TicketDetailPageClosedTicketTest, self).setUp()
        self.user = User.objects.create_user(username='active@test.com',
                                             email='active@test.com',
                                             password='letmein1',
                                             first_name='test',
                                             last_name='test',
                                             company='test',
                                             is_staff='False')
        self.user.save()
        self.client.login(username='active@test.com',
                          password='letmein1')
        self.product = Product.objects.create(name="test")
        self.product.save()
        # NOTE(review): license_end here omits seconds ("T00:00Z") unlike
        # the "T00:00:00Z" used by sibling test classes -- presumably both
        # parse, but worth unifying; confirm.
        self.purchase = \
            Purchase.objects.create(user=self.user, product=self.product,
                                    license_end="2999-01-01T00:00Z")
        self.purchase.save()
        self.ticket = Ticket.objects.create(subject='test',
                                            user=self.user,
                                            product=self.product,
                                            status='CLS',
                                            closed_date="2017-10-01T00:00Z")
        self.ticket.save()
        self.comment = Comment.objects.create(user=self.user,
                                              ticket=self.ticket,
                                              comment='test')
        self.comment.save()

    def test_buttons_not_shown_for_closed_ticket(self):
        """Open/close/comment controls are hidden on a closed ticket."""
        tickets_page = self.client.get('/tickets/1/')
        self.assertNotIn('id="open-ticket"', tickets_page.content)
        self.assertNotIn('id="ticket-close-ticket"', tickets_page.content)
        self.assertNotIn('id="ticket-add-comment"', tickets_page.content)

    def test_reopen_buttons_shown_for_closed_ticket(self):
        """The reopen control IS rendered for a closed ticket."""
        tickets_page = self.client.get('/tickets/1/')
        self.assertIn('id="ticket-reopen-ticket"', tickets_page.content)

    def test_close_date_shown_for_closed_ticket(self):
        """The closed date label is rendered for a closed ticket."""
        tickets_page = self.client.get('/tickets/1/')
        self.assertIn('Closed date:', tickets_page.content)

    def tearDown(self):
        """Remove all fixture objects."""
        self.user.delete()
        self.product.delete()
        self.purchase.delete()
        self.ticket.delete()
        self.comment.delete()
| 44.892739
| 86
| 0.490094
| 2,477
| 27,205
| 5.245458
| 0.060961
| 0.049257
| 0.047102
| 0.053106
| 0.875548
| 0.851305
| 0.841453
| 0.824675
| 0.813592
| 0.80351
| 0
| 0.014811
| 0.411799
| 27,205
| 605
| 87
| 44.966942
| 0.79715
| 0.058776
| 0
| 0.852941
| 0
| 0
| 0.094885
| 0.011467
| 0
| 0
| 0
| 0
| 0.048319
| 1
| 0.090336
| false
| 0.063025
| 0.010504
| 0
| 0.128151
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
86361c51e5202094af60dc2cc1c71b8b512c52ee
| 42,313
|
py
|
Python
|
openmdao/solvers/nonlinear/tests/test_newton.py
|
johnjasa/OpenMDAO
|
e3c14254d09ba1360ff4c57380a684bead2962e1
|
[
"Apache-2.0"
] | null | null | null |
openmdao/solvers/nonlinear/tests/test_newton.py
|
johnjasa/OpenMDAO
|
e3c14254d09ba1360ff4c57380a684bead2962e1
|
[
"Apache-2.0"
] | null | null | null |
openmdao/solvers/nonlinear/tests/test_newton.py
|
johnjasa/OpenMDAO
|
e3c14254d09ba1360ff4c57380a684bead2962e1
|
[
"Apache-2.0"
] | 1
|
2018-07-27T06:39:15.000Z
|
2018-07-27T06:39:15.000Z
|
"""Test the Newton nonlinear solver. """
import unittest
import numpy as np
import openmdao.api as om
from openmdao.core.tests.test_discrete import InternalDiscreteGroup
from openmdao.test_suite.components.double_sellar import DoubleSellar, DoubleSellarImplicit, \
SubSellar
from openmdao.test_suite.components.implicit_newton_linesearch import ImplCompTwoStates
from openmdao.test_suite.components.sellar import SellarDerivativesGrouped, \
SellarNoDerivatives, SellarDerivatives, SellarStateConnection, StateConnection, \
SellarDis1withDerivatives, SellarDis2withDerivatives
from openmdao.utils.assert_utils import assert_rel_error, assert_warning
class TestNewton(unittest.TestCase):
    """Tests for the Newton nonlinear solver.

    Most tests converge the Sellar / DoubleSellar benchmark models from
    the OpenMDAO test suite and pin the known converged values
    (y1 = 25.58830273, y2 = 12.05848819 for Sellar; y1 = 0.64, y2 = 0.80
    for each DoubleSellar subgroup), plus an iteration-count ceiling to
    catch convergence regressions.
    """

    def test_specify_newton_linear_solver_in_system(self):
        """A linear solver attached to the Newton solver itself survives
        setup and is used for the Newton update."""
        my_newton = om.NewtonSolver()
        my_newton.linear_solver = om.DirectSolver()

        prob = om.Problem(model=SellarDerivatives(nonlinear_solver=my_newton))

        prob.setup()

        self.assertIsInstance(prob.model.nonlinear_solver.linear_solver, om.DirectSolver)

        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['y2'], 12.05848819, .00001)

    def test_feature_newton_basic(self):
        """ Feature test for slotting a Newton solver and using it to solve
        Sellar.
        """
        # Local imports: this body is extracted verbatim into the docs.
        import openmdao.api as om
        from openmdao.test_suite.components.sellar import SellarDerivatives

        prob = om.Problem(model=SellarDerivatives(nonlinear_solver=om.NewtonSolver()))

        prob.setup()

        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['y2'], 12.05848819, .00001)

    def test_sellar_grouped(self):
        # Tests basic Newton solution on Sellar in a subgroup
        prob = om.Problem(model=SellarDerivativesGrouped(nonlinear_solver=om.NewtonSolver()))

        prob.setup()
        prob.set_solver_print(level=0)
        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['y2'], 12.05848819, .00001)

        # Make sure we aren't iterating like crazy
        self.assertLess(prob.model.nonlinear_solver._iter_count, 8)

    def test_sellar(self):
        # Just tests Newton on Sellar with FD derivs.
        prob = om.Problem(model=SellarNoDerivatives(nonlinear_solver=om.NewtonSolver()))

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['y2'], 12.05848819, .00001)

        # Make sure we aren't iterating like crazy
        self.assertLess(prob.model.nonlinear_solver._iter_count, 8)

    def test_line_search_deprecated(self):
        """Accessing the legacy 'line_search' attribute (read or write)
        must raise a DeprecationWarning but still behave like
        'linesearch'."""
        top = om.Problem()
        top.model.add_subsystem('px', om.IndepVarComp('x', 1.0))
        top.model.add_subsystem('comp', ImplCompTwoStates())
        top.model.connect('px.x', 'comp.x')

        top.model.nonlinear_solver = om.NewtonSolver()
        top.model.nonlinear_solver.options['maxiter'] = 10
        top.model.linear_solver = om.ScipyKrylov()

        msg = "The 'line_search' attribute provides backwards compatibility with OpenMDAO 1.x ; " \
              "use 'linesearch' instead."

        # Setter must warn...
        with assert_warning(DeprecationWarning, msg):
            top.model.nonlinear_solver.line_search = om.ArmijoGoldsteinLS(bound_enforcement='vector')

        # ...and so must the getter.
        with assert_warning(DeprecationWarning, msg):
            ls = top.model.nonlinear_solver.line_search

        ls.options['maxiter'] = 10
        ls.options['alpha'] = 1.0

        top.setup()

        # Test lower bound: should go to the lower bound and stall
        top['px.x'] = 2.0
        top['comp.y'] = 0.0
        top['comp.z'] = 1.6
        top.run_model()
        assert_rel_error(self, top['comp.z'], 1.5, 1e-8)

        # Test upper bound: should go to the upper bound and stall
        top['px.x'] = 0.5
        top['comp.y'] = 0.0
        top['comp.z'] = 2.4
        top.run_model()
        assert_rel_error(self, top['comp.z'], 2.5, 1e-8)

    def test_sellar_derivs(self):
        # Test top level Sellar (i.e., not grouped).
        # Also, piggybacked testing that makes sure we only call apply_nonlinear
        # on the head component behind the cycle break.
        prob = om.Problem()
        prob.model = SellarDerivatives(nonlinear_solver=om.NewtonSolver(),
                                       linear_solver=om.LinearBlockGS())

        prob.setup()
        prob.set_solver_print(level=0)
        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['y2'], 12.05848819, .00001)

        # Make sure we aren't iterating like crazy
        self.assertLess(prob.model.nonlinear_solver._iter_count, 8)

        ## Make sure we only call apply_linear on 'heads'
        #nd1 = prob.model.d1.execution_count
        #nd2 = prob.model.d2.execution_count
        #if prob.model.d1._run_apply == True:
        #    self.assertEqual(nd1, 2*nd2)
        #else:
        #    self.assertEqual(2*nd1, nd2)

    def test_sellar_derivs_with_Lin_GS(self):
        """Newton on Sellar with the default linear solver."""
        prob = om.Problem(model=SellarDerivatives(nonlinear_solver=om.NewtonSolver()))

        prob.setup()
        prob.set_solver_print(level=0)
        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['y2'], 12.05848819, .00001)

        # Make sure we aren't iterating like crazy
        self.assertLess(prob.model.nonlinear_solver._iter_count, 8)

    def test_sellar_state_connection(self):
        # Sellar model closes loop with state connection instead of a cycle.
        prob = om.Problem(model=SellarStateConnection(nonlinear_solver=om.NewtonSolver()))
        prob.set_solver_print(level=0)

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['state_eq.y2_command'], 12.05848819, .00001)

        # Make sure we aren't iterating like crazy
        self.assertLess(prob.model.nonlinear_solver._iter_count, 8)

    def test_sellar_state_connection_fd_system(self):
        # Sellar model closes loop with state connection instead of a cycle.
        # This test is just fd.
        prob = om.Problem(model=SellarStateConnection(nonlinear_solver=om.NewtonSolver()))
        prob.model.approx_totals(method='fd')

        prob.setup()
        prob.set_solver_print(level=0)
        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['state_eq.y2_command'], 12.05848819, .00001)

        # Make sure we aren't iterating like crazy
        self.assertLess(prob.model.nonlinear_solver._iter_count, 6)

    def test_sellar_specify_linear_solver(self):
        """Newton must use its own linear_solver for the update even when
        the model's linear solver is deliberately crippled (maxiter=1)."""
        prob = om.Problem()
        model = prob.model

        model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
        model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])

        proms = ['x', 'z', 'y1', 'state_eq.y2_actual', 'state_eq.y2_command', 'd1.y2', 'd2.y2']
        sub = model.add_subsystem('sub', om.Group(), promotes=proms)

        subgrp = sub.add_subsystem('state_eq_group', om.Group(),
                                   promotes=['state_eq.y2_actual', 'state_eq.y2_command'])
        subgrp.linear_solver = om.ScipyKrylov()
        subgrp.add_subsystem('state_eq', StateConnection())

        sub.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1'])
        sub.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1'])

        model.connect('state_eq.y2_command', 'd1.y2')
        model.connect('d2.y2', 'state_eq.y2_actual')

        model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
                                                   z=np.array([0.0, 0.0]), x=0.0, y1=0.0, y2=0.0),
                            promotes=['x', 'z', 'y1', 'obj'])
        model.connect('d2.y2', 'obj_cmp.y2')

        model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
        model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2'])
        model.connect('d2.y2', 'con_cmp2.y2')

        model.nonlinear_solver = om.NewtonSolver()

        # Use bad settings for this one so that problem doesn't converge.
        # That way, we test that we are really using Newton's Lin Solver
        # instead.
        model.linear_solver = om.ScipyKrylov()
        model.linear_solver.options['maxiter'] = 1

        # The good solver
        model.nonlinear_solver.linear_solver = om.ScipyKrylov()

        prob.set_solver_print(level=0)
        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['state_eq.y2_command'], 12.05848819, .00001)

        # Make sure we aren't iterating like crazy
        self.assertLess(model.nonlinear_solver._iter_count, 8)
        # The crippled model-level solver must never have been exercised;
        # Newton's own solver must have been.
        self.assertEqual(model.linear_solver._iter_count, 0)
        self.assertGreater(model.nonlinear_solver.linear_solver._iter_count, 0)

    def test_sellar_specify_linear_direct_solver(self):
        """Same as above, but Newton's own solver is a DirectSolver and the
        crippled solver sits on the subgroup."""
        prob = om.Problem()
        model = prob.model

        model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
        model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])

        proms = ['x', 'z', 'y1', 'state_eq.y2_actual', 'state_eq.y2_command', 'd1.y2', 'd2.y2']
        sub = model.add_subsystem('sub', om.Group(), promotes=proms)

        subgrp = sub.add_subsystem('state_eq_group', om.Group(),
                                   promotes=['state_eq.y2_actual', 'state_eq.y2_command'])
        subgrp.linear_solver = om.ScipyKrylov()
        subgrp.add_subsystem('state_eq', StateConnection())

        sub.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1'])
        sub.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1'])

        model.connect('state_eq.y2_command', 'd1.y2')
        model.connect('d2.y2', 'state_eq.y2_actual')

        model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
                                                   z=np.array([0.0, 0.0]), x=0.0, y1=0.0, y2=0.0),
                            promotes=['x', 'z', 'y1', 'obj'])
        model.connect('d2.y2', 'obj_cmp.y2')

        model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
        model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2'])
        model.connect('d2.y2', 'con_cmp2.y2')

        model.nonlinear_solver = om.NewtonSolver()

        # Use bad settings for this one so that problem doesn't converge.
        # That way, we test that we are really using Newton's Lin Solver
        # instead.
        sub.linear_solver = om.ScipyKrylov()
        sub.linear_solver.options['maxiter'] = 1

        # The good solver
        model.nonlinear_solver.linear_solver = om.DirectSolver()

        prob.set_solver_print(level=0)
        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['state_eq.y2_command'], 12.05848819, .00001)

        # Make sure we aren't iterating like crazy
        self.assertLess(model.nonlinear_solver._iter_count, 8)
        self.assertEqual(model.linear_solver._iter_count, 0)

    def test_solve_subsystems_basic(self):
        """solve_subsystems=True with sub-Newton on each DoubleSellar group
        (options set via the options dict)."""
        prob = om.Problem(model=DoubleSellar())
        model = prob.model

        g1 = model.g1
        g1.nonlinear_solver = om.NewtonSolver()
        g1.nonlinear_solver.options['rtol'] = 1.0e-5
        g1.linear_solver = om.DirectSolver(assemble_jac=True)
        g1.options['assembled_jac_type'] = 'dense'

        g2 = model.g2
        g2.nonlinear_solver = om.NewtonSolver()
        g2.nonlinear_solver.options['rtol'] = 1.0e-5
        g2.linear_solver = om.DirectSolver(assemble_jac=True)
        g2.options['assembled_jac_type'] = 'dense'

        model.nonlinear_solver = om.NewtonSolver()
        model.linear_solver = om.ScipyKrylov(assemble_jac=True)
        model.options['assembled_jac_type'] = 'dense'
        model.nonlinear_solver.options['solve_subsystems'] = True

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['g1.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g1.y2'], 0.80, .00001)
        assert_rel_error(self, prob['g2.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g2.y2'], 0.80, .00001)

    def test_solve_subsystems_basic_csc(self):
        """Same as basic, but options passed as constructor kwargs; the
        top-level assembled jacobian is left at its default (CSC)."""
        prob = om.Problem(model=DoubleSellar())
        model = prob.model

        g1 = model.g1
        g1.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g1.options['assembled_jac_type'] = 'dense'
        g1.linear_solver = om.DirectSolver(assemble_jac=True)

        g2 = model.g2
        g2.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g2.linear_solver = om.DirectSolver(assemble_jac=True)
        g2.options['assembled_jac_type'] = 'dense'

        model.nonlinear_solver = om.NewtonSolver(solve_subsystems=True)
        model.linear_solver = om.ScipyKrylov(assemble_jac=True)

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['g1.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g1.y2'], 0.80, .00001)
        assert_rel_error(self, prob['g2.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g2.y2'], 0.80, .00001)

    def test_solve_subsystems_basic_dense_jac(self):
        """Sub-Newton with plain DirectSolvers; dense assembled jacobian
        only at the top level."""
        prob = om.Problem(model=DoubleSellar())
        model = prob.model

        g1 = model.g1
        g1.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g1.linear_solver = om.DirectSolver()

        g2 = model.g2
        g2.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g2.linear_solver = om.DirectSolver()

        model.nonlinear_solver = om.NewtonSolver(solve_subsystems=True)
        model.linear_solver = om.ScipyKrylov(assemble_jac=True)
        model.options['assembled_jac_type'] = 'dense'

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['g1.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g1.y2'], 0.80, .00001)
        assert_rel_error(self, prob['g2.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g2.y2'], 0.80, .00001)

    def test_solve_subsystems_basic_dense_jac_scaling(self):
        """Dense-jac variant with reference scaling enabled on the model."""
        prob = om.Problem(model=DoubleSellar(units=None, scaling=True))
        model = prob.model

        g1 = model.g1
        g1.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g1.linear_solver = om.DirectSolver()

        g2 = model.g2
        g2.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g2.linear_solver = om.DirectSolver()

        model.nonlinear_solver = om.NewtonSolver(solve_subsystems=True)
        model.linear_solver = om.ScipyKrylov(assemble_jac=True)
        model.options['assembled_jac_type'] = 'dense'

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['g1.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g1.y2'], 0.80, .00001)
        assert_rel_error(self, prob['g2.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g2.y2'], 0.80, .00001)

    def test_solve_subsystems_basic_dense_jac_units_scaling(self):
        """Dense-jac variant with units AND scaling; y1 converges to a
        different (unit-converted) value."""
        prob = om.Problem(model=DoubleSellar(units=True, scaling=True))
        model = prob.model

        g1 = model.g1
        g1.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g1.linear_solver = om.DirectSolver()

        g2 = model.g2
        g2.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g2.linear_solver = om.DirectSolver()

        model.nonlinear_solver = om.NewtonSolver(solve_subsystems=True)
        model.linear_solver = om.ScipyKrylov(assemble_jac=True)
        model.options['assembled_jac_type'] = 'dense'

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['g1.y1'], 0.0533333333, .00001)
        assert_rel_error(self, prob['g1.y2'], 0.80, .00001)
        assert_rel_error(self, prob['g2.y1'], 0.0533333333, .00001)
        assert_rel_error(self, prob['g2.y2'], 0.80, .00001)

    def test_solve_subsystems_assembled_jac_top(self):
        """Assembled (dense) jacobian at the top level only."""
        prob = om.Problem(model=DoubleSellar())
        model = prob.model

        g1 = model.g1
        g1.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g1.linear_solver = om.DirectSolver()

        g2 = model.g2
        g2.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g2.linear_solver = om.DirectSolver()

        model.nonlinear_solver = om.NewtonSolver(solve_subsystems=True)
        model.linear_solver = om.ScipyKrylov(assemble_jac=True)
        model.options['assembled_jac_type'] = 'dense'

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['g1.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g1.y2'], 0.80, .00001)
        assert_rel_error(self, prob['g2.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g2.y2'], 0.80, .00001)

    def test_solve_subsystems_assembled_jac_top_csc(self):
        """Assembled jacobian at the top level with the default (CSC)
        format."""
        prob = om.Problem(model=DoubleSellar())
        model = prob.model

        g1 = model.g1
        g1.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g1.linear_solver = om.DirectSolver()

        g2 = model.g2
        g2.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g2.linear_solver = om.DirectSolver()

        model.nonlinear_solver = om.NewtonSolver(solve_subsystems=True)
        model.linear_solver = om.ScipyKrylov(assemble_jac=True)

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['g1.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g1.y2'], 0.80, .00001)
        assert_rel_error(self, prob['g2.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g2.y2'], 0.80, .00001)

    def test_solve_subsystems_assembled_jac_top_implicit(self):
        """Same configuration on the implicit DoubleSellar variant."""
        prob = om.Problem(model=DoubleSellarImplicit())
        model = prob.model

        g1 = model.g1
        g1.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g1.linear_solver = om.DirectSolver()

        g2 = model.g2
        g2.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g2.linear_solver = om.DirectSolver()

        model.nonlinear_solver = om.NewtonSolver(solve_subsystems=True)
        model.linear_solver = om.ScipyKrylov(assemble_jac=True)
        model.options['assembled_jac_type'] = 'dense'

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['g1.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g1.y2'], 0.80, .00001)
        assert_rel_error(self, prob['g2.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g2.y2'], 0.80, .00001)

    def test_solve_subsystems_assembled_jac_top_implicit_scaling(self):
        """Implicit variant with scaling enabled."""
        prob = om.Problem(model=DoubleSellarImplicit(scaling=True))
        model = prob.model

        g1 = model.g1
        g1.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g1.linear_solver = om.DirectSolver()

        g2 = model.g2
        g2.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g2.linear_solver = om.DirectSolver()

        model.nonlinear_solver = om.NewtonSolver(solve_subsystems=True)
        model.linear_solver = om.ScipyKrylov(assemble_jac=True)
        model.options['assembled_jac_type'] = 'dense'

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['g1.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g1.y2'], 0.80, .00001)
        assert_rel_error(self, prob['g2.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g2.y2'], 0.80, .00001)

    def test_solve_subsystems_assembled_jac_top_implicit_scaling_units(self):
        """Implicit variant with units and scaling; y1 converges to the
        unit-converted value."""
        prob = om.Problem(model=DoubleSellarImplicit(units=True, scaling=True))
        model = prob.model

        g1 = model.g1
        g1.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g1.linear_solver = om.DirectSolver()

        g2 = model.g2
        g2.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g2.linear_solver = om.DirectSolver()

        model.nonlinear_solver = om.NewtonSolver(solve_subsystems=True)
        model.linear_solver = om.ScipyKrylov(assemble_jac=True)
        model.options['assembled_jac_type'] = 'dense'

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['g1.y1'], 0.053333333, .00001)
        assert_rel_error(self, prob['g1.y2'], 0.80, .00001)
        assert_rel_error(self, prob['g2.y1'], 0.053333333, .00001)
        assert_rel_error(self, prob['g2.y2'], 0.80, .00001)

    def test_solve_subsystems_assembled_jac_subgroup(self):
        """Assembled jacobian on one subgroup only; no solve_subsystems at
        the top."""
        prob = om.Problem(model=DoubleSellar())
        model = prob.model

        g1 = model.g1
        g1.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g1.linear_solver = om.DirectSolver(assemble_jac=True)
        model.options['assembled_jac_type'] = 'dense'

        g2 = model.g2
        g2.nonlinear_solver = om.NewtonSolver(rtol=1.0e-5)
        g2.linear_solver = om.DirectSolver()

        model.nonlinear_solver = om.NewtonSolver()
        model.linear_solver = om.ScipyKrylov()

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['g1.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g1.y2'], 0.80, .00001)
        assert_rel_error(self, prob['g2.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g2.y2'], 0.80, .00001)

    def test_solve_subsystems_internals(self):
        # Here we test that this feature is doing what it should do by counting the
        # number of calls in various places.

        class CountNewton(om.NewtonSolver):
            """ This version of Newton also counts how many times it runs in total."""

            def __init__(self, **kwargs):
                super(CountNewton, self).__init__(**kwargs)
                self.total_count = 0

            def _single_iteration(self):
                super(CountNewton, self)._single_iteration()
                self.total_count += 1

        class CountDS(om.DirectSolver):
            """ This version of Newton also counts how many times it linearizes"""

            def __init__(self, **kwargs):
                super(CountDS, self).__init__(**kwargs)
                self.lin_count = 0

            def _linearize(self):
                super(CountDS, self)._linearize()
                self.lin_count += 1

        prob = om.Problem(model=DoubleSellar())
        model = prob.model

        # each SubSellar group converges itself
        g1 = model.g1
        g1.nonlinear_solver = CountNewton()
        g1.nonlinear_solver.options['rtol'] = 1.0e-5
        g1.linear_solver = CountDS()  # used for derivatives

        g2 = model.g2
        g2.nonlinear_solver = CountNewton()
        g2.nonlinear_solver.options['rtol'] = 1.0e-5
        g2.linear_solver = om.DirectSolver()

        # Converge the outer loop with Gauss Seidel, with a looser tolerance.
        model.nonlinear_solver = om.NewtonSolver()
        model.linear_solver = om.ScipyKrylov()

        # Enforce behavior: max_sub_solves = 0 means we run once during init
        model.nonlinear_solver.options['maxiter'] = 5
        model.nonlinear_solver.options['solve_subsystems'] = True
        model.nonlinear_solver.options['max_sub_solves'] = 0
        prob.set_solver_print(level=0)

        prob.setup()
        prob.run_model()

        # Verifying subsolvers ran
        self.assertEqual(g1.nonlinear_solver.total_count, 2)
        self.assertEqual(g2.nonlinear_solver.total_count, 2)
        self.assertEqual(g1.linear_solver.lin_count, 2)

        prob = om.Problem(model=DoubleSellar())
        model = prob.model

        # each SubSellar group converges itself
        g1 = model.g1
        g1.nonlinear_solver = CountNewton()
        g1.nonlinear_solver.options['rtol'] = 1.0e-5
        g1.linear_solver = CountDS()  # used for derivatives

        g2 = model.g2
        g2.nonlinear_solver = CountNewton()
        g2.nonlinear_solver.options['rtol'] = 1.0e-5
        g2.linear_solver = om.DirectSolver()

        # Converge the outer loop with Gauss Seidel, with a looser tolerance.
        model.nonlinear_solver = om.NewtonSolver()
        model.linear_solver = om.ScipyKrylov()

        # Enforce Behavior: baseline
        model.nonlinear_solver.options['maxiter'] = 5
        model.nonlinear_solver.options['solve_subsystems'] = True
        model.nonlinear_solver.options['max_sub_solves'] = 5
        prob.set_solver_print(level=0)

        prob.setup()
        prob.run_model()

        # Verifying subsolvers ran
        self.assertEqual(g1.nonlinear_solver.total_count, 5)
        self.assertEqual(g2.nonlinear_solver.total_count, 5)
        self.assertEqual(g1.linear_solver.lin_count, 5)

        prob = om.Problem(model=DoubleSellar())
        model = prob.model

        # each SubSellar group converges itself
        g1 = model.g1
        g1.nonlinear_solver = CountNewton()
        g1.nonlinear_solver.options['rtol'] = 1.0e-5
        g1.linear_solver = CountDS()  # used for derivatives

        g2 = model.g2
        g2.nonlinear_solver = CountNewton()
        g2.nonlinear_solver.options['rtol'] = 1.0e-5
        g2.linear_solver = om.DirectSolver()

        # Converge the outer loop with Gauss Seidel, with a looser tolerance.
        model.nonlinear_solver = om.NewtonSolver()
        model.linear_solver = om.ScipyKrylov()

        # Enforce behavior: max_sub_solves = 1 means we run during init and first iteration of iter_execute
        model.nonlinear_solver.options['maxiter'] = 5
        model.nonlinear_solver.options['solve_subsystems'] = True
        model.nonlinear_solver.options['max_sub_solves'] = 1
        prob.set_solver_print(level=0)

        prob.setup()
        prob.run_model()

        # Verifying subsolvers ran
        self.assertEqual(g1.nonlinear_solver.total_count, 4)
        self.assertEqual(g2.nonlinear_solver.total_count, 4)
        self.assertEqual(g1.linear_solver.lin_count, 4)

    def test_maxiter_one(self):
        # Fix bug when maxiter was set to 1.
        # This bug caused linearize to run before apply in this case.

        class ImpComp(om.ImplicitComponent):

            def setup(self):
                self.add_input('a', val=1.)
                self.add_output('x', val=0.)
                # Flag flipped by apply_nonlinear; linearize asserts on it.
                self.applied = False

                self.declare_partials(of='*', wrt='*')

            def apply_nonlinear(self, inputs, outputs, residuals):
                residuals['x'] = np.exp(outputs['x']) - \
                    inputs['a']**2 * outputs['x']**2
                self.applied = True

            def solve_nonlinear(self, inputs, outputs):
                pass

            def linearize(self, inputs, outputs, jacobian):
                jacobian['x', 'x'] = np.exp(outputs['x']) - \
                    2 * inputs['a']**2 * outputs['x']
                jacobian['x', 'a'] = -2 * inputs['a'] * outputs['x']**2

                # The regression being guarded against: linearize must not
                # run before apply_nonlinear when maxiter == 1.
                if not self.applied:
                    raise RuntimeError("Bug! Linearize called before Apply!")

        prob = om.Problem()
        root = prob.model
        root.add_subsystem('p1', om.IndepVarComp('a', 1.0))
        root.add_subsystem('comp', ImpComp())
        root.connect('p1.a', 'comp.a')

        root.nonlinear_solver = om.NewtonSolver()
        root.nonlinear_solver.options['maxiter'] = 1
        prob.set_solver_print(level=0)

        prob.setup()
        prob.run_model()

    def test_err_on_maxiter_deprecated(self):
        # Raise AnalysisError when it fails to converge
        prob = om.Problem()
        nlsolver = om.NewtonSolver()
        prob.model = SellarDerivatives(nonlinear_solver=nlsolver,
                                       linear_solver=om.LinearBlockGS())

        # Legacy option name: must warn but still enable the behavior.
        nlsolver.options['err_on_maxiter'] = True
        nlsolver.options['maxiter'] = 1

        prob.setup()
        prob.set_solver_print(level=0)

        msg = "The 'err_on_maxiter' option provides backwards compatibility " + \
              "with earlier version of OpenMDAO; use options['err_on_non_converge'] " + \
              "instead."

        #prob.final_setup()
        with assert_warning(DeprecationWarning, msg):
            prob.final_setup()

        with self.assertRaises(om.AnalysisError) as context:
            prob.run_model()

        msg = "Solver 'NL: Newton' on system '' failed to converge in 1 iterations."
        self.assertEqual(str(context.exception), msg)

    def test_err_on_non_converge(self):
        # Raise AnalysisError when it fails to converge
        prob = om.Problem()
        nlsolver = om.NewtonSolver()
        prob.model = SellarDerivatives(nonlinear_solver=nlsolver,
                                       linear_solver=om.LinearBlockGS())

        nlsolver.options['err_on_non_converge'] = True
        nlsolver.options['maxiter'] = 1

        prob.setup()
        prob.set_solver_print(level=0)

        with self.assertRaises(om.AnalysisError) as context:
            prob.run_driver()

        msg = "Solver 'NL: Newton' on system '' failed to converge in 1 iterations."
        self.assertEqual(str(context.exception), msg)

    def test_err_message_inf_nan(self):
        """NaN in an input must produce the dedicated inf/NaN error
        message rather than the generic non-convergence one."""
        prob = om.Problem()
        nlsolver = om.NewtonSolver()
        prob.model = SellarDerivatives(nonlinear_solver=nlsolver,
                                       linear_solver=om.LinearBlockGS())

        nlsolver.options['err_on_non_converge'] = True
        nlsolver.options['maxiter'] = 1

        prob.setup()
        prob.set_solver_print(level=0)

        # Poison the input so residuals go NaN immediately.
        prob['x'] = np.nan

        with self.assertRaises(om.AnalysisError) as context:
            prob.run_model()

        msg = "Solver 'NL: Newton' on system '': residuals contain 'inf' or 'NaN' after 0 iterations."
        self.assertEqual(str(context.exception), msg)

    def test_relevancy_for_newton(self):
        """Total derivatives through a Newton-converged implicit component
        must be correct (relevancy reduction must not drop the implicit
        state)."""
        class TestImplCompSimple(om.ImplicitComponent):

            def setup(self):
                self.add_input('a', val=1.)
                self.add_output('x', val=0.)

                self.declare_partials(of='*', wrt='*')

            def apply_nonlinear(self, inputs, outputs, residuals):
                residuals['x'] = np.exp(outputs['x']) - \
                    inputs['a']**2 * outputs['x']**2

            def linearize(self, inputs, outputs, jacobian):
                jacobian['x', 'x'] = np.exp(outputs['x']) - \
                    2 * inputs['a']**2 * outputs['x']
                jacobian['x', 'a'] = -2 * inputs['a'] * outputs['x']**2

        prob = om.Problem()
        model = prob.model

        model.add_subsystem('p1', om.IndepVarComp('x', 3.0))
        model.add_subsystem('icomp', TestImplCompSimple())
        model.add_subsystem('ecomp', om.ExecComp('y = x*p', p=1.0))

        model.connect('p1.x', 'ecomp.x')
        model.connect('icomp.x', 'ecomp.p')

        # NOTE(review): the positional 3.0 here binds to add_design_var's
        # second parameter (the lower bound), not an initial value --
        # confirm that is intended.
        model.add_design_var('p1.x', 3.0)
        model.add_objective('ecomp.y')

        model.nonlinear_solver = om.NewtonSolver()
        model.linear_solver = om.ScipyKrylov()

        prob.setup()
        prob.run_model()

        J = prob.compute_totals()
        assert_rel_error(self, J['ecomp.y', 'p1.x'][0][0], -0.703467422498, 1e-6)

    def test_linsearch_3_deprecation(self):
        """Final setup must warn that the default Newton line search will
        change in OpenMDAO 3.0."""
        prob = om.Problem(model=SellarDerivatives(nonlinear_solver=om.NewtonSolver()))

        prob.setup()

        msg = 'Deprecation warning: In V 3.0, the default Newton solver setup will change ' + \
              'to use the BoundsEnforceLS line search.'

        with assert_warning(DeprecationWarning, msg):
            prob.final_setup()
class TestNewtonFeatures(unittest.TestCase):
    """Feature tests for NewtonSolver options, pinned to known Sellar convergence values.

    Each test builds the Sellar (or SubSellar/DoubleSellar) model from scratch and
    checks either converged outputs or option-specific behavior.  The in-test imports
    are intentional: these tests double as standalone documentation examples.
    """

    def test_feature_basic(self):
        """Default Newton + DirectSolver converges Sellar to the known solution."""
        import numpy as np
        import openmdao.api as om
        from openmdao.test_suite.components.sellar import SellarDis1withDerivatives, SellarDis2withDerivatives

        prob = om.Problem()
        model = prob.model

        model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
        model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
        model.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
        model.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
        model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
                                                   z=np.array([0.0, 0.0]), x=0.0),
                            promotes=['obj', 'x', 'z', 'y1', 'y2'])
        model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
        model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])

        model.linear_solver = om.DirectSolver()
        model.nonlinear_solver = om.NewtonSolver()

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['y2'], 12.05848819, .00001)

    def test_feature_maxiter(self):
        """With maxiter=2 Newton stops early; outputs match the 2-iteration values."""
        import numpy as np
        import openmdao.api as om
        from openmdao.test_suite.components.sellar import SellarDis1withDerivatives, SellarDis2withDerivatives

        prob = om.Problem()
        model = prob.model

        model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
        model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
        model.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
        model.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
        model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
                                                   z=np.array([0.0, 0.0]), x=0.0),
                            promotes=['obj', 'x', 'z', 'y1', 'y2'])
        model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
        model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])

        model.linear_solver = om.DirectSolver()

        nlgbs = model.nonlinear_solver = om.NewtonSolver()
        nlgbs.options['maxiter'] = 2

        prob.setup()
        prob.run_model()

        # Partially-converged values after exactly 2 Newton iterations.
        assert_rel_error(self, prob['y1'], 25.5878516779, .00001)
        assert_rel_error(self, prob['y2'], 12.0607416105, .00001)

    def test_feature_rtol(self):
        """A loose rtol (1e-3) terminates Newton at the same partially-converged point."""
        import numpy as np
        import openmdao.api as om
        from openmdao.test_suite.components.sellar import SellarDis1withDerivatives, SellarDis2withDerivatives

        prob = om.Problem()
        model = prob.model

        model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
        model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
        model.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
        model.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
        model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
                                                   z=np.array([0.0, 0.0]), x=0.0),
                            promotes=['obj', 'x', 'z', 'y1', 'y2'])
        model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
        model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])

        model.linear_solver = om.DirectSolver()

        nlgbs = model.nonlinear_solver = om.NewtonSolver()
        nlgbs.options['rtol'] = 1e-3

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.5878516779, .00001)
        assert_rel_error(self, prob['y2'], 12.0607416105, .00001)

    def test_feature_atol(self):
        """A loose atol (1e-4) terminates Newton slightly before full convergence."""
        import numpy as np
        import openmdao.api as om
        from openmdao.test_suite.components.sellar import SellarDis1withDerivatives, SellarDis2withDerivatives

        prob = om.Problem()
        model = prob.model

        model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
        model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
        model.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
        model.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
        model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
                                                   z=np.array([0.0, 0.0]), x=0.0),
                            promotes=['obj', 'x', 'z', 'y1', 'y2'])
        model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
        model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])

        model.linear_solver = om.DirectSolver()

        nlgbs = model.nonlinear_solver = om.NewtonSolver()
        nlgbs.options['atol'] = 1e-4

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.5882856302, .00001)
        assert_rel_error(self, prob['y2'], 12.05848819, .00001)

    def test_feature_linear_solver(self):
        """Newton can carry its own linear solver, distinct from the model's."""
        import numpy as np
        import openmdao.api as om
        from openmdao.test_suite.components.sellar import SellarDis1withDerivatives, SellarDis2withDerivatives

        prob = om.Problem()
        model = prob.model

        model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
        model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
        model.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
        model.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
        model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
                                                   z=np.array([0.0, 0.0]), x=0.0),
                            promotes=['obj', 'x', 'z', 'y1', 'y2'])
        model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
        model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])

        # Model-level linear solver differs from the one Newton uses internally.
        model.linear_solver = om.LinearBlockGS()

        nlgbs = model.nonlinear_solver = om.NewtonSolver()
        nlgbs.linear_solver = om.DirectSolver()

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['y2'], 12.05848819, .00001)

    def test_feature_max_sub_solves(self):
        """solve_subsystems with max_sub_solves=0 limits subsystem solves in nested Newton."""
        import numpy as np
        import openmdao.api as om
        from openmdao.test_suite.components.double_sellar import SubSellar

        prob = om.Problem()
        model = prob.model

        model.add_subsystem('g1', SubSellar())
        model.add_subsystem('g2', SubSellar())

        model.connect('g1.y2', 'g2.x')
        model.connect('g2.y2', 'g1.x')

        # Converge the outer loop with Gauss Seidel, with a looser tolerance.
        model.nonlinear_solver = om.NewtonSolver()
        model.linear_solver = om.DirectSolver()

        g1 = model.g1
        g1.nonlinear_solver = om.NewtonSolver()
        g1.nonlinear_solver.options['rtol'] = 1.0e-5
        g1.linear_solver = om.DirectSolver()

        g2 = model.g2
        g2.nonlinear_solver = om.NewtonSolver()
        g2.nonlinear_solver.options['rtol'] = 1.0e-5
        g2.linear_solver = om.DirectSolver()

        # NOTE(review): this re-assignment replaces the solvers set above;
        # kept as-is since the test only checks that run_model completes.
        model.nonlinear_solver = om.NewtonSolver()
        model.linear_solver = om.ScipyKrylov()

        model.nonlinear_solver.options['solve_subsystems'] = True
        model.nonlinear_solver.options['max_sub_solves'] = 0

        prob.setup()
        prob.run_model()

    def test_feature_err_on_non_converge(self):
        """err_on_non_converge turns a non-converged Newton run into AnalysisError."""
        import numpy as np
        import openmdao.api as om
        from openmdao.test_suite.components.sellar import SellarDis1withDerivatives, SellarDis2withDerivatives

        prob = om.Problem()
        model = prob.model

        model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
        model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
        model.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
        model.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
        model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
                                                   z=np.array([0.0, 0.0]), x=0.0),
                            promotes=['obj', 'x', 'z', 'y1', 'y2'])
        model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
        model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])

        model.linear_solver = om.DirectSolver()

        nlgbs = model.nonlinear_solver = om.NewtonSolver()
        nlgbs.options['maxiter'] = 1
        nlgbs.options['err_on_non_converge'] = True

        prob.setup()

        # maxiter=1 cannot converge Sellar, so AnalysisError is expected here.
        try:
            prob.run_model()
        except om.AnalysisError:
            pass

    def test_solve_subsystems_basic(self):
        """Nested Newton with solve_subsystems=True converges DoubleSellar."""
        import openmdao.api as om
        from openmdao.test_suite.components.double_sellar import DoubleSellar

        prob = om.Problem(model=DoubleSellar())
        model = prob.model

        g1 = model.g1
        g1.nonlinear_solver = om.NewtonSolver()
        g1.nonlinear_solver.options['rtol'] = 1.0e-5
        g1.linear_solver = om.DirectSolver()

        g2 = model.g2
        g2.nonlinear_solver = om.NewtonSolver()
        g2.nonlinear_solver.options['rtol'] = 1.0e-5
        g2.linear_solver = om.DirectSolver()

        model.nonlinear_solver = om.NewtonSolver()
        model.linear_solver = om.ScipyKrylov()

        model.nonlinear_solver.options['solve_subsystems'] = True

        prob.setup()
        prob.run_model()

        assert_rel_error(self, prob['g1.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g1.y2'], 0.80, .00001)
        assert_rel_error(self, prob['g2.y1'], 0.64, .00001)
        assert_rel_error(self, prob['g2.y2'], 0.80, .00001)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| 37.181898
| 110
| 0.618179
| 5,341
| 42,313
| 4.725894
| 0.06759
| 0.040886
| 0.045482
| 0.057763
| 0.868072
| 0.844856
| 0.828058
| 0.811576
| 0.797354
| 0.783329
| 0
| 0.056736
| 0.246874
| 42,313
| 1,137
| 111
| 37.2146
| 0.735338
| 0.062369
| 0
| 0.779412
| 0
| 0.001337
| 0.083306
| 0.000758
| 0
| 0
| 0
| 0
| 0.15107
| 1
| 0.064171
| false
| 0.002674
| 0.044118
| 0
| 0.11631
| 0.018717
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
865c07a372e648094a619cbb1cfcdd64a73a30de
| 34,628
|
py
|
Python
|
coniii/ising_eqn/ising_eqn_7.py
|
eltrompetero/coniii
|
d698696c11e12a62fe3340eb2f4d3344145a96dd
|
[
"MIT"
] | 14
|
2019-04-29T16:51:05.000Z
|
2022-02-05T06:33:15.000Z
|
coniii/ising_eqn/ising_eqn_7.py
|
eltrompetero/coniii
|
d698696c11e12a62fe3340eb2f4d3344145a96dd
|
[
"MIT"
] | 28
|
2018-10-30T03:43:11.000Z
|
2021-10-12T11:56:56.000Z
|
coniii/ising_eqn/ising_eqn_7.py
|
eltrompetero/coniii
|
d698696c11e12a62fe3340eb2f4d3344145a96dd
|
[
"MIT"
] | 7
|
2019-04-29T16:34:41.000Z
|
2021-08-23T00:42:42.000Z
|
# MIT License
#
# Copyright (c) 2019 Edward D. Lee, Bryan C. Daniels
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Equations for 7-spin Ising model.
# Written on 2019/09/19.
from numpy import zeros, exp, array, prod, isnan
from ..enumerate import fast_logsumexp
def calc_observables(params):
    """
    Calculate the 28 observables of the 7-spin Ising model.

    Give all parameters concatenated into one array from lowest to highest order:
    the 7 fields H followed by the 21 pairwise couplings J.
    Returns all correlations: the 7 single-spin means followed by the 21 pairwise
    correlations, in the same ordering as the parameters.

    Parameters
    ----------
    params : ndarray
        Length-28 vector; params[0:7] are fields, params[7:28] are couplings.

    Returns
    -------
    ndarray
        Length-28 vector of observables (NaN entries are zeroed).
    """
    Cout = zeros((28))
    H = params[0:7]
    J = params[7:28]
    # Energies (log-weights) of all 2**7 = 128 spin configurations, enumerated in
    # binary-counting order with spin index 6 toggling fastest.  This table is
    # machine-generated; do not edit by hand.
    energyTerms = array([ +0, +H[6]+0, +H[5]+0, +H[5]+H[6]+J[20], +H[4]+0, +H[4]+H[6]+J[19], +H[4]+H[5]+J[18], +H[4]+H[5]+H[6]+
                          J[18]+J[19]+J[20], +H[3]+0, +H[3]+H[6]+J[17], +H[3]+H[5]+J[16], +H[3]+H[5]+H[6]+J[16]+J[17]+J[20], +
                          H[3]+H[4]+J[15], +H[3]+H[4]+H[6]+J[15]+J[17]+J[19], +H[3]+H[4]+H[5]+J[15]+J[16]+J[18], +H[3]+H[4]+H[5]+
                          H[6]+J[15]+J[16]+J[17]+J[18]+J[19]+J[20], +H[2]+0, +H[2]+H[6]+J[14], +H[2]+H[5]+J[13], +H[2]+H[5]+H[6]+
                          J[13]+J[14]+J[20], +H[2]+H[4]+J[12], +H[2]+H[4]+H[6]+J[12]+J[14]+J[19], +H[2]+H[4]+H[5]+J[12]+J[13]+
                          J[18], +H[2]+H[4]+H[5]+H[6]+J[12]+J[13]+J[14]+J[18]+J[19]+J[20], +H[2]+H[3]+J[11], +H[2]+H[3]+H[6]+J[11]+
                          J[14]+J[17], +H[2]+H[3]+H[5]+J[11]+J[13]+J[16], +H[2]+H[3]+H[5]+H[6]+J[11]+J[13]+J[14]+J[16]+J[17]+J[20], +
                          H[2]+H[3]+H[4]+J[11]+J[12]+J[15], +H[2]+H[3]+H[4]+H[6]+J[11]+J[12]+J[14]+J[15]+J[17]+J[19], +H[2]+H[3]+
                          H[4]+H[5]+J[11]+J[12]+J[13]+J[15]+J[16]+J[18], +H[2]+H[3]+H[4]+H[5]+H[6]+J[11]+J[12]+J[13]+J[14]+J[15]+
                          J[16]+J[17]+J[18]+J[19]+J[20], +H[1]+0, +H[1]+H[6]+J[10], +H[1]+H[5]+J[9], +H[1]+H[5]+H[6]+J[9]+J[10]+
                          J[20], +H[1]+H[4]+J[8], +H[1]+H[4]+H[6]+J[8]+J[10]+J[19], +H[1]+H[4]+H[5]+J[8]+J[9]+J[18], +H[1]+H[4]+
                          H[5]+H[6]+J[8]+J[9]+J[10]+J[18]+J[19]+J[20], +H[1]+H[3]+J[7], +H[1]+H[3]+H[6]+J[7]+J[10]+J[17], +H[1]+
                          H[3]+H[5]+J[7]+J[9]+J[16], +H[1]+H[3]+H[5]+H[6]+J[7]+J[9]+J[10]+J[16]+J[17]+J[20], +H[1]+H[3]+H[4]+J[7]+
                          J[8]+J[15], +H[1]+H[3]+H[4]+H[6]+J[7]+J[8]+J[10]+J[15]+J[17]+J[19], +H[1]+H[3]+H[4]+H[5]+J[7]+J[8]+J[9]+
                          J[15]+J[16]+J[18], +H[1]+H[3]+H[4]+H[5]+H[6]+J[7]+J[8]+J[9]+J[10]+J[15]+J[16]+J[17]+J[18]+J[19]+J[20], +
                          H[1]+H[2]+J[6], +H[1]+H[2]+H[6]+J[6]+J[10]+J[14], +H[1]+H[2]+H[5]+J[6]+J[9]+J[13], +H[1]+H[2]+H[5]+H[6]+
                          J[6]+J[9]+J[10]+J[13]+J[14]+J[20], +H[1]+H[2]+H[4]+J[6]+J[8]+J[12], +H[1]+H[2]+H[4]+H[6]+J[6]+J[8]+J[10]+
                          J[12]+J[14]+J[19], +H[1]+H[2]+H[4]+H[5]+J[6]+J[8]+J[9]+J[12]+J[13]+J[18], +H[1]+H[2]+H[4]+H[5]+H[6]+
                          J[6]+J[8]+J[9]+J[10]+J[12]+J[13]+J[14]+J[18]+J[19]+J[20], +H[1]+H[2]+H[3]+J[6]+J[7]+J[11], +H[1]+H[2]+
                          H[3]+H[6]+J[6]+J[7]+J[10]+J[11]+J[14]+J[17], +H[1]+H[2]+H[3]+H[5]+J[6]+J[7]+J[9]+J[11]+J[13]+J[16], +
                          H[1]+H[2]+H[3]+H[5]+H[6]+J[6]+J[7]+J[9]+J[10]+J[11]+J[13]+J[14]+J[16]+J[17]+J[20], +H[1]+H[2]+H[3]+H[4]+
                          J[6]+J[7]+J[8]+J[11]+J[12]+J[15], +H[1]+H[2]+H[3]+H[4]+H[6]+J[6]+J[7]+J[8]+J[10]+J[11]+J[12]+J[14]+J[15]+
                          J[17]+J[19], +H[1]+H[2]+H[3]+H[4]+H[5]+J[6]+J[7]+J[8]+J[9]+J[11]+J[12]+J[13]+J[15]+J[16]+J[18], +H[1]+
                          H[2]+H[3]+H[4]+H[5]+H[6]+J[6]+J[7]+J[8]+J[9]+J[10]+J[11]+J[12]+J[13]+J[14]+J[15]+J[16]+J[17]+J[18]+J[19]+
                          J[20], +H[0]+0, +H[0]+H[6]+J[5], +H[0]+H[5]+J[4], +H[0]+H[5]+H[6]+J[4]+J[5]+J[20], +H[0]+H[4]+J[3], +
                          H[0]+H[4]+H[6]+J[3]+J[5]+J[19], +H[0]+H[4]+H[5]+J[3]+J[4]+J[18], +H[0]+H[4]+H[5]+H[6]+J[3]+J[4]+J[5]+
                          J[18]+J[19]+J[20], +H[0]+H[3]+J[2], +H[0]+H[3]+H[6]+J[2]+J[5]+J[17], +H[0]+H[3]+H[5]+J[2]+J[4]+J[16], +
                          H[0]+H[3]+H[5]+H[6]+J[2]+J[4]+J[5]+J[16]+J[17]+J[20], +H[0]+H[3]+H[4]+J[2]+J[3]+J[15], +H[0]+H[3]+H[4]+
                          H[6]+J[2]+J[3]+J[5]+J[15]+J[17]+J[19], +H[0]+H[3]+H[4]+H[5]+J[2]+J[3]+J[4]+J[15]+J[16]+J[18], +H[0]+
                          H[3]+H[4]+H[5]+H[6]+J[2]+J[3]+J[4]+J[5]+J[15]+J[16]+J[17]+J[18]+J[19]+J[20], +H[0]+H[2]+J[1], +H[0]+
                          H[2]+H[6]+J[1]+J[5]+J[14], +H[0]+H[2]+H[5]+J[1]+J[4]+J[13], +H[0]+H[2]+H[5]+H[6]+J[1]+J[4]+J[5]+J[13]+
                          J[14]+J[20], +H[0]+H[2]+H[4]+J[1]+J[3]+J[12], +H[0]+H[2]+H[4]+H[6]+J[1]+J[3]+J[5]+J[12]+J[14]+J[19], +
                          H[0]+H[2]+H[4]+H[5]+J[1]+J[3]+J[4]+J[12]+J[13]+J[18], +H[0]+H[2]+H[4]+H[5]+H[6]+J[1]+J[3]+J[4]+J[5]+
                          J[12]+J[13]+J[14]+J[18]+J[19]+J[20], +H[0]+H[2]+H[3]+J[1]+J[2]+J[11], +H[0]+H[2]+H[3]+H[6]+J[1]+J[2]+
                          J[5]+J[11]+J[14]+J[17], +H[0]+H[2]+H[3]+H[5]+J[1]+J[2]+J[4]+J[11]+J[13]+J[16], +H[0]+H[2]+H[3]+H[5]+
                          H[6]+J[1]+J[2]+J[4]+J[5]+J[11]+J[13]+J[14]+J[16]+J[17]+J[20], +H[0]+H[2]+H[3]+H[4]+J[1]+J[2]+J[3]+J[11]+
                          J[12]+J[15], +H[0]+H[2]+H[3]+H[4]+H[6]+J[1]+J[2]+J[3]+J[5]+J[11]+J[12]+J[14]+J[15]+J[17]+J[19], +H[0]+
                          H[2]+H[3]+H[4]+H[5]+J[1]+J[2]+J[3]+J[4]+J[11]+J[12]+J[13]+J[15]+J[16]+J[18], +H[0]+H[2]+H[3]+H[4]+H[5]+
                          H[6]+J[1]+J[2]+J[3]+J[4]+J[5]+J[11]+J[12]+J[13]+J[14]+J[15]+J[16]+J[17]+J[18]+J[19]+J[20], +H[0]+H[1]+
                          J[0], +H[0]+H[1]+H[6]+J[0]+J[5]+J[10], +H[0]+H[1]+H[5]+J[0]+J[4]+J[9], +H[0]+H[1]+H[5]+H[6]+J[0]+J[4]+
                          J[5]+J[9]+J[10]+J[20], +H[0]+H[1]+H[4]+J[0]+J[3]+J[8], +H[0]+H[1]+H[4]+H[6]+J[0]+J[3]+J[5]+J[8]+J[10]+
                          J[19], +H[0]+H[1]+H[4]+H[5]+J[0]+J[3]+J[4]+J[8]+J[9]+J[18], +H[0]+H[1]+H[4]+H[5]+H[6]+J[0]+J[3]+J[4]+
                          J[5]+J[8]+J[9]+J[10]+J[18]+J[19]+J[20], +H[0]+H[1]+H[3]+J[0]+J[2]+J[7], +H[0]+H[1]+H[3]+H[6]+J[0]+J[2]+
                          J[5]+J[7]+J[10]+J[17], +H[0]+H[1]+H[3]+H[5]+J[0]+J[2]+J[4]+J[7]+J[9]+J[16], +H[0]+H[1]+H[3]+H[5]+H[6]+
                          J[0]+J[2]+J[4]+J[5]+J[7]+J[9]+J[10]+J[16]+J[17]+J[20], +H[0]+H[1]+H[3]+H[4]+J[0]+J[2]+J[3]+J[7]+J[8]+
                          J[15], +H[0]+H[1]+H[3]+H[4]+H[6]+J[0]+J[2]+J[3]+J[5]+J[7]+J[8]+J[10]+J[15]+J[17]+J[19], +H[0]+H[1]+H[3]+
                          H[4]+H[5]+J[0]+J[2]+J[3]+J[4]+J[7]+J[8]+J[9]+J[15]+J[16]+J[18], +H[0]+H[1]+H[3]+H[4]+H[5]+H[6]+J[0]+
                          J[2]+J[3]+J[4]+J[5]+J[7]+J[8]+J[9]+J[10]+J[15]+J[16]+J[17]+J[18]+J[19]+J[20], +H[0]+H[1]+H[2]+J[0]+J[1]+
                          J[6], +H[0]+H[1]+H[2]+H[6]+J[0]+J[1]+J[5]+J[6]+J[10]+J[14], +H[0]+H[1]+H[2]+H[5]+J[0]+J[1]+J[4]+J[6]+
                          J[9]+J[13], +H[0]+H[1]+H[2]+H[5]+H[6]+J[0]+J[1]+J[4]+J[5]+J[6]+J[9]+J[10]+J[13]+J[14]+J[20], +H[0]+H[1]+
                          H[2]+H[4]+J[0]+J[1]+J[3]+J[6]+J[8]+J[12], +H[0]+H[1]+H[2]+H[4]+H[6]+J[0]+J[1]+J[3]+J[5]+J[6]+J[8]+J[10]+
                          J[12]+J[14]+J[19], +H[0]+H[1]+H[2]+H[4]+H[5]+J[0]+J[1]+J[3]+J[4]+J[6]+J[8]+J[9]+J[12]+J[13]+J[18], +
                          H[0]+H[1]+H[2]+H[4]+H[5]+H[6]+J[0]+J[1]+J[3]+J[4]+J[5]+J[6]+J[8]+J[9]+J[10]+J[12]+J[13]+J[14]+J[18]+
                          J[19]+J[20], +H[0]+H[1]+H[2]+H[3]+J[0]+J[1]+J[2]+J[6]+J[7]+J[11], +H[0]+H[1]+H[2]+H[3]+H[6]+J[0]+J[1]+
                          J[2]+J[5]+J[6]+J[7]+J[10]+J[11]+J[14]+J[17], +H[0]+H[1]+H[2]+H[3]+H[5]+J[0]+J[1]+J[2]+J[4]+J[6]+J[7]+
                          J[9]+J[11]+J[13]+J[16], +H[0]+H[1]+H[2]+H[3]+H[5]+H[6]+J[0]+J[1]+J[2]+J[4]+J[5]+J[6]+J[7]+J[9]+J[10]+
                          J[11]+J[13]+J[14]+J[16]+J[17]+J[20], +H[0]+H[1]+H[2]+H[3]+H[4]+J[0]+J[1]+J[2]+J[3]+J[6]+J[7]+J[8]+J[11]+
                          J[12]+J[15], +H[0]+H[1]+H[2]+H[3]+H[4]+H[6]+J[0]+J[1]+J[2]+J[3]+J[5]+J[6]+J[7]+J[8]+J[10]+J[11]+J[12]+
                          J[14]+J[15]+J[17]+J[19], +H[0]+H[1]+H[2]+H[3]+H[4]+H[5]+J[0]+J[1]+J[2]+J[3]+J[4]+J[6]+J[7]+J[8]+J[9]+
                          J[11]+J[12]+J[13]+J[15]+J[16]+J[18], +H[0]+H[1]+H[2]+H[3]+H[4]+H[5]+H[6]+J[0]+J[1]+J[2]+J[3]+J[4]+J[5]+
                          J[6]+J[7]+J[8]+J[9]+J[10]+J[11]+J[12]+J[13]+J[14]+J[15]+J[16]+J[17]+J[18]+J[19]+J[20],])
    # Log partition function over all configurations.
    logZ = fast_logsumexp(energyTerms)[0]
    # Each observable below is a masked logsumexp over the configurations in which
    # that observable is active; fast_logsumexp returns (log|sum|, sign), so
    # Cout[k] = sign * exp(log_num - logZ).  Masks are machine-generated.
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,
                                       1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
                                       1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1])
    Cout[0] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,
                                       1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
                                       1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1])
    Cout[1] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1])
    Cout[2] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,
                                       0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,
                                       1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,
                                       1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1])
    Cout[3] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,
                                       1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,
                                       0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,
                                       1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1])
    Cout[4] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,
                                       0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,
                                       1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,
                                       1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1])
    Cout[5] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,
                                       1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,
                                       0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,
                                       1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1])
    Cout[6] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
                                       1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1])
    Cout[7] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1])
    Cout[8] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,
                                       1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,
                                       1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1])
    Cout[9] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,
                                       0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,
                                       1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1])
    Cout[10] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,
                                       1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,
                                       1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1])
    Cout[11] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,1,
                                       0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,
                                       1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1])
    Cout[12] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1])
    Cout[13] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,
                                       1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1])
    Cout[14] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
                                       1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,
                                       1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1])
    Cout[15] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,
                                       0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,
                                       1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1])
    Cout[16] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
                                       1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,
                                       1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1])
    Cout[17] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1])
    Cout[18] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1])
    Cout[19] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1])
    Cout[20] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,0,0,0,0,
                                       0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,
                                       0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
                                       0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1])
    Cout[21] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,
                                       0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
                                       0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,
                                       1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1])
    Cout[22] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,
                                       0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
                                       1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,
                                       1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1])
    Cout[23] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,0,0,
                                       0,0,0,0,1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,
                                       0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,
                                       1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1])
    Cout[24] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,
                                       0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,
                                       0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
                                       1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1])
    Cout[25] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,
                                       1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,
                                       0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,
                                       1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1])
    Cout[26] = exp( num[0] - logZ ) * num[1]
    num = fast_logsumexp(energyTerms, [0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,
                                       0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,
                                       0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,
                                       1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1])
    Cout[27] = exp( num[0] - logZ ) * num[1]
    # Zero out any NaN entries (can arise from 0-weight numerators).
    Cout[isnan(Cout)] = 0.
    return(Cout)
def p(params):
    """
    Give all parameters concatenated into one array from lowest to highest
    order: the 7 fields H[0..6] followed by the 21 pairwise couplings
    J[0..20].

    Returns probabilities of all 2**7 = 128 configurations as an ndarray.
    Configuration ``state`` has spin i "on" when bit (6 - i) of ``state``
    is set, i.e. spin 0 is the most significant bit (so state 64 is spin 0
    alone, state 96 is spins 0 and 1). J[k] couples the k-th spin pair in
    lexicographic (itertools.combinations) order: (0,1), (0,2), ..., (5,6).
    """
    from itertools import combinations
    from numpy import log

    n = 7
    H = params[0:7]
    J = params[7:28]
    pairs = list(combinations(range(n), 2))

    # Log-weight (negative energy) of every configuration: sum of the
    # fields of active spins plus the couplings of active pairs.
    energyTerms = zeros(2**n)
    for state in range(2**n):
        on = [(state >> (n - 1 - i)) & 1 for i in range(n)]
        E = sum(H[i] for i in range(n) if on[i])
        E += sum(J[k] for k, (i, j) in enumerate(pairs) if on[i] and on[j])
        energyTerms[state] = E

    # Numerically stable log partition function (log-sum-exp trick);
    # equivalent to fast_logsumexp(energyTerms)[0] since all weights
    # enter with coefficient +1.
    m = energyTerms.max()
    logZ = m + log(exp(energyTerms - m).sum())
    return exp(energyTerms - logZ)
| 77.81573
| 181
| 0.457953
| 11,468
| 34,628
| 1.380014
| 0.023631
| 0.25515
| 0.333818
| 0.401112
| 0.88342
| 0.879123
| 0.867623
| 0.861936
| 0.839821
| 0.833123
| 0
| 0.270614
| 0.099544
| 34,628
| 444
| 182
| 77.990991
| 0.236939
| 0.039621
| 0
| 0.431373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004902
| false
| 0
| 0.004902
| 0
| 0.009804
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
866ea75fcaf31cf737d4fe83c3e409c62d9d9af5
| 14,411
|
py
|
Python
|
models/tf_Cifar_OC_NN_Models.py
|
chihyunsong/oc-nn
|
f57130545f221fee67e9780d2a93ca48b9d10ba5
|
[
"MIT"
] | 203
|
2018-02-26T09:52:15.000Z
|
2022-03-09T12:51:38.000Z
|
models/tf_Cifar_OC_NN_Models.py
|
dherath/oc-nn
|
f57130545f221fee67e9780d2a93ca48b9d10ba5
|
[
"MIT"
] | 22
|
2018-05-07T19:36:49.000Z
|
2022-03-30T04:00:25.000Z
|
models/tf_Cifar_OC_NN_Models.py
|
dherath/oc-nn
|
f57130545f221fee67e9780d2a93ca48b9d10ba5
|
[
"MIT"
] | 81
|
2018-02-22T21:17:49.000Z
|
2022-03-24T04:53:46.000Z
|
# USAGE
# python test_network.py --model dog_not_dog.model --image images/examples/dog_01.png
# import the necessary packages
import numpy as np
import tensorflow as tf
from keras import backend as K
import time
## Declare the scoring functions
# Default scoring non-linearity: element-wise sigmoid on a TF tensor.
g = lambda x : 1/(1 + tf.exp(-x))
#g = lambda x : x # Linear
def nnScore(X, w, V, g):
    """Return the OC-NN decision score g(X w) V (TF matmul graph op)
    for every row of X, given hidden weights w, output weights V and
    activation g."""
    # print "X",X.shape
    # print "w",w[0].shape
    # print "v",V[0].shape
    return tf.matmul(g((tf.matmul(X, w))), V)
def relu(x):
    """Element-wise NumPy ReLU: clip negative entries of ``x`` to zero.

    Returns a NEW array; the original version aliased the input
    (``y = x``) and clobbered the caller's array in place, which is a
    surprising side effect for a pure scoring helper.
    """
    y = x.copy()  # copy so the caller's array is left untouched
    y[y < 0] = 0
    return y
import csv
from itertools import izip_longest
# BUGFIX: was `import matplotlib as plt`. plotNNFilter() below calls
# plt.figure/plt.subplot/plt.title/plt.imshow/plt.savefig, which are
# pyplot functions, not attributes of the top-level matplotlib package.
import matplotlib.pyplot as plt
def write_decisionScores2Csv(path, filename, positiveScores, negativeScores):
    """Dump positive (normal) and negative (anomaly) decision scores side
    by side as a two-column CSV at ``path + filename``.

    The two arrays may have different lengths; the shorter column is
    padded with empty strings via ``izip_longest`` (Python 2 itertools).
    """
    newfilePath = path+filename
    print "Writing file to ", path+filename
    poslist = positiveScores.tolist()
    neglist = negativeScores.tolist()

    # rows = zip(poslist, neglist)
    d = [poslist, neglist]
    export_data = izip_longest(*d, fillvalue='')
    with open(newfilePath, 'w') as myfile:
        wr = csv.writer(myfile)
        wr.writerow(("Normal", "Anomaly"))
        wr.writerows(export_data)
        myfile.close()  # redundant: the with-statement already closes the file
    return
# Output directory for per-model decision-score CSV dumps (machine-specific path).
decision_scorePath = "/Users/raghav/Documents/Uni/oc-nn/Decision_Scores/cifar/"
def tf_OneClass_NN_linear(data_train,data_test,nu):
    """Train the linear-activation one-class neural network (OC-NN) on
    ``data_train`` and score both splits (TensorFlow 1.x graph mode).

    data_train/data_test: 2-D feature matrices (rows = samples).
    nu: OC-NN quantile parameter in (0, 1] controlling the threshold r.
    Returns [pos_decisionScore, neg_decisionScore, trainTime, testTime];
    positive (train) scores are clipped at zero.
    """
    tf.reset_default_graph()
    RANDOM_SEED = 42
    tf.set_random_seed(RANDOM_SEED)
    train_X = data_train

    # Layer's sizes
    x_size = train_X.shape[1] # Number of input nodes: 4 features and 1 bias
    print "Input Shape:",x_size
    h_size = 16 # Number of hidden nodes
    y_size = 1 # Number of outcomes (3 iris flowers)
    D = x_size
    K = h_size
    # theta/rvalue here are superseded below; kept for parity with the
    # sibling OC-NN variants.
    theta = np.random.normal(0, 1, K + K*D + 1)
    rvalue = np.random.normal(0,1,(len(train_X),y_size))
    # nu = 0.1

    def init_weights(shape):
        """ Weight initialization """
        weights = tf.random_normal(shape,mean=0, stddev=1)
        return tf.Variable(weights)

    def forwardprop(X, w_1, w_2):
        """
        Forward-propagation.
        IMPORTANT: yhat is not softmax since TensorFlow's softmax_cross_entropy_with_logits() does that internally.
        """
        X = tf.cast(X, tf.float32)
        w_1 = tf.cast(w_1, tf.float32)
        w_2 = tf.cast(w_2, tf.float32)
        h = (tf.matmul(X, w_1)) #
        yhat = tf.matmul(h, w_2) # The \varphi function
        return yhat

    # Identity activation -> the "linear" OC-NN variant (shadows module g).
    g = lambda x : x

    def nnScore(X, w, V, g):
        # Decision score g(X w) V for every row of X.
        X = tf.cast(X, tf.float32)
        w = tf.cast(w, tf.float32)
        V = tf.cast(V, tf.float32)
        return tf.matmul(g((tf.matmul(X, w))), V)

    def relu1(x):
        # TF ReLU variant (unused here).
        y = x
        y = tf.nn.relu(x)
        return y

    def relu(x):
        # NOTE(review): evaluates the tensor eagerly and clips in NumPy;
        # relies on `sess` (assigned later in this function) existing
        # at call time. Unused: the objective uses tf.nn.relu directly.
        with sess.as_default():
            x = x.eval()
        y = x
        y[y< 0] = 0
        # y = tf.nn.relu(x)
        return y

    def ocnn_obj(theta, X, nu, w1, w2, g,r):
        # OC-NN objective: L2 decay on both layers plus the hinge-style
        # penalty (1/nu) * mean(relu(r - score)) - r.
        w = w1
        V = w2
        X = tf.cast(X, tf.float32)
        w = tf.cast(w1, tf.float32)
        V = tf.cast(w2, tf.float32)
        term1 = 0.5 * tf.reduce_sum(w**2)
        term2 = 0.5 * tf.reduce_sum(V**2)
        term3 = 1/nu * tf.reduce_mean(tf.nn.relu(r - nnScore(X, w, V, g)))
        term4 = -r
        return term1 + term2 + term3 + term4

    # For testing the algorithm
    test_X = data_test

    # Symbols
    X = tf.placeholder("float32", shape=[None, x_size])
    r = tf.get_variable("r", dtype=tf.float32,shape=(),trainable=False)

    # Weight initializations
    w_1 = init_weights((x_size, h_size))
    w_2 = init_weights((h_size, y_size))

    # Forward propagation
    # yhat = forwardprop(X, w_1, w_2)
    # predict = tf.argmax(yhat, axis=1)

    # Backward propagation
    # cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y, logits=yhat))
    cost = ocnn_obj(theta, X, nu, w_1, w_2, g,r)
    updates = tf.train.AdamOptimizer(0.05).minimize(cost)

    # Run optimization routine after initialization
    sess = tf.Session()
    init = tf.global_variables_initializer()
    sess.run(init)
    rvalue = 0.1
    start_time = time.time()
    for epoch in range(100):
        # Train with each example
        sess.run(updates, feed_dict={X: train_X,r:rvalue})
        # Re-estimate r each epoch as the nu-quantile of the train scores.
        rvalue = nnScore(train_X, w_1, w_2, g)
        with sess.as_default():
            rvalue = rvalue.eval()
        rvalue = np.percentile(rvalue,q=100*nu)
        print("Epoch = %d, r = %f"
              % (epoch + 1,rvalue))
    trainTime = time.time() - start_time

    ### Get the optimized weights here
    start_time = time.time()
    train = nnScore(train_X, w_1, w_2, g)
    test = nnScore(test_X, w_1, w_2, g)
    testTime = time.time() - start_time
    with sess.as_default():
        arrayTrain = train.eval()
        arrayTest = test.eval()
    # rstar = r.eval()
    rstar =rvalue
    sess.close()
    print "Session Closed!!!"

    # Scores are centered on the learned threshold r*; train-side
    # negatives are clipped to zero, test-side values are left signed.
    pos_decisionScore = arrayTrain-rstar
    pos_decisionScore[pos_decisionScore < 0] = 0
    neg_decisionScore = arrayTest-rstar
    print "&&&&&&&&&&&&"
    print pos_decisionScore
    print neg_decisionScore

    # write_decisionScores2Csv(decision_scorePath, "OneClass_NN_linear.csv", pos_decisionScore, neg_decisionScore)
    return [pos_decisionScore, neg_decisionScore,trainTime,testTime]
def tf_OneClass_NN_sigmoid(data_train,data_test,nu):
    """Train the sigmoid-activation one-class neural network (OC-NN) on
    ``data_train`` and score both splits (TensorFlow 1.x graph mode).

    data_train/data_test: 2-D feature matrices (rows = samples).
    nu: OC-NN quantile parameter in (0, 1] controlling the threshold r.
    Also saves the hidden-layer representations of both splits as .mat
    files (hard-coded machine-specific paths).
    Returns [pos_decisionScore, neg_decisionScore, trainTime, testTime].
    """
    tf.reset_default_graph()
    sess = tf.Session()
    train_X = data_train
    RANDOM_SEED = 42
    tf.set_random_seed(RANDOM_SEED)

    # Layer's sizes
    x_size = train_X.shape[1] # Number of input nodes: 4 features and 1 bias
    print "Input Shape:", x_size
    h_size = 16 # Number of hidden nodes
    y_size = 1 # Number of outcomes (3 iris flowers)
    D = x_size
    K = h_size
    # theta/rvalue here are superseded below; kept for parity with the
    # sibling OC-NN variants.
    theta = np.random.normal(0, 1, K + K*D + 1)
    rvalue = np.random.normal(0,1,(len(train_X),y_size))
    # nu = 0.1
    import math

    def plotNNFilter(units):
        # Visualize the first few filters of an activation volume and
        # save the figure (hard-coded output path). Only referenced from
        # commented-out code below.
        filters = 3
        fig = plt.figure(1, figsize=(20, 20))
        n_columns = 6
        n_rows = math.ceil(filters / n_columns) + 1
        for i in range(filters):
            plt.subplot(n_rows, n_columns, i + 1)
            plt.title('Filter ' + str(i))
            plt.imshow(units[0, :, :, i], interpolation="nearest", cmap="gray")
        plt.savefig('/Users/raghav/Documents/Uni/oc-nn/models/representation_sigmoid_dog.png')

    # def getActivations(layer, stimuli):
    #     units = sess.run(layer, feed_dict={x: np.reshape(stimuli, [1, 784], order='F'), keep_prob: 1.0})
    #     plotNNFilter(units)

    def init_weights(shape):
        """ Weight initialization """
        weights = tf.random_normal(shape,mean=0, stddev=0.00001)
        return tf.Variable(weights)

    def forwardprop(X, w_1, w_2):
        """
        Forward-propagation.
        IMPORTANT: yhat is not softmax since TensorFlow's softmax_cross_entropy_with_logits() does that internally.
        """
        X = tf.cast(X, tf.float32)
        w_1 = tf.cast(w_1, tf.float32)
        w_2 = tf.cast(w_2, tf.float32)
        h = tf.nn.sigmoid(tf.matmul(X, w_1)) # The \sigma function
        yhat = tf.matmul(h, w_2) # The \varphi function
        return yhat

    # Sigmoid activation for this OC-NN variant (shadows module-level g).
    g = lambda x : 1/(1 + tf.exp(-x))

    def nnScore(X, w, V, g):
        # Decision score g(X w) V for every row of X.
        X = tf.cast(X, tf.float32)
        w = tf.cast(w, tf.float32)
        V = tf.cast(V, tf.float32)
        return tf.matmul(g((tf.matmul(X, w))), V)

    def data_rep(X, w, V, g):
        # Hidden-layer representation g(X w) only (V unused).
        X = tf.cast(X, tf.float32)
        w = tf.cast(w, tf.float32)
        return g((tf.matmul(X, w)))

    def relu(x):
        y = tf.nn.relu(x)
        return y

    def ocnn_obj(theta, X, nu, w1, w2, g,r):
        # OC-NN objective: L2 decay on both layers plus the hinge-style
        # penalty (1/nu) * mean(relu(r - score)) - r.
        w = w1
        V = w2
        X = tf.cast(X, tf.float32)
        w = tf.cast(w1, tf.float32)
        V = tf.cast(w2, tf.float32)
        term1 = 0.5 * tf.reduce_sum(w**2)
        term2 = 0.5 * tf.reduce_sum(V**2)
        term3 = 1/nu * tf.reduce_mean(relu(r - nnScore(X, w, V, g)))
        term4 = -r
        return term1 + term2 + term3 + term4

    # For testing the algorithm
    test_X = data_test

    # Symbols
    X = tf.placeholder("float32", shape=[None, x_size])
    r = tf.get_variable("r", dtype=tf.float32,shape=(),trainable=False)

    # Weight initializations
    w_1 = init_weights((x_size, h_size))
    w_2 = init_weights((h_size, y_size))

    # Forward propagation
    yhat = forwardprop(X, w_1, w_2)
    predict = tf.argmax(yhat, axis=1)

    # Backward propagation
    # cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y, logits=yhat))
    cost = ocnn_obj(theta, X, nu, w_1, w_2, g,r)
    updates = tf.train.GradientDescentOptimizer(0.0001).minimize(cost)

    # Run SGD
    init = tf.global_variables_initializer()
    sess.run(init)
    rvalue = 0.1
    start_time = time.time()
    for epoch in range(100):
        # Train with each example
        units = sess.run(updates, feed_dict={X: train_X,r:rvalue})
        # plotNNFilter(units)
        with sess.as_default():
            w1 = w_1.eval()
            w2 = w_2.eval()
        # Re-estimate r each epoch as the nu-quantile of the train scores.
        rvalue = nnScore(train_X, w1, w2, g)
        with sess.as_default():
            rvalue = rvalue.eval()
        rvalue = np.percentile(rvalue,q=100*nu)
        print("Epoch = %d, r = %f"
              % (epoch + 1,rvalue))
    trainTime = time.time() - start_time

    with sess.as_default():
        w1 = w_1.eval()
        w2 = w_2.eval()
    start_time = time.time()
    train = nnScore(train_X, w1, w2, g)
    test = nnScore(test_X, w1, w2, g)
    train_rep = data_rep(train_X, w1, w2, g)
    test_rep = data_rep(test_X, w1, w2, g)
    testTime = time.time() - start_time
    with sess.as_default():
        arrayTrain = train.eval()
        arrayTest = test.eval()
        arraytrain_rep =train_rep.eval()
        arraytest_rep= test_rep.eval()
    # rstar = r.eval()
    rstar =rvalue
    sess.close()
    print "Session Closed!!!"

    # Persist the learned hidden representations (not the weights,
    # despite the file names) for later inspection.
    print "Saving Hidden layer weights w1 for cifar.. data"
    import scipy.io as sio
    sio.savemat('/Users/raghav/Documents/Uni/oc-nn/models/w1.mat', {'data': arraytrain_rep})
    sio.savemat('/Users/raghav/Documents/Uni/oc-nn/models/w2.mat', {'data': arraytest_rep})

    pos_decisionScore = arrayTrain-rstar
    pos_decisionScore[pos_decisionScore< 0] = 0 ## Clip all the negative values to zero
    neg_decisionScore = arrayTest-rstar

    # write_decisionScores2Csv(decision_scorePath, "OneClass_NN_sigmoid.csv", pos_decisionScore, neg_decisionScore)
    return [pos_decisionScore, neg_decisionScore,trainTime,testTime]
def tf_OneClass_NN_relu(data_train,data_test,nu):
    """Train the ReLU-activation one-class neural network (OC-NN) on
    ``data_train`` and score both splits (TensorFlow 1.x graph mode).

    data_train/data_test: 2-D feature matrices (rows = samples).
    nu: OC-NN quantile parameter in (0, 1] controlling the threshold r.
    NOTE(review): unlike the linear/sigmoid variants this returns only
    [pos_decisionScore, neg_decisionScore] — no timing entries; callers
    must not unpack four values.
    """
    tf.reset_default_graph()
    sess = tf.Session()
    train_X = data_train
    RANDOM_SEED = 42
    tf.set_random_seed(RANDOM_SEED)

    # Layer's sizes
    x_size = train_X.shape[1] # Number of input nodes: 4 features and 1 bias
    print "Input Shape:", x_size
    h_size = 16 # Number of hidden nodes
    y_size = 1 # Number of outcomes (3 iris flowers)
    D = x_size
    K = h_size
    # theta/rvalue here are superseded below; kept for parity with the
    # sibling OC-NN variants.
    theta = np.random.normal(0, 1, K + K*D + 1)
    rvalue = np.random.normal(0,1,(len(train_X),y_size))
    # nu = 0.1

    def init_weights(shape):
        """ Weight initialization """
        weights = tf.random_normal(shape,mean=0, stddev=0.00001)
        return tf.Variable(weights)

    def forwardprop(X, w_1, w_2):
        """
        Forward-propagation.
        IMPORTANT: yhat is not softmax since TensorFlow's softmax_cross_entropy_with_logits() does that internally.
        """
        X = tf.cast(X, tf.float32)
        w_1 = tf.cast(w_1, tf.float32)
        w_2 = tf.cast(w_2, tf.float32)
        h = tf.nn.sigmoid(tf.matmul(X, w_1)) # The \sigma function
        yhat = tf.matmul(h, w_2) # The \varphi function
        return yhat

    # ReLU activation; `relu` is looked up at call time, after its def below.
    g = lambda x : relu(x)

    def nnScore(X, w, V, g):
        # Decision score g(X w) V for every row of X.
        X = tf.cast(X, tf.float32)
        w = tf.cast(w, tf.float32)
        V = tf.cast(V, tf.float32)
        return tf.matmul(g((tf.matmul(X, w))), V)

    def relu(x):
        y = tf.nn.relu(x)
        return y

    def ocnn_obj(theta, X, nu, w1, w2, g,r):
        # OC-NN objective: L2 decay on both layers plus the hinge-style
        # penalty (1/nu) * mean(relu(r - score)) - r.
        w = w1
        V = w2
        X = tf.cast(X, tf.float32)
        w = tf.cast(w1, tf.float32)
        V = tf.cast(w2, tf.float32)
        term1 = 0.5 * tf.reduce_sum(w**2)
        term2 = 0.5 * tf.reduce_sum(V**2)
        term3 = 1/nu * tf.reduce_mean(relu(r - nnScore(X, w, V, g)))
        term4 = -r
        return term1 + term2 + term3 + term4

    # For testing the algorithm
    test_X = data_test

    # Symbols
    X = tf.placeholder("float32", shape=[None, x_size])
    r = tf.get_variable("r", dtype=tf.float32,shape=(),trainable=False)

    # Weight initializations
    w_1 = init_weights((x_size, h_size))
    w_2 = init_weights((h_size, y_size))

    # Forward propagation
    yhat = forwardprop(X, w_1, w_2)
    predict = tf.argmax(yhat, axis=1)

    # Backward propagation
    # cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y, logits=yhat))
    cost = ocnn_obj(theta, X, nu, w_1, w_2, g,r)
    updates = tf.train.GradientDescentOptimizer(0.0001).minimize(cost)

    # Run SGD
    init = tf.global_variables_initializer()
    sess.run(init)
    rvalue = 0.1
    for epoch in range(100):
        # Train with each example
        sess.run(updates, feed_dict={X: train_X,r:rvalue})
        with sess.as_default():
            w1 = w_1.eval()
            w2 = w_2.eval()
        # Re-estimate r each epoch as the nu-quantile of the train scores.
        rvalue = nnScore(train_X, w1, w2, g)
        with sess.as_default():
            rvalue = rvalue.eval()
        rvalue = np.percentile(rvalue,q=100*nu)
        print("Epoch = %d, r = %f"
              % (epoch + 1,rvalue))

    with sess.as_default():
        w1 = w_1.eval()
        w2 = w_2.eval()
    train = nnScore(train_X, w1, w2, g)
    test = nnScore(test_X, w1, w2, g)
    with sess.as_default():
        arrayTrain = train.eval()
        arrayTest = test.eval()
    # rstar = r.eval()
    rstar =rvalue
    sess.close()
    print "Session Closed!!!"

    pos_decisionScore = arrayTrain-rstar
    pos_decisionScore[pos_decisionScore< 0] = 0 ## Clip all the negative values to zero
    neg_decisionScore = arrayTest-rstar
    return [pos_decisionScore,neg_decisionScore]
| 27.190566
| 115
| 0.587399
| 2,078
| 14,411
| 3.926853
| 0.128489
| 0.035294
| 0.015931
| 0.005882
| 0.810907
| 0.800368
| 0.779289
| 0.775245
| 0.763358
| 0.741054
| 0
| 0.035086
| 0.286032
| 14,411
| 529
| 116
| 27.241966
| 0.757994
| 0.145514
| 0
| 0.801303
| 0
| 0
| 0.042931
| 0.018937
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.029316
| null | null | 0.045603
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
867b64ae3a9da5cf750a274f80e71ce485f874c6
| 170
|
py
|
Python
|
pyclesperanto_prototype/_tier0/_set_wait_for_kernel_finish.py
|
elsandal/pyclesperanto_prototype
|
7bda828813b86b44b63d73d5e8f466d9769cded1
|
[
"BSD-3-Clause"
] | 64
|
2020-03-18T12:11:22.000Z
|
2022-03-31T08:19:18.000Z
|
pyclesperanto_prototype/_tier0/_set_wait_for_kernel_finish.py
|
elsandal/pyclesperanto_prototype
|
7bda828813b86b44b63d73d5e8f466d9769cded1
|
[
"BSD-3-Clause"
] | 148
|
2020-05-14T06:14:11.000Z
|
2022-03-26T15:02:31.000Z
|
pyclesperanto_prototype/_tier0/_set_wait_for_kernel_finish.py
|
elsandal/pyclesperanto_prototype
|
7bda828813b86b44b63d73d5e8f466d9769cded1
|
[
"BSD-3-Clause"
] | 16
|
2020-05-31T00:53:44.000Z
|
2022-03-23T13:20:57.000Z
|
def set_wait_for_kernel_finish(wait_for_kernel_finish : bool = None):
    """Globally configure whether OpenCL kernel launches should block
    until the kernel has finished executing.

    The flag is stored on the ``OCLProgram`` class, so it affects all
    subsequently executed kernels. Passing ``None`` restores the
    unset/default behaviour.
    """
    from ._program import OCLProgram

    setattr(OCLProgram, "_wait_for_kernel_finish", wait_for_kernel_finish)
| 56.666667
| 69
| 0.835294
| 25
| 170
| 5.08
| 0.48
| 0.220472
| 0.409449
| 0.598425
| 0.598425
| 0.598425
| 0.598425
| 0.598425
| 0
| 0
| 0
| 0
| 0.117647
| 170
| 3
| 70
| 56.666667
| 0.846667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
86a188e2dd126aae75a4e5fed70b19e04ab71ab1
| 1,962
|
py
|
Python
|
tests/test_cli_utils_path.py
|
MinePlayersPE/streamlink
|
481eab652c2947fdbaf0790df0d3cf4ed8130cd5
|
[
"BSD-2-Clause"
] | 4
|
2020-10-17T06:35:39.000Z
|
2021-05-14T20:00:01.000Z
|
tests/test_cli_utils_path.py
|
MinePlayersPE/streamlink
|
481eab652c2947fdbaf0790df0d3cf4ed8130cd5
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_cli_utils_path.py
|
MinePlayersPE/streamlink
|
481eab652c2947fdbaf0790df0d3cf4ed8130cd5
|
[
"BSD-2-Clause"
] | null | null | null |
import pytest
from streamlink_cli.utils.path import replace_chars
from tests import posix_only, windows_only
@pytest.mark.parametrize("char", list(range(32)))
def test_replace_chars_unprintable(char: int):
    # Every control character (codepoints 0-31), doubled, must collapse
    # into a single underscore.
    dirty = f"foo{chr(char)}{chr(char)}bar"
    assert replace_chars(dirty) == "foo_bar", "Replaces unprintable characters"
@posix_only
@pytest.mark.parametrize("char", ["/"])
def test_replace_chars_posix(char: str):
    # POSIX only forbids the path separator; a doubled occurrence still
    # becomes one underscore.
    dirty = "foo" + char * 2 + "bar"
    assert replace_chars(dirty) == "foo_bar", "Replaces multiple unsupported characters in a row"
@windows_only
@pytest.mark.parametrize("char", ["\x7f\"*/:<>?\\|"])
def test_replace_chars_windows(char: str):
    # Single parameter: the entire run of Windows-forbidden characters
    # (the original built the same one-element list via str.split()).
    dirty = "foo" + char * 2 + "bar"
    assert replace_chars(dirty) == "foo_bar", "Replaces multiple unsupported characters in a row"
@posix_only
def test_replace_chars_posix_all():
    """POSIX: every unsupported character at once collapses to a single "_"."""
    unsupported = "".join(map(chr, range(32))) + "/"
    assert replace_chars(unsupported) == "_"
@windows_only
def test_replace_chars_windows_all():
    """Windows: every unsupported character at once collapses to a single "_"."""
    unsupported = "".join(map(chr, range(32))) + "\x7f\"*/:<>?\\|"
    assert replace_chars(unsupported) == "_"
@posix_only
def test_replace_chars_posix_override():
    """On a POSIX host, an explicit scheme argument overrides autodetection."""
    all_chars = "".join(map(chr, range(32))) + "\x7f\"*:/<>?\\|"
    posix_result = "_\x7f\"*:_<>?\\|"
    # No scheme: the host platform (posix) is detected.
    assert replace_chars(all_chars) == posix_result
    # Explicit posix-style schemes behave like the default here.
    for scheme in ("posix", "unix"):
        assert replace_chars(all_chars, scheme) == posix_result
    # Explicit windows-style schemes force the stricter replacement set.
    for scheme in ("windows", "win32"):
        assert replace_chars(all_chars, scheme) == "_"
@windows_only
def test_replace_chars_windows_override():
    """On a Windows host, an explicit scheme argument overrides autodetection."""
    all_chars = "".join(map(chr, range(32))) + "\x7f\"*:/<>?\\|"
    posix_result = "_\x7f\"*:_<>?\\|"
    # No scheme: the host platform (windows) is detected.
    assert replace_chars(all_chars) == "_"
    # Explicit posix-style schemes force the looser replacement set.
    for scheme in ("posix", "unix"):
        assert replace_chars(all_chars, scheme) == posix_result
    # Explicit windows-style schemes behave like the default here.
    for scheme in ("windows", "win32"):
        assert replace_chars(all_chars, scheme) == "_"
def test_replace_chars_replacement():
    """A caller-supplied replacement character is used instead of "_"."""
    result = replace_chars("\x00", None, "+")
    assert result == "+"
| 35.035714
| 113
| 0.671254
| 262
| 1,962
| 4.698473
| 0.179389
| 0.243704
| 0.233956
| 0.170593
| 0.828595
| 0.721365
| 0.649066
| 0.535337
| 0.535337
| 0.535337
| 0
| 0.014654
| 0.130479
| 1,962
| 55
| 114
| 35.672727
| 0.706917
| 0
| 0
| 0.473684
| 0
| 0
| 0.171764
| 0.014271
| 0
| 0
| 0
| 0
| 0.421053
| 1
| 0.210526
| false
| 0
| 0.078947
| 0
| 0.289474
| 0.052632
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86cf67028ffaca556ac80d4ba4b69899911ab9dd
| 151
|
py
|
Python
|
test/test_export_modules.py
|
ESA-PhiLab/python-mapswipe-workers
|
558d55320159483524a6a204680802bf868d74e7
|
[
"Apache-2.0"
] | 2
|
2019-12-28T16:51:46.000Z
|
2020-05-06T18:20:24.000Z
|
test/test_export_modules.py
|
ESA-PhiLab/python-mapswipe-workers
|
558d55320159483524a6a204680802bf868d74e7
|
[
"Apache-2.0"
] | null | null | null |
test/test_export_modules.py
|
ESA-PhiLab/python-mapswipe-workers
|
558d55320159483524a6a204680802bf868d74e7
|
[
"Apache-2.0"
] | null | null | null |
"""Test export modules."""
from export_module import export_project_results, export_projects, export_users_and_stats
def test_export_module():
    """Import smoke test.

    Passes if the ``export_module`` imports at the top of this file
    resolved without raising; the body itself does nothing.
    """
    pass
| 21.571429
| 89
| 0.821192
| 21
| 151
| 5.47619
| 0.666667
| 0.173913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099338
| 151
| 6
| 90
| 25.166667
| 0.845588
| 0.13245
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
86d684994afc13bc84fa4f864e6c0d49b427d69b
| 2,448
|
py
|
Python
|
python3/lib/python3.6/site-packages/tensorflow/_api/v1/compat/v2/errors/__init__.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | 3
|
2020-10-12T15:47:01.000Z
|
2022-01-14T19:51:26.000Z
|
python3/lib/python3.6/site-packages/tensorflow/_api/v1/compat/v2/errors/__init__.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | null | null | null |
python3/lib/python3.6/site-packages/tensorflow/_api/v1/compat/v2/errors/__init__.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | 2
|
2020-08-03T13:02:06.000Z
|
2020-11-04T03:15:44.000Z
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Exception types for TensorFlow errors.
"""
from __future__ import print_function as _print_function
from tensorflow.lite.python.lite import _NotFoundError as NotFoundError
from tensorflow.python import OpError
from tensorflow.python.framework.errors import AbortedError
from tensorflow.python.framework.errors import AlreadyExistsError
from tensorflow.python.framework.errors import CancelledError
from tensorflow.python.framework.errors import DataLossError
from tensorflow.python.framework.errors import DeadlineExceededError
from tensorflow.python.framework.errors import FailedPreconditionError
from tensorflow.python.framework.errors import InternalError
from tensorflow.python.framework.errors import InvalidArgumentError
from tensorflow.python.framework.errors import OutOfRangeError
from tensorflow.python.framework.errors import PermissionDeniedError
from tensorflow.python.framework.errors import ResourceExhaustedError
from tensorflow.python.framework.errors import UnauthenticatedError
from tensorflow.python.framework.errors import UnavailableError
from tensorflow.python.framework.errors import UnimplementedError
from tensorflow.python.framework.errors import UnknownError
from tensorflow.python.framework.errors_impl import ABORTED
from tensorflow.python.framework.errors_impl import ALREADY_EXISTS
from tensorflow.python.framework.errors_impl import CANCELLED
from tensorflow.python.framework.errors_impl import DATA_LOSS
from tensorflow.python.framework.errors_impl import DEADLINE_EXCEEDED
from tensorflow.python.framework.errors_impl import FAILED_PRECONDITION
from tensorflow.python.framework.errors_impl import INTERNAL
from tensorflow.python.framework.errors_impl import INVALID_ARGUMENT
from tensorflow.python.framework.errors_impl import NOT_FOUND
from tensorflow.python.framework.errors_impl import OK
from tensorflow.python.framework.errors_impl import OUT_OF_RANGE
from tensorflow.python.framework.errors_impl import PERMISSION_DENIED
from tensorflow.python.framework.errors_impl import RESOURCE_EXHAUSTED
from tensorflow.python.framework.errors_impl import UNAUTHENTICATED
from tensorflow.python.framework.errors_impl import UNAVAILABLE
from tensorflow.python.framework.errors_impl import UNIMPLEMENTED
from tensorflow.python.framework.errors_impl import UNKNOWN
del _print_function
| 55.636364
| 82
| 0.882353
| 301
| 2,448
| 7.046512
| 0.245847
| 0.256483
| 0.311174
| 0.437529
| 0.650636
| 0.650636
| 0.360679
| 0
| 0
| 0
| 0
| 0
| 0.069036
| 2,448
| 43
| 83
| 56.930233
| 0.930671
| 0.067402
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.972222
| 0
| 0.972222
| 0.055556
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d49fb6d5776ce774de27dabef17fd71f3e259cb6
| 6,545
|
py
|
Python
|
loldib/getratings/models/NA/na_rammus/na_rammus_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_rammus/na_rammus_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_rammus/na_rammus_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# One empty Ratings subclass per champion for the NA / Rammus / Top matchup
# table.  The original file spelled out 138 identical
# ``class NA_Rammus_Top_<Champion>(Ratings): pass`` statements; here the same
# module-level classes are produced programmatically with three-argument
# ``type()``, which yields identical class objects (same name, same base,
# empty body) bound to the same module attributes.
_CHAMPIONS = (
    "Aatrox", "Ahri", "Akali", "Alistar", "Amumu", "Anivia", "Annie", "Ashe",
    "AurelionSol", "Azir", "Bard", "Blitzcrank", "Brand", "Braum", "Caitlyn",
    "Camille", "Cassiopeia", "Chogath", "Corki", "Darius", "Diana", "Draven",
    "DrMundo", "Ekko", "Elise", "Evelynn", "Ezreal", "Fiddlesticks", "Fiora",
    "Fizz", "Galio", "Gangplank", "Garen", "Gnar", "Gragas", "Graves",
    "Hecarim", "Heimerdinger", "Illaoi", "Irelia", "Ivern", "Janna",
    "JarvanIV", "Jax", "Jayce", "Jhin", "Jinx", "Kalista", "Karma", "Karthus",
    "Kassadin", "Katarina", "Kayle", "Kayn", "Kennen", "Khazix", "Kindred",
    "Kled", "KogMaw", "Leblanc", "LeeSin", "Leona", "Lissandra", "Lucian",
    "Lulu", "Lux", "Malphite", "Malzahar", "Maokai", "MasterYi", "MissFortune",
    "MonkeyKing", "Mordekaiser", "Morgana", "Nami", "Nasus", "Nautilus",
    "Nidalee", "Nocturne", "Nunu", "Olaf", "Orianna", "Ornn", "Pantheon",
    "Poppy", "Quinn", "Rakan", "Rammus", "RekSai", "Renekton", "Rengar",
    "Riven", "Rumble", "Ryze", "Sejuani", "Shaco", "Shen", "Shyvana",
    "Singed", "Sion", "Sivir", "Skarner", "Sona", "Soraka", "Swain", "Syndra",
    "TahmKench", "Taliyah", "Talon", "Taric", "Teemo", "Thresh", "Tristana",
    "Trundle", "Tryndamere", "TwistedFate", "Twitch", "Udyr", "Urgot",
    "Varus", "Vayne", "Veigar", "Velkoz", "Vi", "Viktor", "Vladimir",
    "Volibear", "Warwick", "Xayah", "Xerath", "XinZhao", "Yasuo", "Yorick",
    "Zac", "Zed", "Ziggs", "Zilean", "Zyra",
)

for _champion in _CHAMPIONS:
    _cls_name = "NA_Rammus_Top_" + _champion
    globals()[_cls_name] = type(_cls_name, (Ratings,), {})

# Keep the module namespace identical to the hand-written version.
del _CHAMPIONS, _champion, _cls_name
| 15.695444
| 46
| 0.766692
| 972
| 6,545
| 4.736626
| 0.151235
| 0.209818
| 0.389661
| 0.479583
| 0.803432
| 0.803432
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169748
| 6,545
| 416
| 47
| 15.733173
| 0.847258
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
d4a50b0a6c42d222a00f860eab1ceb35f7e6fd28
| 5,581
|
py
|
Python
|
src/tests/presale/test_account.py
|
abrock/pretix
|
cd9c048458afce1198276e5936bf583578855a4f
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-06-23T07:44:54.000Z
|
2021-06-23T07:44:54.000Z
|
src/tests/presale/test_account.py
|
awg24/pretix
|
b1d67a48601838bac0d4e498cbe8bdcd16013d60
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/tests/presale/test_account.py
|
awg24/pretix
|
b1d67a48601838bac0d4e498cbe8bdcd16013d60
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import time
from tests.base import BrowserTest
from tests.presale.test_event import EventTestMixin
from pretix.base.models import User
class UserSettingsTest(EventTestMixin, BrowserTest):
    """Selenium tests for the presale "account settings" form.

    ``setUp`` creates a global user, logs it in through the event's login
    page and navigates to the settings page; each test then edits the form
    in the browser and verifies what was actually persisted on the ``User``
    row in the database.
    """

    def setUp(self):
        """Log the test user in and open the account settings page."""
        super().setUp()
        self.user = User.objects.create_global_user('dummy@dummy.dummy', 'dummy')
        self.driver.implicitly_wait(10)
        self.driver.get('%s/%s/%s/login' % (self.live_server_url, self.orga.slug, self.event.slug))
        # open the login accordion
        self.scroll_and_click(self.driver.find_element_by_css_selector('a[href*=loginForm]'))
        time.sleep(1)  # let the accordion animation finish before typing
        # enter login details
        self.driver.find_element_by_css_selector('#loginForm input[name=username]').send_keys('dummy@dummy.dummy')
        self.driver.find_element_by_css_selector('#loginForm input[name=password]').send_keys('dummy')
        self.scroll_and_click(self.driver.find_element_by_css_selector('#loginForm button.btn-primary'))
        # implicit wait: this lookup blocks until the logged-in view appears
        self.driver.find_element_by_partial_link_text('Your account')
        self.driver.get('%s/%s/%s/account/settings' % (self.live_server_url, self.orga.slug, self.event.slug))

    def test_set_name(self):
        """Given/family name can be changed without entering the password."""
        self.driver.find_element_by_name("givenname").clear()
        self.driver.find_element_by_name("familyname").clear()
        self.driver.find_element_by_name("givenname").send_keys("Peter")
        self.driver.find_element_by_name("familyname").send_keys("Miller")
        self.scroll_and_click(self.driver.find_element_by_class_name('btn-save'))
        self.driver.find_element_by_class_name("alert-success")
        # re-fetch from the DB to check what was actually persisted
        self.user = User.objects.get(pk=self.user.pk)
        assert self.user.givenname == 'Peter'
        assert self.user.familyname == 'Miller'

    def test_change_email_require_password(self):
        """Changing the e-mail without the current password is rejected."""
        self.driver.find_element_by_name("email").clear()
        self.driver.find_element_by_name("email").send_keys("foo@example.com")
        self.scroll_and_click(self.driver.find_element_by_class_name('btn-save'))
        self.driver.find_element_by_class_name("alert-danger")
        self.user = User.objects.get(pk=self.user.pk)
        # e-mail unchanged
        assert self.user.email == 'dummy@dummy.dummy'

    def test_change_email_success(self):
        """Changing the e-mail works when the current password is supplied."""
        self.driver.find_element_by_name("email").clear()
        self.driver.find_element_by_name("email").send_keys("foo@example.com")
        self.driver.find_element_by_name("old_pw").clear()
        self.driver.find_element_by_name("old_pw").send_keys("dummy")
        self.scroll_and_click(self.driver.find_element_by_class_name('btn-save'))
        self.driver.find_element_by_class_name("alert-success")
        self.user = User.objects.get(pk=self.user.pk)
        assert self.user.email == 'foo@example.com'

    def test_change_email_allow_local_duplicates(self):
        """An event-local user with the same e-mail does not block the change."""
        User.objects.create_local_user(event=self.event, username='test', email='foo@example.com', password='foo')
        self.driver.find_element_by_name("email").clear()
        self.driver.find_element_by_name("email").send_keys("foo@example.com")
        self.driver.find_element_by_name("old_pw").clear()
        self.driver.find_element_by_name("old_pw").send_keys("dummy")
        self.scroll_and_click(self.driver.find_element_by_class_name('btn-save'))
        self.driver.find_element_by_class_name("alert-success")
        self.user = User.objects.get(pk=self.user.pk)
        assert self.user.email == 'foo@example.com'

    def test_change_email_no_global_duplicates(self):
        """A global user with the same e-mail DOES block the change."""
        User.objects.create_global_user('foo@example.com', 'foo')
        self.driver.find_element_by_name("email").clear()
        self.driver.find_element_by_name("email").send_keys("foo@example.com")
        self.driver.find_element_by_name("old_pw").clear()
        self.driver.find_element_by_name("old_pw").send_keys("dummy")
        self.scroll_and_click(self.driver.find_element_by_class_name('btn-save'))
        self.driver.find_element_by_class_name("alert-danger")
        self.user = User.objects.get(pk=self.user.pk)
        # e-mail unchanged
        assert self.user.email == 'dummy@dummy.dummy'

    def test_change_password_require_password(self):
        """Changing the password without the old password is rejected."""
        self.driver.find_element_by_name("new_pw").send_keys("foo")
        self.driver.find_element_by_name("new_pw_repeat").send_keys("foo")
        self.scroll_and_click(self.driver.find_element_by_class_name('btn-save'))
        self.driver.find_element_by_class_name("alert-danger")
        # capture the stored hash before re-fetching, to prove it is unchanged
        pw = self.user.password
        self.user = User.objects.get(pk=self.user.pk)
        assert self.user.password == pw

    def test_change_password_success(self):
        """Changing the password works when the old password is supplied."""
        self.driver.find_element_by_name("new_pw").send_keys("foo")
        self.driver.find_element_by_name("new_pw_repeat").send_keys("foo")
        self.driver.find_element_by_name("old_pw").send_keys("dummy")
        self.scroll_and_click(self.driver.find_element_by_class_name('btn-save'))
        self.driver.find_element_by_class_name("alert-success")
        self.user = User.objects.get(pk=self.user.pk)
        assert self.user.check_password("foo")

    def test_change_password_require_repeat(self):
        """A mismatched password repetition is rejected."""
        self.driver.find_element_by_name("new_pw").send_keys("foo")
        self.driver.find_element_by_name("new_pw_repeat").send_keys("bar")
        self.driver.find_element_by_name("old_pw").send_keys("dummy")
        self.scroll_and_click(self.driver.find_element_by_class_name('btn-save'))
        self.driver.find_element_by_class_name("alert-danger")
        # capture the stored hash before re-fetching, to prove it is unchanged
        pw = self.user.password
        self.user = User.objects.get(pk=self.user.pk)
        assert self.user.password == pw
| 54.184466
| 114
| 0.715463
| 815
| 5,581
| 4.577914
| 0.117791
| 0.134012
| 0.17636
| 0.26454
| 0.837577
| 0.784508
| 0.775931
| 0.733047
| 0.726883
| 0.718842
| 0
| 0.000633
| 0.150152
| 5,581
| 102
| 115
| 54.715686
| 0.786
| 0.007884
| 0
| 0.602273
| 0
| 0
| 0.141489
| 0.004518
| 0
| 0
| 0
| 0
| 0.102273
| 1
| 0.102273
| false
| 0.125
| 0.045455
| 0
| 0.159091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
d4d1bea6bc7c98ce1e9f1db549e6e7d617eea23d
| 76
|
py
|
Python
|
__init__.py
|
NorthGuard/axes_sequence
|
6c9d28c3b18e3f9cf899056a5ff86cff0ec547f3
|
[
"MIT"
] | 1
|
2017-07-11T12:38:17.000Z
|
2017-07-11T12:38:17.000Z
|
__init__.py
|
NorthGuard/axes_sequence
|
6c9d28c3b18e3f9cf899056a5ff86cff0ec547f3
|
[
"MIT"
] | null | null | null |
__init__.py
|
NorthGuard/axes_sequence
|
6c9d28c3b18e3f9cf899056a5ff86cff0ec547f3
|
[
"MIT"
] | null | null | null |
from .axes_sequence import AxesSequence
from .axes_sequence import AxesGrid
| 25.333333
| 39
| 0.868421
| 10
| 76
| 6.4
| 0.6
| 0.25
| 0.5
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 76
| 2
| 40
| 38
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d4e2842a4444814e240d47436217ef5564025f19
| 98,347
|
py
|
Python
|
tables/tests/test_do_undo.py
|
Marco-Sulla/PyTables
|
c06642ed12b1c99df76feb11f08a37b3e479ffbc
|
[
"BSD-3-Clause"
] | 1
|
2021-02-10T15:56:05.000Z
|
2021-02-10T15:56:05.000Z
|
tables/tests/test_do_undo.py
|
Marco-Sulla/PyTables
|
c06642ed12b1c99df76feb11f08a37b3e479ffbc
|
[
"BSD-3-Clause"
] | 1
|
2021-01-22T18:23:27.000Z
|
2021-01-22T18:23:27.000Z
|
tables/tests/test_do_undo.py
|
Marco-Sulla/PyTables
|
c06642ed12b1c99df76feb11f08a37b3e479ffbc
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import warnings
import tables
from tables import IsDescription, StringCol, BoolCol, IntCol, FloatCol
from tables.node import NotLoggedMixin
from tables.path import join_path
from tables.tests import common
from tables.tests.common import unittest
from tables.tests.common import PyTablesTestCase as TestCase
class BasicTestCase(common.TempFileMixin, TestCase):
"""Test for basic Undo/Redo operations."""
_reopen_flag = False
"""Whether to reopen the file at certain points."""
def _do_reopen(self):
if self._reopen_flag:
self._reopen('r+')
def setUp(self):
super(BasicTestCase, self).setUp()
h5file = self.h5file
root = h5file.root
# Create an array
h5file.create_array(root, 'array', [1, 2], title="Title example")
# Create another array object
h5file.create_array(root, 'anarray', [1], "Array title")
# Create a group object
group = h5file.create_group(root, 'agroup', "Group title")
# Create a couple of objects there
h5file.create_array(group, 'anarray1', [2], "Array title 1")
h5file.create_array(group, 'anarray2', [2], "Array title 2")
# Create a lonely group in first level
h5file.create_group(root, 'agroup2', "Group title 2")
# Create a new group in the second level
h5file.create_group(group, 'agroup3', "Group title 3")
def test00_simple(self):
"""Checking simple do/undo."""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test00_simple..." % self.__class__.__name__)
# open the do/undo
self.h5file.enable_undo()
# Create a new array
self.h5file.create_array('/', 'otherarray', [3, 4], "Another array")
# Now undo the past operation
self.h5file.undo()
# Check that otherarray does not exist in the object tree
self.assertNotIn("/otherarray", self.h5file)
self.assertEqual(self.h5file._curaction, 0)
self.assertEqual(self.h5file._curmark, 0)
# Redo the operation
self._do_reopen()
self.h5file.redo()
if common.verbose:
print("Object tree after redo:", self.h5file)
# Check that otherarray has come back to life in a sane state
self.assertIn("/otherarray", self.h5file)
self.assertEqual(self.h5file.root.otherarray.read(), [3, 4])
self.assertEqual(self.h5file.root.otherarray.title, "Another array")
self.assertEqual(self.h5file._curaction, 1)
self.assertEqual(self.h5file._curmark, 0)
def test01_twice(self):
"""Checking do/undo (twice operations intertwined)"""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test01_twice..." % self.__class__.__name__)
# open the do/undo
self.h5file.enable_undo()
# Create a new array
self.h5file.create_array('/', 'otherarray', [3, 4], "Another array")
self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
# Now undo the past operations
self._do_reopen()
self.h5file.undo()
self.assertNotIn("/otherarray", self.h5file)
self.assertNotIn("/otherarray2", self.h5file)
self.assertEqual(self.h5file._curaction, 0)
self.assertEqual(self.h5file._curmark, 0)
# Redo the operation
self.h5file.redo()
# Check that otherarray has come back to life in a sane state
self.assertIn("/otherarray", self.h5file)
self.assertIn("/otherarray2", self.h5file)
self.assertEqual(self.h5file.root.otherarray.read(), [3, 4])
self.assertEqual(self.h5file.root.otherarray2.read(), [4, 5])
self.assertEqual(self.h5file.root.otherarray.title, "Another array")
self.assertEqual(self.h5file.root.otherarray2.title, "Another array 2")
self.assertEqual(self.h5file._curaction, 2)
self.assertEqual(self.h5file._curmark, 0)
def test02_twice2(self):
"""Checking twice ops and two marks."""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test02_twice2..." % self.__class__.__name__)
# open the do/undo
self.h5file.enable_undo()
# Create a new array
self.h5file.create_array('/', 'otherarray', [3, 4], "Another array")
# Put a mark
self._do_reopen()
self.h5file.mark()
self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
self.assertEqual(self.h5file._curaction, 3)
self.assertEqual(self.h5file._curmark, 1)
# Unwind just one mark
self.h5file.undo()
self.assertIn("/otherarray", self.h5file)
self.assertNotIn("/otherarray2", self.h5file)
self.assertEqual(self.h5file._curaction, 2)
self.assertEqual(self.h5file._curmark, 1)
# Unwind another mark
self.h5file.undo()
self.assertEqual(self.h5file._curaction, 0)
self.assertEqual(self.h5file._curmark, 0)
self.assertNotIn("/otherarray", self.h5file)
self.assertNotIn("/otherarray2", self.h5file)
# Redo until the next mark
self.h5file.redo()
self.assertIn("/otherarray", self.h5file)
self.assertNotIn("/otherarray2", self.h5file)
self._do_reopen()
self.assertEqual(self.h5file._curaction, 2)
self.assertEqual(self.h5file._curmark, 1)
# Redo until the end
self.h5file.redo()
self.assertIn("/otherarray", self.h5file)
self.assertIn("/otherarray2", self.h5file)
self.assertEqual(self.h5file.root.otherarray.read(), [3, 4])
self.assertEqual(self.h5file.root.otherarray2.read(), [4, 5])
self.assertEqual(self.h5file.root.otherarray.title, "Another array")
self.assertEqual(self.h5file.root.otherarray2.title, "Another array 2")
self.assertEqual(self.h5file._curaction, 3)
self.assertEqual(self.h5file._curmark, 1)
def test03_6times3marks(self):
"""Checking with six ops and three marks."""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test03_6times3marks..." %
self.__class__.__name__)
# open the do/undo
self.h5file.enable_undo()
# Create a new array
self.h5file.create_array('/', 'otherarray1', [3, 4], "Another array 1")
self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
# Put a mark
self.h5file.mark()
self.h5file.create_array('/', 'otherarray3', [5, 6], "Another array 3")
self.h5file.create_array('/', 'otherarray4', [6, 7], "Another array 4")
# Put a mark
self._do_reopen()
self.h5file.mark()
self.h5file.create_array('/', 'otherarray5', [7, 8], "Another array 5")
self.h5file.create_array('/', 'otherarray6', [8, 9], "Another array 6")
# Unwind just one mark
self.h5file.undo()
self.assertIn("/otherarray1", self.h5file)
self.assertIn("/otherarray2", self.h5file)
self.assertIn("/otherarray3", self.h5file)
self.assertIn("/otherarray4", self.h5file)
self.assertNotIn("/otherarray5", self.h5file)
self.assertNotIn("/otherarray6", self.h5file)
# Unwind another mark
self.h5file.undo()
self.assertIn("/otherarray1", self.h5file)
self.assertIn("/otherarray2", self.h5file)
self.assertNotIn("/otherarray3", self.h5file)
self.assertNotIn("/otherarray4", self.h5file)
self.assertNotIn("/otherarray5", self.h5file)
self.assertNotIn("/otherarray6", self.h5file)
# Unwind all marks
self.h5file.undo()
self.assertNotIn("/otherarray1", self.h5file)
self.assertNotIn("/otherarray2", self.h5file)
self.assertNotIn("/otherarray3", self.h5file)
self.assertNotIn("/otherarray4", self.h5file)
self.assertNotIn("/otherarray5", self.h5file)
self.assertNotIn("/otherarray6", self.h5file)
# Redo until the next mark
self._do_reopen()
self.h5file.redo()
self.assertIn("/otherarray1", self.h5file)
self.assertIn("/otherarray2", self.h5file)
self.assertNotIn("/otherarray3", self.h5file)
self.assertNotIn("/otherarray4", self.h5file)
self.assertNotIn("/otherarray5", self.h5file)
self.assertNotIn("/otherarray6", self.h5file)
# Redo until the next mark
self.h5file.redo()
self.assertIn("/otherarray1", self.h5file)
self.assertIn("/otherarray2", self.h5file)
self.assertIn("/otherarray3", self.h5file)
self.assertIn("/otherarray4", self.h5file)
self.assertNotIn("/otherarray5", self.h5file)
self.assertNotIn("/otherarray6", self.h5file)
# Redo until the end
self.h5file.redo()
self.assertIn("/otherarray1", self.h5file)
self.assertIn("/otherarray2", self.h5file)
self.assertIn("/otherarray3", self.h5file)
self.assertIn("/otherarray4", self.h5file)
self.assertIn("/otherarray5", self.h5file)
self.assertIn("/otherarray6", self.h5file)
self.assertEqual(self.h5file.root.otherarray1.read(), [3, 4])
self.assertEqual(self.h5file.root.otherarray2.read(), [4, 5])
self.assertEqual(self.h5file.root.otherarray3.read(), [5, 6])
self.assertEqual(self.h5file.root.otherarray4.read(), [6, 7])
self.assertEqual(self.h5file.root.otherarray5.read(), [7, 8])
self.assertEqual(self.h5file.root.otherarray6.read(), [8, 9])
self.assertEqual(self.h5file.root.otherarray1.title, "Another array 1")
self.assertEqual(self.h5file.root.otherarray2.title, "Another array 2")
self.assertEqual(self.h5file.root.otherarray3.title, "Another array 3")
self.assertEqual(self.h5file.root.otherarray4.title, "Another array 4")
self.assertEqual(self.h5file.root.otherarray5.title, "Another array 5")
self.assertEqual(self.h5file.root.otherarray6.title, "Another array 6")
def test04_6times3marksro(self):
"""Checking with six operations, three marks and do/undo in random
order."""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test04_6times3marksro..." %
self.__class__.__name__)
# open the do/undo
self.h5file.enable_undo()
# Create a new array
self.h5file.create_array('/', 'otherarray1', [3, 4], "Another array 1")
self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
# Put a mark
self.h5file.mark()
self._do_reopen()
self.h5file.create_array('/', 'otherarray3', [5, 6], "Another array 3")
self.h5file.create_array('/', 'otherarray4', [6, 7], "Another array 4")
# Unwind the previous mark
self.h5file.undo()
self.assertIn("/otherarray1", self.h5file)
self.assertIn("/otherarray2", self.h5file)
self.assertNotIn("/otherarray3", self.h5file)
self.assertNotIn("/otherarray4", self.h5file)
# Put a mark in the middle of stack
if common.verbose:
print("All nodes:", self.h5file.walk_nodes())
self.h5file.mark()
self._do_reopen()
self.h5file.create_array('/', 'otherarray5', [7, 8], "Another array 5")
self.h5file.create_array('/', 'otherarray6', [8, 9], "Another array 6")
self.assertIn("/otherarray1", self.h5file)
self.assertIn("/otherarray2", self.h5file)
self.assertNotIn("/otherarray3", self.h5file)
self.assertNotIn("/otherarray4", self.h5file)
self.assertIn("/otherarray5", self.h5file)
self.assertIn("/otherarray6", self.h5file)
# Unwind previous mark
self.h5file.undo()
self.assertIn("/otherarray1", self.h5file)
self.assertIn("/otherarray2", self.h5file)
self.assertNotIn("/otherarray3", self.h5file)
self.assertNotIn("/otherarray4", self.h5file)
self.assertNotIn("/otherarray5", self.h5file)
self.assertNotIn("/otherarray6", self.h5file)
# Redo until the last mark
self.h5file.redo()
self.assertIn("/otherarray1", self.h5file)
self.assertIn("/otherarray2", self.h5file)
self.assertNotIn("/otherarray3", self.h5file)
self.assertNotIn("/otherarray4", self.h5file)
self.assertIn("/otherarray5", self.h5file)
self.assertIn("/otherarray6", self.h5file)
# Redo until the next mark (non-existent, so no action)
self._do_reopen()
self.h5file.redo()
self.assertIn("/otherarray1", self.h5file)
self.assertIn("/otherarray2", self.h5file)
self.assertNotIn("/otherarray3", self.h5file)
self.assertNotIn("/otherarray4", self.h5file)
self.assertIn("/otherarray5", self.h5file)
self.assertIn("/otherarray6", self.h5file)
self.assertEqual(self.h5file.root.otherarray1.read(), [3, 4])
self.assertEqual(self.h5file.root.otherarray2.read(), [4, 5])
self.assertEqual(self.h5file.root.otherarray5.read(), [7, 8])
self.assertEqual(self.h5file.root.otherarray6.read(), [8, 9])
self.assertEqual(self.h5file.root.otherarray1.title, "Another array 1")
self.assertEqual(self.h5file.root.otherarray2.title, "Another array 2")
self.assertEqual(self.h5file.root.otherarray5.title, "Another array 5")
self.assertEqual(self.h5file.root.otherarray6.title, "Another array 6")
def test05_destructive(self):
    """Checking with a destructive action during undo.

    After an undo(), creating a new node is a "destructive" action: the
    undone node ('otherarray2') must no longer be recoverable, while the
    node created afterwards ('otherarray3') must exist.
    """
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test05_destructive..." % self.__class__.__name__)
    # open the do/undo
    self.h5file.enable_undo()
    # Create a new array
    self.h5file.create_array('/', 'otherarray1', [3, 4], "Another array 1")
    # Put a mark
    self.h5file.mark()
    self._do_reopen()
    self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
    # Now undo the past operation
    self.h5file.undo()
    # Do the destructive operation
    self._do_reopen()
    self.h5file.create_array('/', 'otherarray3', [5, 6], "Another array 3")
    # Check objects: otherarray1 (before the mark) survives, the undone
    # otherarray2 is gone, and the newly created otherarray3 is present.
    self.assertIn("/otherarray1", self.h5file)
    self.assertEqual(self.h5file.root.otherarray1.read(), [3, 4])
    self.assertEqual(self.h5file.root.otherarray1.title, "Another array 1")
    self.assertNotIn("/otherarray2", self.h5file)
    self.assertIn("/otherarray3", self.h5file)
    self.assertEqual(self.h5file.root.otherarray3.read(), [5, 6])
    self.assertEqual(self.h5file.root.otherarray3.title, "Another array 3")
def test05b_destructive(self):
    """Checking with a destructive action during undo (II)

    Variant of test05_destructive: after the destructive creation a new
    mark is set and a further node is created; a single undo() must then
    remove only that last node ('otherarray4').
    """
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test05b_destructive..." %
              self.__class__.__name__)
    # open the do/undo
    self.h5file.enable_undo()
    # Create a new array
    self.h5file.create_array('/', 'otherarray1', [3, 4], "Another array 1")
    # Put a mark
    self._do_reopen()
    self.h5file.mark()
    self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
    # Now undo the past operation
    self.h5file.undo()
    # Do the destructive operation
    self.h5file.create_array('/', 'otherarray3', [5, 6], "Another array 3")
    # Put a mark
    self._do_reopen()
    self.h5file.mark()
    self.h5file.create_array('/', 'otherarray4', [6, 7], "Another array 4")
    self.assertIn("/otherarray4", self.h5file)
    # Now undo the past operation
    self.h5file.undo()
    # Check objects: only otherarray4 (after the last mark) was undone.
    self.assertIn("/otherarray1", self.h5file)
    self.assertEqual(self.h5file.root.otherarray1.read(), [3, 4])
    self.assertEqual(self.h5file.root.otherarray1.title, "Another array 1")
    self.assertNotIn("/otherarray2", self.h5file)
    self.assertIn("/otherarray3", self.h5file)
    self.assertEqual(self.h5file.root.otherarray3.read(), [5, 6])
    self.assertEqual(self.h5file.root.otherarray3.title, "Another array 3")
    self.assertNotIn("/otherarray4", self.h5file)
def test05c_destructive(self):
    """Checking with a destructive action during undo (III)

    Like test05b, but unwinding twice after the destructive action: the
    second undo() must also remove the destructively created
    'otherarray3', leaving only 'otherarray1'.
    """
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test05c_destructive..." %
              self.__class__.__name__)
    # open the do/undo
    self.h5file.enable_undo()
    # Create a new array
    self.h5file.create_array('/', 'otherarray1', [3, 4], "Another array 1")
    # Put a mark
    self.h5file.mark()
    self._do_reopen()
    self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
    # Now undo the past operation
    self.h5file.undo()
    # Do the destructive operation
    self.h5file.create_array('/', 'otherarray3', [5, 6], "Another array 3")
    # Put a mark
    self.h5file.mark()
    self._do_reopen()
    self.h5file.create_array('/', 'otherarray4', [6, 7], "Another array 4")
    self.assertIn("/otherarray4", self.h5file)
    # Now unwind twice
    self.h5file.undo()
    self._do_reopen()
    self.h5file.undo()
    # Check objects
    self.assertIn("/otherarray1", self.h5file)
    self.assertNotIn("/otherarray2", self.h5file)
    self.assertNotIn("/otherarray3", self.h5file)
    self.assertNotIn("/otherarray4", self.h5file)
def test05d_destructive(self):
    """Checking with a destructive action during undo (IV)

    After a destructive creation, undo(0) must rewind the whole session
    back to the implicit initial mark, removing every created node.
    """
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test05d_destructive..." %
              self.__class__.__name__)
    # open the do/undo
    self.h5file.enable_undo()
    # Create a new array
    self.h5file.create_array('/', 'otherarray1', [3, 4], "Another array 1")
    # Put a mark
    self._do_reopen()
    self.h5file.mark()
    self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
    # Now undo the past operation
    self.h5file.undo()
    # Do the destructive operation
    self.h5file.create_array('/', 'otherarray3', [5, 6], "Another array 3")
    # Put a mark
    self.h5file.mark()
    self.h5file.create_array('/', 'otherarray4', [6, 7], "Another array 4")
    self.assertIn("/otherarray4", self.h5file)
    # Now, go to the first mark
    self._do_reopen()
    self.h5file.undo(0)
    # Check objects: nothing created inside the session remains.
    self.assertNotIn("/otherarray1", self.h5file)
    self.assertNotIn("/otherarray2", self.h5file)
    self.assertNotIn("/otherarray3", self.h5file)
    self.assertNotIn("/otherarray4", self.h5file)
def test05e_destructive(self):
    """Checking with a destructive action during undo (V)

    The destructive creation happens after a reopen; a total unwind with
    undo(0) must still remove all nodes created in the session.
    """
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test05e_destructive..." %
              self.__class__.__name__)
    # open the do/undo
    self.h5file.enable_undo()
    # Create a new array
    self.h5file.create_array('/', 'otherarray1', [3, 4], "Another array 1")
    # Put a mark
    self.h5file.mark()
    self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
    # Now undo the past operation
    self.h5file.undo()
    self._do_reopen()
    # Do the destructive operation
    self.h5file.create_array('/', 'otherarray3', [5, 6], "Another array 3")
    # Now, unwind the actions
    self.h5file.undo(0)
    self._do_reopen()
    # Check objects
    self.assertNotIn("/otherarray1", self.h5file)
    self.assertNotIn("/otherarray2", self.h5file)
    self.assertNotIn("/otherarray3", self.h5file)
def test05f_destructive(self):
    """Checking with a destructive creation of existing node during undo

    Re-creating a node with the same name after its creation was undone
    must be undoable/redoable itself, and redo must bring back the very
    same node object when the file has not been reopened.
    """
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test05f_destructive..." %
              self.__class__.__name__)
    self.h5file.enable_undo()
    self.h5file.create_array('/', 'newarray', [1])
    self.h5file.undo()
    self._do_reopen()
    self.assertNotIn('/newarray', self.h5file)
    # Destructively re-create a node with the previously undone name.
    newarr = self.h5file.create_array('/', 'newarray', [1])
    self.h5file.undo()
    self.assertNotIn('/newarray', self.h5file)
    self._do_reopen()
    self.h5file.redo()
    self.assertIn('/newarray', self.h5file)
    # Identity check only makes sense when the file was never reopened.
    if not self._reopen_flag:
        self.assertIs(self.h5file.root.newarray, newarr)
def test06_totalunwind(self):
    """Checking do/undo (total unwind)"""
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test06_totalunwind..." % self.__class__.__name__)
    # Start an undo session and record two creations separated by a mark.
    self.h5file.enable_undo()
    self.h5file.create_array('/', 'otherarray', [3, 4], "Another array")
    self.h5file.mark()
    self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
    # Rewind the whole session back to the implicit initial mark (id 0).
    self._do_reopen()
    self.h5file.undo(0)
    # Neither node created during the session may survive the unwind.
    for path in ("/otherarray", "/otherarray2"):
        self.assertNotIn(path, self.h5file)
def test07_totalrewind(self):
    """Checking do/undo (total rewind)

    After a full unwind with undo(0), redo(-1) must replay every action
    and restore all nodes with their original data and titles.
    """
    if common.verbose:
        print('\n', '-=' * 30)
        # Fixed: the banner previously printed "test07_totalunwind",
        # which mislabelled this test in verbose runs.
        print("Running %s.test07_totalrewind..." % self.__class__.__name__)
    # open the do/undo
    self.h5file.enable_undo()
    # Create a new array
    self.h5file.create_array('/', 'otherarray', [3, 4], "Another array")
    self.h5file.mark()
    self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
    # Now undo the past operations
    self.h5file.undo(0)
    # Redo all the operations
    self._do_reopen()
    self.h5file.redo(-1)
    # Check that objects has come back to life in a sane state
    self.assertIn("/otherarray", self.h5file)
    self.assertIn("/otherarray2", self.h5file)
    self.assertEqual(self.h5file.root.otherarray.read(), [3, 4])
    self.assertEqual(self.h5file.root.otherarray2.read(), [4, 5])
    self.assertEqual(self.h5file.root.otherarray.title, "Another array")
    self.assertEqual(self.h5file.root.otherarray2.title, "Another array 2")
def test08_marknames(self):
    """Checking mark names.

    Navigates with undo(name)/redo(name) between the named marks
    "first", "second" and "third", and finally redo(-1) to the end,
    checking the visible node set at each stop.
    """
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test08_marknames..." % self.__class__.__name__)
    # open the do/undo
    self.h5file.enable_undo()
    # Create a new array
    self.h5file.create_array('/', 'otherarray1', [3, 4], "Another array 1")
    self.h5file.mark("first")
    self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
    self.h5file.mark("second")
    self.h5file.create_array('/', 'otherarray3', [5, 6], "Another array 3")
    self.h5file.mark("third")
    self.h5file.create_array('/', 'otherarray4', [6, 7], "Another array 4")
    # Now go to mark "first"
    self.h5file.undo("first")
    self._do_reopen()
    self.assertIn("/otherarray1", self.h5file)
    self.assertNotIn("/otherarray2", self.h5file)
    self.assertNotIn("/otherarray3", self.h5file)
    self.assertNotIn("/otherarray4", self.h5file)
    # Go to mark "third"
    self.h5file.redo("third")
    self.assertIn("/otherarray1", self.h5file)
    self.assertIn("/otherarray2", self.h5file)
    self.assertIn("/otherarray3", self.h5file)
    self.assertNotIn("/otherarray4", self.h5file)
    # Now go to mark "second"
    self.h5file.undo("second")
    self.assertIn("/otherarray1", self.h5file)
    self.assertIn("/otherarray2", self.h5file)
    self.assertNotIn("/otherarray3", self.h5file)
    self.assertNotIn("/otherarray4", self.h5file)
    # Go to the end
    self._do_reopen()
    self.h5file.redo(-1)
    self.assertIn("/otherarray1", self.h5file)
    self.assertIn("/otherarray2", self.h5file)
    self.assertIn("/otherarray3", self.h5file)
    self.assertIn("/otherarray4", self.h5file)
    # Check that objects has come back to life in a sane state
    self.assertEqual(self.h5file.root.otherarray1.read(), [3, 4])
    self.assertEqual(self.h5file.root.otherarray2.read(), [4, 5])
    self.assertEqual(self.h5file.root.otherarray3.read(), [5, 6])
    self.assertEqual(self.h5file.root.otherarray4.read(), [6, 7])
def test08_initialmark(self):
    """Checking initial mark.

    The mark id returned by get_current_mark() right after
    enable_undo() can later be passed to undo() to rewind the whole
    session; redo(-1) must then restore everything.
    """
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test08_initialmark..." % self.__class__.__name__)
    # open the do/undo
    self.h5file.enable_undo()
    # Remember the implicit initial mark id.
    initmid = self.h5file.get_current_mark()
    # Create a new array
    self.h5file.create_array('/', 'otherarray', [3, 4], "Another array")
    self.h5file.mark()
    self._do_reopen()
    self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
    # Now undo the past operations
    self.h5file.undo(initmid)
    self.assertNotIn("/otherarray", self.h5file)
    self.assertNotIn("/otherarray2", self.h5file)
    # Redo all the operations
    self.h5file.redo(-1)
    self._do_reopen()
    # Check that objects has come back to life in a sane state
    self.assertIn("/otherarray", self.h5file)
    self.assertIn("/otherarray2", self.h5file)
    self.assertEqual(self.h5file.root.otherarray.read(), [3, 4])
    self.assertEqual(self.h5file.root.otherarray2.read(), [4, 5])
    self.assertEqual(self.h5file.root.otherarray.title, "Another array")
    self.assertEqual(self.h5file.root.otherarray2.title, "Another array 2")
def test09_marknames(self):
    """Checking mark names (wrong direction)

    undo() toward a mark that lies forward, or redo() toward a mark that
    lies backward, must raise tables.UndoRedoError and leave the file
    state untouched.
    """
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test09_marknames..." % self.__class__.__name__)
    # open the do/undo
    self.h5file.enable_undo()
    # Create a new array
    self.h5file.create_array('/', 'otherarray1', [3, 4], "Another array 1")
    self.h5file.mark("first")
    self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
    self.h5file.mark("second")
    self._do_reopen()
    self.h5file.create_array('/', 'otherarray3', [5, 6], "Another array 3")
    self.h5file.mark("third")
    self.h5file.create_array('/', 'otherarray4', [6, 7], "Another array 4")
    # Now go to mark "first"
    self.h5file.undo("first")
    # Try to undo up to mark "third" (forward of here: must fail)
    with self.assertRaises(tables.UndoRedoError):
        self.h5file.undo("third")
    # Now go to mark "third"
    self.h5file.redo("third")
    self._do_reopen()
    # Try to redo up to mark "second" (backward of here: must fail)
    with self.assertRaises(tables.UndoRedoError):
        self.h5file.redo("second")
    # Final checks
    self.assertIn("/otherarray1", self.h5file)
    self.assertIn("/otherarray2", self.h5file)
    self.assertIn("/otherarray3", self.h5file)
    self.assertNotIn("/otherarray4", self.h5file)
def test10_goto(self):
    """Checking mark names (goto)

    goto(name) must move to the named mark in either direction, and
    goto(-1) to the latest state, with the visible node set checked at
    each stop.
    """
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test10_goto..." % self.__class__.__name__)
    # open the do/undo
    self.h5file.enable_undo()
    # Create a new array
    self.h5file.create_array('/', 'otherarray1', [3, 4], "Another array 1")
    self._do_reopen()
    self.h5file.mark("first")
    self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
    self.h5file.mark("second")
    self.h5file.create_array('/', 'otherarray3', [5, 6], "Another array 3")
    self._do_reopen()
    self.h5file.mark("third")
    self.h5file.create_array('/', 'otherarray4', [6, 7], "Another array 4")
    # Now go to mark "first"
    self.h5file.goto("first")
    self.assertIn("/otherarray1", self.h5file)
    self.assertNotIn("/otherarray2", self.h5file)
    self.assertNotIn("/otherarray3", self.h5file)
    self.assertNotIn("/otherarray4", self.h5file)
    # Go to mark "third" (forward jump)
    self.h5file.goto("third")
    self.assertIn("/otherarray1", self.h5file)
    self.assertIn("/otherarray2", self.h5file)
    self.assertIn("/otherarray3", self.h5file)
    self.assertNotIn("/otherarray4", self.h5file)
    # Now go to mark "second" (backward jump)
    self._do_reopen()
    self.h5file.goto("second")
    self.assertIn("/otherarray1", self.h5file)
    self.assertIn("/otherarray2", self.h5file)
    self.assertNotIn("/otherarray3", self.h5file)
    self.assertNotIn("/otherarray4", self.h5file)
    # Go to the end
    self.h5file.goto(-1)
    self.assertIn("/otherarray1", self.h5file)
    self.assertIn("/otherarray2", self.h5file)
    self.assertIn("/otherarray3", self.h5file)
    self.assertIn("/otherarray4", self.h5file)
    # Check that objects has come back to life in a sane state
    # NOTE(review): the next assertion duplicates the one four lines up.
    self.assertIn("/otherarray2", self.h5file)
    self.assertEqual(self.h5file.root.otherarray1.read(), [3, 4])
    self.assertEqual(self.h5file.root.otherarray2.read(), [4, 5])
    self.assertEqual(self.h5file.root.otherarray3.read(), [5, 6])
    self.assertEqual(self.h5file.root.otherarray4.read(), [6, 7])
def test10_gotoint(self):
    """Checking mark sequential ids (goto)

    Same navigation as test10_goto but addressing marks by their
    sequential integer ids: 0 is the initial mark, 1..3 the explicit
    marks, -1 the latest state.
    """
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test10_gotoint..." % self.__class__.__name__)
    # open the do/undo
    self.h5file.enable_undo()
    # Create a new array
    self.h5file.create_array('/', 'otherarray1', [3, 4], "Another array 1")
    self.h5file.mark("first")
    self.h5file.create_array('/', 'otherarray2', [4, 5], "Another array 2")
    self.h5file.mark("second")
    self._do_reopen()
    self.h5file.create_array('/', 'otherarray3', [5, 6], "Another array 3")
    self.h5file.mark("third")
    self.h5file.create_array('/', 'otherarray4', [6, 7], "Another array 4")
    # Now go to mark "first" (id 1)
    self.h5file.goto(1)
    self._do_reopen()
    self.assertIn("/otherarray1", self.h5file)
    self.assertNotIn("/otherarray2", self.h5file)
    self.assertNotIn("/otherarray3", self.h5file)
    self.assertNotIn("/otherarray4", self.h5file)
    # Go to beginning (initial mark, id 0)
    self.h5file.goto(0)
    self.assertNotIn("/otherarray1", self.h5file)
    self.assertNotIn("/otherarray2", self.h5file)
    self.assertNotIn("/otherarray3", self.h5file)
    self.assertNotIn("/otherarray4", self.h5file)
    # Go to mark "third" (id 3)
    self._do_reopen()
    self.h5file.goto(3)
    self.assertIn("/otherarray1", self.h5file)
    self.assertIn("/otherarray2", self.h5file)
    self.assertIn("/otherarray3", self.h5file)
    self.assertNotIn("/otherarray4", self.h5file)
    # Now go to mark "second" (id 2)
    self.h5file.goto(2)
    self.assertIn("/otherarray1", self.h5file)
    self.assertIn("/otherarray2", self.h5file)
    self.assertNotIn("/otherarray3", self.h5file)
    self.assertNotIn("/otherarray4", self.h5file)
    # Go to the end
    self._do_reopen()
    self.h5file.goto(-1)
    self.assertIn("/otherarray1", self.h5file)
    self.assertIn("/otherarray2", self.h5file)
    self.assertIn("/otherarray3", self.h5file)
    self.assertIn("/otherarray4", self.h5file)
    # Check that objects has come back to life in a sane state
    self.assertIn("/otherarray2", self.h5file)
    self.assertEqual(self.h5file.root.otherarray1.read(), [3, 4])
    self.assertEqual(self.h5file.root.otherarray2.read(), [4, 5])
    self.assertEqual(self.h5file.root.otherarray3.read(), [5, 6])
    self.assertEqual(self.h5file.root.otherarray4.read(), [6, 7])
def test11_contiguous(self):
    """Creating contiguous marks"""
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test11_contiguous..." % self.__class__.__name__)
    self.h5file.enable_undo()
    # Two back-to-back marks with no action in between must still get
    # distinct ids, and undo/redo/goto must be able to land on each.
    mark_a = self.h5file.mark()
    mark_b = self.h5file.mark()
    self.assertNotEqual(mark_a, mark_b)
    self._do_reopen()
    self.h5file.undo(mark_a)
    self.assertEqual(self.h5file.get_current_mark(), mark_a)
    self.h5file.redo(mark_b)
    self.assertEqual(self.h5file.get_current_mark(), mark_b)
    self.h5file.goto(mark_a)
    self.assertEqual(self.h5file.get_current_mark(), mark_a)
    self.h5file.goto(mark_b)
    self.assertEqual(self.h5file.get_current_mark(), mark_b)
    # goto(-1) lands on the latest mark, goto(0) on the initial one.
    self.h5file.goto(-1)
    self._do_reopen()
    self.assertEqual(self.h5file.get_current_mark(), mark_b)
    self.h5file.goto(0)
    self.assertEqual(self.h5file.get_current_mark(), 0)
def test12_keepMark(self):
    """Ensuring the mark is kept after an UNDO operation"""
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test12_keepMark..." % self.__class__.__name__)
    self.h5file.enable_undo()
    self.h5file.create_array('/', 'newarray1', [1])
    mark_id = self.h5file.mark()
    self.assertIsNotNone(mark_id)
    self._do_reopen()
    self.h5file.undo()
    # Undoing from the freshly set mark must land on the initial mark
    # (id 0), which in turn removes the array created in between.
    self.assertEqual(self.h5file.get_current_mark(), 0)
    self.assertNotIn('/newarray1', self.h5file)
def test13_severalEnableDisable(self):
    """Checking that successive enable/disable Undo works

    Runs three consecutive undo sessions, with un-tracked actions
    performed while undo is disabled in between; each new session must
    start from a fresh log.
    """
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test13_severalEnableDisable..." %
              self.__class__.__name__)
    self.h5file.enable_undo()
    self.h5file.create_array('/', 'newarray1', [1])
    self.h5file.undo()
    self._do_reopen()
    # We should have moved back to the initial mark (id 0).
    self.assertEqual(self.h5file.get_current_mark(), 0)
    # So /newarray1 should have been removed.
    self.assertNotIn('/newarray1', self.h5file)
    # Close this do/undo session
    self.h5file.disable_undo()
    # Do something (not tracked: undo is disabled)
    self.h5file.create_array('/', 'newarray2', [1])
    # Enable again do/undo
    self.h5file.enable_undo()
    self.h5file.create_array('/', 'newarray3', [1])
    mid = self.h5file.mark()
    self.h5file.create_array('/', 'newarray4', [1])
    self.h5file.undo()
    # We should have moved to 'mid' mark, not the initial mark.
    self.assertEqual(self.h5file.get_current_mark(), mid)
    # So /newarray2 and /newarray3 should still be there, while
    # /newarray4 (created after 'mid') should be gone.
    self.assertNotIn('/newarray1', self.h5file)
    self.assertIn('/newarray2', self.h5file)
    self.assertIn('/newarray3', self.h5file)
    self.assertNotIn('/newarray4', self.h5file)
    # Close this do/undo session
    self._do_reopen()
    self.h5file.disable_undo()
    # Enable again do/undo
    self.h5file.enable_undo()
    self.h5file.create_array('/', 'newarray1', [1])
    self.h5file.create_array('/', 'newarray4', [1])
    # All four arrays should be present now.
    self.assertIn('/newarray1', self.h5file)
    self.assertIn('/newarray2', self.h5file)
    self.assertIn('/newarray3', self.h5file)
    self.assertIn('/newarray4', self.h5file)
    self.h5file.undo()
    self._do_reopen()
    # Only the arrays created in this third session are undone.
    self.assertNotIn('/newarray1', self.h5file)
    self.assertIn('/newarray2', self.h5file)
    self.assertIn('/newarray3', self.h5file)
    self.assertNotIn('/newarray4', self.h5file)
    # Close this do/undo session
    self.h5file.disable_undo()
class PersistenceTestCase(BasicTestCase):
    """Test for basic Undo/Redo operations with persistence.

    Re-runs every BasicTestCase test with ``_reopen_flag`` set, so that
    ``_do_reopen()`` actually closes and reopens the HDF5 file, checking
    that the undo/redo log survives on disk.
    """

    # Make _do_reopen() really close/reopen the file between steps.
    _reopen_flag = True
class CreateArrayTestCase(common.TempFileMixin, TestCase):
    """Test for create_array operations

    Checks that Array creations recorded in an undo session can be
    undone and redone, including creations at several depth levels.
    """

    def setUp(self):
        """Build a small fixture tree of arrays and groups."""
        super(CreateArrayTestCase, self).setUp()
        h5file = self.h5file
        root = h5file.root
        # Create an array
        h5file.create_array(root, 'array', [1, 2], title="Title example")
        # Create another array object
        h5file.create_array(root, 'anarray', [1], "Array title")
        # Create a group object
        group = h5file.create_group(root, 'agroup', "Group title")
        # Create a couple of objects there
        h5file.create_array(group, 'anarray1', [2], "Array title 1")
        h5file.create_array(group, 'anarray2', [2], "Array title 2")
        # Create a lonely group in first level
        h5file.create_group(root, 'agroup2', "Group title 2")
        # Create a new group in the second level
        h5file.create_group(group, 'agroup3', "Group title 3")

    def test00(self):
        """Checking one action."""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test00..." % self.__class__.__name__)
        # open the do/undo
        self.h5file.enable_undo()
        # Create a new array
        self.h5file.create_array('/', 'otherarray1', [1, 2], "Another array 1")
        # Now undo the past operation
        self.h5file.undo()
        # Check that otherarray does not exist in the object tree
        self.assertNotIn("/otherarray1", self.h5file)
        # Redo the operation
        self.h5file.redo()
        # Check that otherarray has come back to life in a sane state
        self.assertIn("/otherarray1", self.h5file)
        self.assertEqual(self.h5file.root.otherarray1.title, "Another array 1")
        self.assertEqual(self.h5file.root.otherarray1.read(), [1, 2])

    def test01(self):
        """Checking two actions."""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01..." % self.__class__.__name__)
        # open the do/undo
        self.h5file.enable_undo()
        # Create a new array
        self.h5file.create_array('/', 'otherarray1', [1, 2], "Another array 1")
        self.h5file.create_array('/', 'otherarray2', [2, 3], "Another array 2")
        # Now undo the past operation (a single undo() covers both)
        self.h5file.undo()
        # Check that otherarray does not exist in the object tree
        self.assertNotIn("/otherarray1", self.h5file)
        self.assertNotIn("/otherarray2", self.h5file)
        # Redo the operation
        self.h5file.redo()
        # Check that otherarray has come back to life in a sane state
        self.assertIn("/otherarray1", self.h5file)
        self.assertIn("/otherarray2", self.h5file)
        self.assertEqual(self.h5file.root.otherarray1.title, "Another array 1")
        self.assertEqual(self.h5file.root.otherarray2.title, "Another array 2")
        self.assertEqual(self.h5file.root.otherarray1.read(), [1, 2])
        self.assertEqual(self.h5file.root.otherarray2.read(), [2, 3])

    def test02(self):
        """Checking three actions."""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02..." % self.__class__.__name__)
        # open the do/undo
        self.h5file.enable_undo()
        # Create a new array
        self.h5file.create_array('/', 'otherarray1', [1, 2], "Another array 1")
        self.h5file.create_array('/', 'otherarray2', [2, 3], "Another array 2")
        self.h5file.create_array('/', 'otherarray3', [3, 4], "Another array 3")
        # Now undo the past operation
        self.h5file.undo()
        # Check that otherarray does not exist in the object tree
        self.assertNotIn("/otherarray1", self.h5file)
        self.assertNotIn("/otherarray2", self.h5file)
        self.assertNotIn("/otherarray3", self.h5file)
        # Redo the operation
        self.h5file.redo()
        # Check that otherarray has come back to life in a sane state
        self.assertIn("/otherarray1", self.h5file)
        self.assertIn("/otherarray2", self.h5file)
        self.assertIn("/otherarray3", self.h5file)
        self.assertEqual(self.h5file.root.otherarray1.title, "Another array 1")
        self.assertEqual(self.h5file.root.otherarray2.title, "Another array 2")
        self.assertEqual(self.h5file.root.otherarray3.title, "Another array 3")
        self.assertEqual(self.h5file.root.otherarray1.read(), [1, 2])
        self.assertEqual(self.h5file.root.otherarray2.read(), [2, 3])
        self.assertEqual(self.h5file.root.otherarray3.read(), [3, 4])

    def test03(self):
        """Checking three actions in different depth levels."""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test03..." % self.__class__.__name__)
        # open the do/undo
        self.h5file.enable_undo()
        # Create a new array
        self.h5file.create_array('/', 'otherarray1', [1, 2], "Another array 1")
        self.h5file.create_array('/agroup', 'otherarray2',
                                 [2, 3], "Another array 2")
        self.h5file.create_array('/agroup/agroup3', 'otherarray3',
                                 [3, 4], "Another array 3")
        # Now undo the past operation
        self.h5file.undo()
        # Check that otherarray does not exist in the object tree
        self.assertNotIn("/otherarray1", self.h5file)
        self.assertNotIn("/agroup/otherarray2", self.h5file)
        self.assertNotIn("/agroup/agroup3/otherarray3", self.h5file)
        # Redo the operation
        self.h5file.redo()
        # Check that otherarray has come back to life in a sane state
        self.assertIn("/otherarray1", self.h5file)
        self.assertIn("/agroup/otherarray2", self.h5file)
        self.assertIn("/agroup/agroup3/otherarray3", self.h5file)
        self.assertEqual(self.h5file.root.otherarray1.title, "Another array 1")
        self.assertEqual(self.h5file.root.agroup.otherarray2.title,
                         "Another array 2")
        self.assertEqual(self.h5file.root.agroup.agroup3.otherarray3.title,
                         "Another array 3")
        self.assertEqual(self.h5file.root.otherarray1.read(), [1, 2])
        self.assertEqual(self.h5file.root.agroup.otherarray2.read(), [2, 3])
        self.assertEqual(self.h5file.root.agroup.agroup3.otherarray3.read(),
                         [3, 4])
class CreateGroupTestCase(common.TempFileMixin, TestCase):
    """Test for create_group operations

    Checks that Group creations recorded in an undo session can be
    undone and redone, including nested group hierarchies.
    """

    def setUp(self):
        """Build a small fixture tree of arrays and groups."""
        super(CreateGroupTestCase, self).setUp()
        h5file = self.h5file
        root = h5file.root
        # Create an array
        h5file.create_array(root, 'array', [1, 2], title="Title example")
        # Create another array object
        h5file.create_array(root, 'anarray', [1], "Array title")
        # Create a group object
        group = h5file.create_group(root, 'agroup', "Group title")
        # Create a couple of objects there
        h5file.create_array(group, 'anarray1', [2], "Array title 1")
        h5file.create_array(group, 'anarray2', [2], "Array title 2")
        # Create a lonely group in first level
        h5file.create_group(root, 'agroup2', "Group title 2")
        # Create a new group in the second level
        h5file.create_group(group, 'agroup3', "Group title 3")

    def test00(self):
        """Checking one action."""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test00..." % self.__class__.__name__)
        # open the do/undo
        self.h5file.enable_undo()
        # Create a new group
        self.h5file.create_group('/', 'othergroup1', "Another group 1")
        # Now undo the past operation
        self.h5file.undo()
        # Check that othergroup1 does not exist in the object tree
        self.assertNotIn("/othergroup1", self.h5file)
        # Redo the operation
        self.h5file.redo()
        # Check that othergroup1 has come back to life in a sane state
        self.assertIn("/othergroup1", self.h5file)
        self.assertEqual(self.h5file.root.othergroup1._v_title,
                         "Another group 1")

    def test01(self):
        """Checking two actions."""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01..." % self.__class__.__name__)
        # open the do/undo
        self.h5file.enable_undo()
        # Create a new group
        self.h5file.create_group('/', 'othergroup1', "Another group 1")
        self.h5file.create_group('/', 'othergroup2', "Another group 2")
        # Now undo the past operation
        self.h5file.undo()
        # Check that othergroup does not exist in the object tree
        self.assertNotIn("/othergroup1", self.h5file)
        self.assertNotIn("/othergroup2", self.h5file)
        # Redo the operation
        self.h5file.redo()
        # Check that othergroup* has come back to life in a sane state
        self.assertIn("/othergroup1", self.h5file)
        self.assertIn("/othergroup2", self.h5file)
        self.assertEqual(self.h5file.root.othergroup1._v_title,
                         "Another group 1")
        self.assertEqual(self.h5file.root.othergroup2._v_title,
                         "Another group 2")

    def test02(self):
        """Checking three actions."""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02..." % self.__class__.__name__)
        # open the do/undo
        self.h5file.enable_undo()
        # Create a new group
        self.h5file.create_group('/', 'othergroup1', "Another group 1")
        self.h5file.create_group('/', 'othergroup2', "Another group 2")
        self.h5file.create_group('/', 'othergroup3', "Another group 3")
        # Now undo the past operation
        self.h5file.undo()
        # Check that othergroup* does not exist in the object tree
        self.assertNotIn("/othergroup1", self.h5file)
        self.assertNotIn("/othergroup2", self.h5file)
        self.assertNotIn("/othergroup3", self.h5file)
        # Redo the operation
        self.h5file.redo()
        # Check that othergroup* has come back to life in a sane state
        self.assertIn("/othergroup1", self.h5file)
        self.assertIn("/othergroup2", self.h5file)
        self.assertIn("/othergroup3", self.h5file)
        self.assertEqual(self.h5file.root.othergroup1._v_title,
                         "Another group 1")
        self.assertEqual(self.h5file.root.othergroup2._v_title,
                         "Another group 2")
        self.assertEqual(self.h5file.root.othergroup3._v_title,
                         "Another group 3")

    def test03(self):
        """Checking three actions in different depth levels."""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test03..." % self.__class__.__name__)
        # open the do/undo
        self.h5file.enable_undo()
        # Create a new group
        self.h5file.create_group('/', 'othergroup1', "Another group 1")
        self.h5file.create_group(
            '/othergroup1', 'othergroup2', "Another group 2")
        self.h5file.create_group(
            '/othergroup1/othergroup2', 'othergroup3', "Another group 3")
        # Now undo the past operation
        self.h5file.undo()
        # Check that othergroup* does not exist in the object tree.
        # (Changed assertTrue(... not in ...) to assertNotIn for
        # consistency with the rest of the file and better failure
        # messages.)
        self.assertNotIn("/othergroup1", self.h5file)
        self.assertNotIn("/othergroup1/othergroup2", self.h5file)
        self.assertNotIn("/othergroup1/othergroup2/othergroup3", self.h5file)
        # Redo the operation
        self.h5file.redo()
        # Check that othergroup* has come back to life in a sane state
        self.assertIn("/othergroup1", self.h5file)
        self.assertIn("/othergroup1/othergroup2", self.h5file)
        self.assertIn("/othergroup1/othergroup2/othergroup3", self.h5file)
        self.assertEqual(self.h5file.root.othergroup1._v_title,
                         "Another group 1")
        self.assertEqual(self.h5file.root.othergroup1.othergroup2._v_title,
                         "Another group 2")
        self.assertEqual(
            self.h5file.root.othergroup1.othergroup2.othergroup3._v_title,
            "Another group 3")
minRowIndex = 10
def populateTable(where, name):
    """Create a table under *where* with name *name*.

    The table gets ``minRowIndex`` rows and indexes on its first three
    columns (var4 is deliberately left unindexed).
    """
    class Indexed(IsDescription):
        var1 = StringCol(itemsize=4, dflt=b"", pos=1)
        var2 = BoolCol(dflt=0, pos=2)
        var3 = IntCol(dflt=0, pos=3)
        var4 = FloatCol(dflt=0, pos=4)

    nrows = minRowIndex
    table = where._v_file.create_table(where, name, Indexed, "Indexed",
                                       None, nrows)
    row = table.row
    for i in range(nrows):
        row['var1'] = str(i)
        row['var2'] = i % 2
        row['var3'] = i
        row['var4'] = float(nrows - i - 1)
        row.append()
    table.flush()
    # Index every column except var4; keep the last return value for the
    # verbose report below.
    for column in (table.cols.var1, table.cols.var2, table.cols.var3):
        indexrows = column.create_index()
    if common.verbose:
        print("Number of written rows:", nrows)
        print("Number of indexed rows:", table.cols.var1.index.nelements)
        print("Number of indexed rows(2):", indexrows)
class RenameNodeTestCase(common.TempFileMixin, TestCase):
"""Test for rename_node operations"""
def setUp(self):
    """Build a fixture tree of arrays, groups and one indexed table."""
    super(RenameNodeTestCase, self).setUp()
    h5file = self.h5file
    root = h5file.root
    # Create an array
    h5file.create_array(root, 'array', [1, 2], title="Title example")
    # Create another array object
    h5file.create_array(root, 'anarray', [1], "Array title")
    # Create a group object
    group = h5file.create_group(root, 'agroup', "Group title")
    # Create a couple of objects there
    h5file.create_array(group, 'anarray1', [2], "Array title 1")
    h5file.create_array(group, 'anarray2', [2], "Array title 2")
    # Create a lonely group in first level
    h5file.create_group(root, 'agroup2', "Group title 2")
    # Create a new group in the second level
    h5file.create_group(group, 'agroup3', "Group title 3")
    # Create a table in root
    populateTable(self.h5file.root, 'table')
def test00(self):
    """Checking rename_node (over Groups without children)"""
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test00..." % self.__class__.__name__)
    # open the do/undo
    self.h5file.enable_undo()
    # Rename the childless group /agroup2 to /agroup3
    self.h5file.rename_node('/agroup2', 'agroup3')
    # Now undo the past operation
    self.h5file.undo()
    # The original name must be back and the new one gone
    self.assertIn("/agroup2", self.h5file)
    self.assertNotIn("/agroup3", self.h5file)
    self.assertEqual(self.h5file.root.agroup2._v_title, "Group title 2")
    # Redo the operation
    self.h5file.redo()
    # The rename must be applied again, title preserved
    self.assertNotIn("/agroup2", self.h5file)
    self.assertIn("/agroup3", self.h5file)
    self.assertEqual(self.h5file.root.agroup3._v_title, "Group title 2")
def test01(self):
    """Checking rename_node (over Groups with children)"""
    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test01..." % self.__class__.__name__)
    # open the do/undo
    self.h5file.enable_undo()
    # Rename the populated group /agroup to /agroup3
    self.h5file.rename_node('/agroup', 'agroup3')
    # Now undo the past operation
    self.h5file.undo()
    # The original name must be back and the new one gone
    self.assertIn("/agroup", self.h5file)
    self.assertNotIn("/agroup3", self.h5file)
    # Check that children are reachable under the restored name
    self.assertIn("/agroup/anarray1", self.h5file)
    self.assertIn("/agroup/anarray2", self.h5file)
    self.assertIn("/agroup/agroup3", self.h5file)
    self.assertEqual(self.h5file.root.agroup._v_title, "Group title")
    # Redo the operation
    self.h5file.redo()
    # The rename must be applied again, title preserved
    self.assertNotIn("/agroup", self.h5file)
    self.assertIn("/agroup3", self.h5file)
    self.assertEqual(self.h5file.root.agroup3._v_title, "Group title")
    # Check that children are reachable under the new name
    self.assertIn("/agroup3/anarray1", self.h5file)
    self.assertIn("/agroup3/anarray2", self.h5file)
    self.assertIn("/agroup3/agroup3", self.h5file)
def test01b(self):
    """Checking rename_node (over Groups with children 2)"""
    if common.verbose:
        print('\n', '-=' * 30)
        print(f"Running {self.__class__.__name__}.test01b...")
    h5 = self.h5file
    h5.enable_undo()
    # Rename the group twice in a row, then undo everything.
    h5.rename_node('/agroup', 'agroup3')
    h5.rename_node('/agroup3', 'agroup4')
    h5.undo()
    # Back to the original name; the final name is gone.
    self.assertIn("/agroup", h5)
    self.assertNotIn("/agroup4", h5)
    # Children hang from the restored group.
    for child in ("anarray1", "anarray2", "agroup3"):
        self.assertIn("/agroup/" + child, h5)
    self.assertEqual(h5.root.agroup._v_title, "Group title")
    # Redo both renames at once.
    h5.redo()
    self.assertNotIn("/agroup", h5)
    self.assertIn("/agroup4", h5)
    self.assertEqual(h5.root.agroup4._v_title, "Group title")
    for child in ("anarray1", "anarray2", "agroup3"):
        self.assertIn("/agroup4/" + child, h5)
def test02(self):
    """Checking rename_node (over Leaves)"""
    if common.verbose:
        print('\n', '-=' * 30)
        print(f"Running {self.__class__.__name__}.test02...")
    h5 = self.h5file
    h5.enable_undo()
    # Rename a leaf and roll the rename back.
    h5.rename_node('/anarray', 'anarray2')
    h5.undo()
    self.assertIn("/anarray", h5)
    self.assertNotIn("/anarray2", h5)
    self.assertEqual(h5.root.anarray.title, "Array title")
    # Redo: the leaf lives under its new name again.
    h5.redo()
    self.assertNotIn("/anarray", h5)
    self.assertIn("/anarray2", h5)
    self.assertEqual(h5.root.anarray2.title, "Array title")
def test03(self):
    """Checking rename_node (over Tables)"""
    if common.verbose:
        print('\n', '-=' * 30)
        print(f"Running {self.__class__.__name__}.test03...")
    h5 = self.h5file
    h5.enable_undo()
    # Rename the indexed table, then undo.
    h5.rename_node('/table', 'table2')
    h5.undo()
    # The table is back under its old name, with indexes intact.
    self.assertIn("/table", h5)
    table = h5.root.table
    for col in (table.cols.var1, table.cols.var2, table.cols.var3):
        self.assertIsNotNone(col.index)
    self.assertIsNone(table.cols.var4.index)
    for col in (table.cols.var1, table.cols.var2, table.cols.var3):
        self.assertEqual(col.index.nelements, minRowIndex)
    self.assertNotIn("/table2", h5)
    self.assertEqual(h5.root.table.title, "Indexed")
    # Redo: the rename takes effect again and the indexes survive.
    h5.redo()
    self.assertNotIn("/table", h5)
    self.assertIn("/table2", h5)
    self.assertEqual(h5.root.table2.title, "Indexed")
    table = h5.root.table2
    for col in (table.cols.var1, table.cols.var2, table.cols.var3):
        self.assertIsNotNone(col.index)
        self.assertEqual(col.index.nelements, minRowIndex)
    self.assertIsNone(table.cols.var4.index)
class MoveNodeTestCase(common.TempFileMixin, TestCase):
    """Tests for move_node operations"""

    def setUp(self):
        super(MoveNodeTestCase, self).setUp()
        h5file = self.h5file
        root = h5file.root
        # Two arrays at the root.
        h5file.create_array(root, 'array', [1, 2], title="Title example")
        h5file.create_array(root, 'anarray', [1], "Array title")
        # A populated group holding two arrays...
        group = h5file.create_group(root, 'agroup', "Group title")
        h5file.create_array(group, 'anarray1', [2], "Array title 1")
        h5file.create_array(group, 'anarray2', [2], "Array title 2")
        # ...an empty sibling group at the first level...
        h5file.create_group(root, 'agroup2', "Group title 2")
        # ...and a subgroup at the second level.
        h5file.create_group(group, 'agroup3', "Group title 3")
        # Finally, an indexed table at the root.
        populateTable(self.h5file.root, 'table')

    def test00(self):
        """Checking move_node (over Leaf)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test00...")
        h5 = self.h5file
        h5.enable_undo()
        # Move the leaf into a nested group, then undo.
        h5.move_node('/anarray', '/agroup/agroup3')
        h5.undo()
        self.assertIn("/anarray", h5)
        self.assertNotIn("/agroup/agroup3/anarray", h5)
        self.assertEqual(h5.root.anarray.title, "Array title")
        # Redo the move.
        h5.redo()
        self.assertNotIn("/anarray", h5)
        self.assertIn("/agroup/agroup3/anarray", h5)
        self.assertEqual(h5.root.agroup.agroup3.anarray.title,
                         "Array title")

    def test01(self):
        """Checking move_node (over Groups with children)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test01...")
        h5 = self.h5file
        h5.enable_undo()
        # Move the populated group under /agroup2, then undo.
        h5.move_node('/agroup', '/agroup2', 'agroup3')
        h5.undo()
        self.assertIn("/agroup", h5)
        self.assertNotIn("/agroup2/agroup3", h5)
        # Children hang from the restored group.
        for child in ("anarray1", "anarray2", "agroup3"):
            self.assertIn("/agroup/" + child, h5)
        self.assertEqual(h5.root.agroup._v_title, "Group title")
        # Redo the move.
        h5.redo()
        self.assertNotIn("/agroup", h5)
        self.assertIn("/agroup2/agroup3", h5)
        self.assertEqual(h5.root.agroup2.agroup3._v_title, "Group title")
        for child in ("anarray1", "anarray2", "agroup3"):
            self.assertIn("/agroup2/agroup3/" + child, h5)

    def test01b(self):
        """Checking move_node (over Groups with children 2)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test01b...")
        h5 = self.h5file
        h5.enable_undo()
        # Move the group twice in a row, then undo everything.
        h5.move_node('/agroup', '/', 'agroup3')
        h5.move_node('/agroup3', '/agroup2', 'agroup4')
        h5.undo()
        self.assertIn("/agroup", h5)
        self.assertNotIn("/agroup2/agroup4", h5)
        # Children hang from the restored group.
        for child in ("anarray1", "anarray2", "agroup3"):
            self.assertIn("/agroup/" + child, h5)
        self.assertEqual(h5.root.agroup._v_title, "Group title")
        # Redo both moves at once.
        h5.redo()
        self.assertNotIn("/agroup", h5)
        self.assertIn("/agroup2/agroup4", h5)
        self.assertEqual(h5.root.agroup2.agroup4._v_title, "Group title")
        for child in ("anarray1", "anarray2", "agroup3"):
            self.assertIn("/agroup2/agroup4/" + child, h5)

    def test02(self):
        """Checking move_node (over Leaves)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test02...")
        h5 = self.h5file
        h5.enable_undo()
        # Move (and rename) the leaf into /agroup2, then undo.
        h5.move_node('/anarray', '/agroup2', 'anarray2')
        h5.undo()
        self.assertIn("/anarray", h5)
        self.assertNotIn("/agroup2/anarray2", h5)
        self.assertEqual(h5.root.anarray.title, "Array title")
        # Redo the move.
        h5.redo()
        self.assertNotIn("/anarray", h5)
        self.assertIn("/agroup2/anarray2", h5)
        self.assertEqual(h5.root.agroup2.anarray2.title, "Array title")

    def test03(self):
        """Checking move_node (over Tables)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test03...")
        h5 = self.h5file
        h5.enable_undo()
        # Move the indexed table into /agroup2, then undo.
        h5.move_node('/table', '/agroup2', 'table2')
        h5.undo()
        # The table is back at the root, with its indexes intact.
        self.assertIn("/table", h5)
        self.assertNotIn("/agroup2/table2", h5)
        table = h5.root.table
        for col in (table.cols.var1, table.cols.var2, table.cols.var3):
            self.assertIsNotNone(col.index)
        self.assertIsNone(table.cols.var4.index)
        for col in (table.cols.var1, table.cols.var2, table.cols.var3):
            self.assertEqual(col.index.nelements, minRowIndex)
        self.assertEqual(h5.root.table.title, "Indexed")
        # Redo: the table lives under /agroup2 again.
        h5.redo()
        self.assertNotIn("/table", h5)
        self.assertIn("/agroup2/table2", h5)
        self.assertEqual(h5.root.agroup2.table2.title, "Indexed")
        table = h5.root.agroup2.table2
        for col in (table.cols.var1, table.cols.var2, table.cols.var3):
            self.assertIsNotNone(col.index)
            self.assertEqual(col.index.nelements, minRowIndex)
        self.assertIsNone(table.cols.var4.index)
class RemoveNodeTestCase(common.TempFileMixin, TestCase):
    """Test for remove_node operations"""

    def setUp(self):
        super(RemoveNodeTestCase, self).setUp()
        h5file = self.h5file
        root = h5file.root
        # Two arrays at the root.
        h5file.create_array(root, 'array', [1, 2], title="Title example")
        h5file.create_array(root, 'anarray', [1], "Array title")
        # A populated group holding two arrays...
        group = h5file.create_group(root, 'agroup', "Group title")
        h5file.create_array(group, 'anarray1', [2], "Array title 1")
        h5file.create_array(group, 'anarray2', [2], "Array title 2")
        # ...an empty sibling group at the first level...
        h5file.create_group(root, 'agroup2', "Group title 2")
        # ...and a subgroup at the second level.
        h5file.create_group(group, 'agroup3', "Group title 3")
        # Finally, an indexed table at the root.
        populateTable(self.h5file.root, 'table')

    def test00(self):
        """Checking remove_node (over Leaf)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test00...")
        h5 = self.h5file
        h5.enable_undo()
        # Remove a leaf and roll the removal back.
        h5.remove_node('/anarray')
        h5.undo()
        self.assertIn("/anarray", h5)
        self.assertEqual(h5.root.anarray.title, "Array title")
        # Redo: the leaf disappears again.
        h5.redo()
        self.assertNotIn("/anarray", h5)

    def test00b(self):
        """Checking remove_node (over several Leaves)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test00b...")
        h5 = self.h5file
        h5.enable_undo()
        # Remove two leaves, then undo both removals at once.
        h5.remove_node('/anarray')
        h5.remove_node('/agroup/anarray2')
        h5.undo()
        self.assertIn("/anarray", h5)
        self.assertIn("/agroup/anarray2", h5)
        self.assertEqual(h5.root.anarray.title, "Array title")
        self.assertEqual(h5.root.agroup.anarray2.title, "Array title 2")
        # Redo: both leaves vanish again.
        h5.redo()
        self.assertNotIn("/anarray", h5)
        self.assertNotIn("/agroup/anarray2", h5)

    def test00c(self):
        """Checking remove_node (over Tables)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test00c...")
        h5 = self.h5file
        h5.enable_undo()
        # Remove the indexed table and undo the removal.
        h5.remove_node('/table')
        h5.undo()
        # The table is back, with its indexes intact.
        self.assertIn("/table", h5)
        table = h5.root.table
        for col in (table.cols.var1, table.cols.var2, table.cols.var3):
            self.assertIsNotNone(col.index)
        self.assertIsNone(table.cols.var4.index)
        for col in (table.cols.var1, table.cols.var2, table.cols.var3):
            self.assertEqual(col.index.nelements, minRowIndex)
        self.assertEqual(h5.root.table.title, "Indexed")
        # Redo: the table is gone once more.
        h5.redo()
        self.assertNotIn("/table", h5)

    def test01(self):
        """Checking remove_node (over Groups with children)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test01...")
        h5 = self.h5file
        h5.enable_undo()
        # Remove the group tree recursively, then undo.
        h5.remove_node('/agroup', recursive=1)
        h5.undo()
        # Parent and children must all be back, in a sane state.
        for path in ("/agroup", "/agroup/anarray1",
                     "/agroup/anarray2", "/agroup/agroup3"):
            self.assertIn(path, h5)
        self.assertEqual(h5.root.agroup._v_title, "Group title")
        # Redo: the whole subtree is unreachable again.
        h5.redo()
        for path in ("/agroup", "/agroup/anarray1",
                     "/agroup/anarray2", "/agroup/agroup3"):
            self.assertNotIn(path, h5)

    def test01b(self):
        """Checking remove_node (over Groups with children 2)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test01b...")
        h5 = self.h5file
        h5.enable_undo()
        # Remove one populated group and one empty group, then undo.
        h5.remove_node('/agroup', recursive=1)
        h5.remove_node('/agroup2')
        h5.undo()
        self.assertIn("/agroup", h5)
        self.assertIn("/agroup2", h5)
        # Children of the populated group are reachable again.
        for path in ("/agroup/anarray1", "/agroup/anarray2",
                     "/agroup/agroup3"):
            self.assertIn(path, h5)
        self.assertEqual(h5.root.agroup._v_title, "Group title")
        # Redo: both groups and all children vanish again.
        h5.redo()
        self.assertNotIn("/agroup", h5)
        self.assertNotIn("/agroup2", h5)
        for path in ("/agroup/anarray1", "/agroup/anarray2",
                     "/agroup/agroup3"):
            self.assertNotIn(path, h5)
class CopyNodeTestCase(common.TempFileMixin, TestCase):
    """Tests for copy_node and copy_children operations.

    Each test enables undo/redo, performs a copy, and checks that
    ``undo()`` removes the copied nodes and ``redo()`` recreates them.
    """

    def setUp(self):
        super(CopyNodeTestCase, self).setUp()
        h5file = self.h5file
        root = h5file.root
        # Create an array
        h5file.create_array(root, 'array', [1, 2], title="Title example")
        # Create another array object
        h5file.create_array(root, 'anarray', [1], "Array title")
        # Create a group object
        group = h5file.create_group(root, 'agroup', "Group title")
        # Create a couple of objects there
        h5file.create_array(group, 'anarray1', [2], "Array title 1")
        h5file.create_array(group, 'anarray2', [2], "Array title 2")
        # Create a lonely group in first level
        h5file.create_group(root, 'agroup2', "Group title 2")
        # Create a new group in the second level
        h5file.create_group(group, 'agroup3', "Group title 3")
        # Create an indexed table in root
        populateTable(self.h5file.root, 'table')

    def test00_copyLeaf(self):
        """Checking copy_node (over Leaves)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test00_copyLeaf..." % self.__class__.__name__)
        # Enable undo/redo.
        self.h5file.enable_undo()
        # /anarray => /agroup/agroup3/
        new_node = self.h5file.copy_node('/anarray', '/agroup/agroup3')
        # Undo the copy.
        self.h5file.undo()
        # Check that the copied node does not exist in the object tree.
        self.assertNotIn('/agroup/agroup3/anarray', self.h5file)
        # Redo the copy.
        self.h5file.redo()
        # Check that the copied node exists again in the object tree.
        self.assertIn('/agroup/agroup3/anarray', self.h5file)
        self.assertIs(self.h5file.root.agroup.agroup3.anarray, new_node)

    def test00b_copyTable(self):
        """Checking copy_node (over Tables)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test00b_copyTable..." % self.__class__.__name__)
        # open the do/undo
        self.h5file.enable_undo()
        # /table => /agroup/agroup3/
        # Silence the UserWarning emitted while copying the indexes inside
        # a catch_warnings() block: unlike the previous
        # filterwarnings("ignore") / filterwarnings("default") pair, this
        # restores the caller's filter state even if the copy raises.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UserWarning)
            table = self.h5file.copy_node(
                '/table', '/agroup/agroup3', propindexes=True)
        self.assertIn("/agroup/agroup3/table", self.h5file)
        table = self.h5file.root.agroup.agroup3.table
        self.assertEqual(table.title, "Indexed")
        # The copy must carry the column indexes along (propindexes=True).
        self.assertIsNotNone(table.cols.var1.index)
        self.assertIsNotNone(table.cols.var2.index)
        self.assertIsNotNone(table.cols.var3.index)
        self.assertEqual(table.cols.var1.index.nelements, minRowIndex)
        self.assertEqual(table.cols.var2.index.nelements, minRowIndex)
        self.assertEqual(table.cols.var3.index.nelements, minRowIndex)
        self.assertIsNone(table.cols.var4.index)
        # Now undo the past operation
        self.h5file.undo()
        # The original table keeps its indexes after the undo.
        table = self.h5file.root.table
        self.assertIsNotNone(table.cols.var1.index)
        self.assertIsNotNone(table.cols.var2.index)
        self.assertIsNotNone(table.cols.var3.index)
        self.assertIsNone(table.cols.var4.index)
        self.assertEqual(table.cols.var1.index.nelements, minRowIndex)
        self.assertEqual(table.cols.var2.index.nelements, minRowIndex)
        self.assertEqual(table.cols.var3.index.nelements, minRowIndex)
        # Check that the copied node does not exist in the object tree.
        self.assertNotIn("/agroup/agroup3/table", self.h5file)
        # Redo the operation
        self.h5file.redo()
        # Check that table has come back to life in a sane state
        self.assertIn("/table", self.h5file)
        self.assertIn("/agroup/agroup3/table", self.h5file)
        table = self.h5file.root.agroup.agroup3.table
        self.assertEqual(table.title, "Indexed")
        self.assertIsNotNone(table.cols.var1.index)
        self.assertIsNotNone(table.cols.var2.index)
        self.assertIsNotNone(table.cols.var3.index)
        self.assertEqual(table.cols.var1.index.nelements, minRowIndex)
        self.assertEqual(table.cols.var2.index.nelements, minRowIndex)
        self.assertEqual(table.cols.var3.index.nelements, minRowIndex)
        self.assertIsNone(table.cols.var4.index)

    def test01_copyGroup(self):
        """Copying a group (recursively)."""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01_copyGroup..." % self.__class__.__name__)
        # Enable undo/redo.
        self.h5file.enable_undo()
        # /agroup => /acopy
        new_node = self.h5file.copy_node(
            '/agroup', newname='acopy', recursive=True)
        # Undo the copy.
        self.h5file.undo()
        # Check that the copied group and its children are gone.
        self.assertNotIn('/acopy', self.h5file)
        self.assertNotIn('/acopy/anarray1', self.h5file)
        self.assertNotIn('/acopy/anarray2', self.h5file)
        self.assertNotIn('/acopy/agroup3', self.h5file)
        # Redo the copy.
        self.h5file.redo()
        # Check that the copied group and its children exist again.
        self.assertIn('/acopy', self.h5file)
        self.assertIn('/acopy/anarray1', self.h5file)
        self.assertIn('/acopy/anarray2', self.h5file)
        self.assertIn('/acopy/agroup3', self.h5file)
        self.assertIs(self.h5file.root.acopy, new_node)

    def test02_copyLeafOverwrite(self):
        """Copying a leaf, overwriting destination."""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02_copyLeafOverwrite..." %
                  self.__class__.__name__)
        # Enable undo/redo.
        self.h5file.enable_undo()
        # /anarray => /agroup  (overwriting the existing /agroup group)
        old_node = self.h5file.root.agroup
        new_node = self.h5file.copy_node(
            '/anarray', newname='agroup', overwrite=True)
        # Undo the copy.
        self.h5file.undo()
        # The overwritten group must be back in the object tree.
        self.assertIs(self.h5file.root.agroup, old_node)
        # Redo the copy.
        self.h5file.redo()
        # The copied leaf replaces the group once more.
        self.assertIs(self.h5file.root.agroup, new_node)

    def test03_copyChildren(self):
        """Copying the children of a group"""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test03_copyChildren..." %
                  self.__class__.__name__)
        # Enable undo/redo.
        self.h5file.enable_undo()
        # /agroup/* => /agroup2/
        self.h5file.copy_children('/agroup', '/agroup2', recursive=True)
        # Undo the copy.
        self.h5file.undo()
        # Check that the copied nodes do not exist in the object tree.
        self.assertNotIn('/agroup2/anarray1', self.h5file)
        self.assertNotIn('/agroup2/anarray2', self.h5file)
        self.assertNotIn('/agroup2/agroup3', self.h5file)
        # Redo the copy.
        self.h5file.redo()
        # Check that the copied nodes exist again in the object tree.
        self.assertIn('/agroup2/anarray1', self.h5file)
        self.assertIn('/agroup2/anarray2', self.h5file)
        self.assertIn('/agroup2/agroup3', self.h5file)
class ComplexTestCase(common.TempFileMixin, TestCase):
    """Tests for a mix of all operations"""

    def setUp(self):
        super(ComplexTestCase, self).setUp()
        h5file = self.h5file
        root = h5file.root
        # Two arrays at the root.
        h5file.create_array(root, 'array', [1, 2], title="Title example")
        h5file.create_array(root, 'anarray', [1], "Array title")
        # A populated group holding two arrays...
        group = h5file.create_group(root, 'agroup', "Group title")
        h5file.create_array(group, 'anarray1', [2], "Array title 1")
        h5file.create_array(group, 'anarray2', [2], "Array title 2")
        # ...an empty sibling group at the first level...
        h5file.create_group(root, 'agroup2', "Group title 2")
        # ...and a subgroup at the second level.
        h5file.create_group(group, 'agroup3', "Group title 3")

    def test00(self):
        """Mix of create_array, create_group, rename_node, remove_node,
        copy_node and copy_children."""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test00...")
        h5 = self.h5file
        h5.enable_undo()
        # Build a fresh array and a fresh group.
        h5.create_array(h5.root, 'anarray3', [1], "Array title 3")
        h5.create_group(h5.root, 'agroup3', "Group title 3")
        # Copy /anarray3 into /agroup/agroup3, then copy the whole set of
        # /agroup children into /agroup3.
        new_node = h5.copy_node('/anarray3', '/agroup/agroup3')
        new_node = h5.copy_children('/agroup', '/agroup3', recursive=1)
        # Rename /anarray, copy /anarray3 into /agroup, and drop the
        # renamed leaf.
        h5.rename_node('/anarray', 'anarray4')
        new_node = h5.copy_node('/anarray3', '/agroup')
        h5.remove_node('/anarray4')
        # A single undo must roll back every operation above.
        h5.undo()
        for path in ('/anarray4', '/anarray3', '/agroup/agroup3/anarray3',
                     '/agroup3', '/anarray4'):
            self.assertNotIn(path, h5)
        self.assertIn('/anarray', h5)
        # A single redo must replay them all.
        h5.redo()
        self.assertIn('/agroup/agroup3/anarray3', h5)
        self.assertIn('/agroup/anarray3', h5)
        self.assertIn('/agroup3/agroup3/anarray3', h5)
        self.assertNotIn('/agroup3/anarray3', h5)
        self.assertIs(h5.root.agroup.anarray3, new_node)
        self.assertNotIn('/anarray', h5)
        self.assertNotIn('/anarray4', h5)

    def test01(self):
        """Test with multiple generations (Leaf case)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test01...")
        h5 = self.h5file
        h5.enable_undo()
        # Remove and re-create /anarray three times over, with fresh
        # content and a fresh title each time.
        for number in (2, 3, 4):
            h5.remove_node('/anarray')
            h5.create_array(h5.root, 'anarray',
                            [number], "Array title %d" % number)
        # A single undo returns to the pristine state.
        h5.undo()
        self.assertEqual(h5.root.anarray.title, "Array title")
        self.assertEqual(h5.root.anarray[:], [1])
        # A single redo replays every generation.
        h5.redo()
        self.assertEqual(h5.root.anarray.title, "Array title 4")
        self.assertEqual(h5.root.anarray[:], [4])

    def test02(self):
        """Test with multiple generations (Group case)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test02...")
        h5 = self.h5file
        h5.enable_undo()
        # Replace /agroup2 three times over with fresh groups.
        for title in ("Group title 22", "Group title 3", "Group title 4"):
            h5.remove_node('/agroup2')
            h5.create_group(h5.root, 'agroup2', title)
        # Give the last generation a child group.
        h5.create_group(h5.root.agroup2, 'agroup5', "Group title 5")
        # A single undo returns to the state before enable_undo().
        h5.undo()
        self.assertEqual(h5.root.agroup2._v_title, "Group title 2")
        self.assertIn('/agroup2', h5)
        # A single redo replays every generation.
        h5.redo()
        self.assertEqual(h5.root.agroup2._v_title, "Group title 4")
        self.assertEqual(h5.root.agroup2.agroup5._v_title,
                         "Group title 5")

    def test03(self):
        """Test with multiple generations (Group case, recursive remove)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test03...")
        h5 = self.h5file
        h5.enable_undo()
        # Drop the populated group recursively, then replace it several
        # times with fresh (childless) groups.
        h5.remove_node('/agroup', recursive=1)
        h5.create_group(h5.root, 'agroup', "Group title 2")
        for title in ("Group title 3", "Group title 4"):
            h5.remove_node('/agroup')
            h5.create_group(h5.root, 'agroup', title)
        # Give the final generation a child group.
        h5.create_group(h5.root.agroup, 'agroup5', "Group title 5")
        # Undo restores the original subtree completely.
        h5.undo()
        self.assertIn('/agroup', h5)
        self.assertEqual(h5.root.agroup._v_title, "Group title")
        for child in ("anarray1", "anarray2", "agroup3"):
            self.assertIn("/agroup/" + child, h5)
        self.assertNotIn('/agroup/agroup5', h5)
        # Redo replays all generations, including the child group.
        h5.redo()
        self.assertIn('/agroup', h5)
        self.assertEqual(h5.root.agroup._v_title, "Group title 4")
        self.assertIn('/agroup/agroup5', h5)
        self.assertEqual(
            h5.root.agroup.agroup5._v_title, "Group title 5")

    def test03b(self):
        """Test with multiple generations (Group case, recursive remove,
        case 2)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test03b...")
        h5 = self.h5file
        h5.enable_undo()
        # Create /agroup3 with a child, wipe it recursively, and put a
        # fresh group in its place.
        h5.create_group(h5.root, 'agroup3', "Group title 3")
        h5.create_group(h5.root.agroup3, 'agroup4', "Group title 4")
        h5.remove_node('/agroup3', recursive=1)
        h5.create_group(h5.root, 'agroup3', "Group title 4")
        # Undo: /agroup3 did not exist before enable_undo().
        h5.undo()
        self.assertNotIn('/agroup3', h5)
        # Redo: only the last incarnation of /agroup3 survives.
        h5.redo()
        self.assertEqual(h5.root.agroup3._v_title, "Group title 4")
        self.assertIn('/agroup3', h5)
        self.assertNotIn('/agroup/agroup4', h5)
class AttributesTestCase(common.TempFileMixin, TestCase):
    """Tests for operation on attributes"""

    def setUp(self):
        super(AttributesTestCase, self).setUp()
        # Create an array and give it three attributes to work with.
        array = self.h5file.create_array('/', 'array', [1, 2])
        array.attrs.attr_1 = 10
        array.attrs.attr_2 = 20
        array.attrs.attr_3 = 30

    def test00_setAttr(self):
        """Setting a nonexistent attribute"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test00_setAttr...")
        attrs = self.h5file.root.array.attrs
        self.h5file.enable_undo()
        # Creating a brand-new attribute is logged...
        attrs.attr_0 = 0
        self.assertIn('attr_0', attrs)
        self.assertEqual(attrs.attr_0, 0)
        # ...so undo removes it and redo puts it back.
        self.h5file.undo()
        self.assertNotIn('attr_0', attrs)
        self.h5file.redo()
        self.assertIn('attr_0', attrs)
        self.assertEqual(attrs.attr_0, 0)

    def test01_setAttrExisting(self):
        """Setting an existing attribute"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}"
                  ".test01_setAttrExisting...")
        attrs = self.h5file.root.array.attrs
        self.h5file.enable_undo()
        # Overwrite an existing attribute value.
        attrs.attr_1 = 11
        self.assertIn('attr_1', attrs)
        self.assertEqual(attrs.attr_1, 11)
        # Undo restores the previous value; redo reinstates the new one.
        self.h5file.undo()
        self.assertIn('attr_1', attrs)
        self.assertEqual(attrs.attr_1, 10)
        self.h5file.redo()
        self.assertIn('attr_1', attrs)
        self.assertEqual(attrs.attr_1, 11)

    def test02_delAttr(self):
        """Removing an attribute"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test02_delAttr...")
        attrs = self.h5file.root.array.attrs
        self.h5file.enable_undo()
        # Delete an attribute; undo brings it (and its value) back.
        del attrs.attr_1
        self.assertNotIn('attr_1', attrs)
        self.h5file.undo()
        self.assertIn('attr_1', attrs)
        self.assertEqual(attrs.attr_1, 10)
        self.h5file.redo()
        self.assertNotIn('attr_1', attrs)

    def test03_copyNodeAttrs(self):
        """Copying an attribute set"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}"
                  ".test03_copyNodeAttrs...")
        rattrs = self.h5file.root._v_attrs
        rattrs.attr_0 = 0
        rattrs.attr_1 = 100
        attrs = self.h5file.root.array.attrs
        self.h5file.enable_undo()
        # Copy the array's attribute set onto the root group.
        attrs._f_copy(self.h5file.root)
        for name, expected in (('attr_0', 0), ('attr_1', 10),
                               ('attr_2', 20), ('attr_3', 30)):
            self.assertEqual(getattr(rattrs, name), expected)
        # Undo restores the pre-copy values and drops the new names.
        self.h5file.undo()
        self.assertEqual(rattrs.attr_0, 0)
        self.assertEqual(rattrs.attr_1, 100)
        self.assertNotIn('attr_2', rattrs)
        self.assertNotIn('attr_3', rattrs)
        # Redo applies the copy again.
        self.h5file.redo()
        for name, expected in (('attr_0', 0), ('attr_1', 10),
                               ('attr_2', 20), ('attr_3', 30)):
            self.assertEqual(getattr(rattrs, name), expected)

    def test04_replaceNode(self):
        """Replacing a node with a rewritten attribute"""
        if common.verbose:
            print('\n', '-=' * 30)
            print(f"Running {self.__class__.__name__}.test04_replaceNode...")
        attrs = self.h5file.root.array.attrs
        self.h5file.enable_undo()
        # Rewrite an attribute, replace the node, rewrite on the new node.
        attrs.attr_1 = 11
        self.h5file.remove_node('/array')
        replacement = self.h5file.create_array('/', 'array', [1])
        replacement.attrs.attr_1 = 12
        # Undo goes back to the original node and attribute value.
        self.h5file.undo()
        self.assertIn('attr_1', self.h5file.root.array.attrs)
        self.assertEqual(self.h5file.root.array.attrs.attr_1, 10)
        # Redo brings back the replacement node with its own value.
        self.h5file.redo()
        self.assertIn('attr_1', self.h5file.root.array.attrs)
        self.assertEqual(self.h5file.root.array.attrs.attr_1, 12)
class NotLoggedTestCase(common.TempFileMixin, TestCase):
    """Test not logged nodes."""

    class NotLoggedArray(NotLoggedMixin, tables.Array):
        pass

    def test00_hierarchy(self):
        """Performing hierarchy operations on a not logged node."""
        self.h5file.create_group('/', 'tgroup')
        self.h5file.enable_undo()
        # Creating a not-logged node leaves nothing to undo.
        node = self.NotLoggedArray(self.h5file.root, 'test',
                                   [1], self._getMethodName())
        self.h5file.undo()
        self.assertIn('/test', self.h5file)
        # Neither does moving it...
        node.move('/tgroup')
        self.h5file.undo()
        self.assertIn('/tgroup/test', self.h5file)
        # ...nor removing it.
        node.remove()
        self.h5file.undo()
        self.assertNotIn('/tgroup/test', self.h5file)

    def test01_attributes(self):
        """Performing attribute operations on a not logged node."""
        node = self.NotLoggedArray(self.h5file.root, 'test',
                                   [1], self._getMethodName())
        self.h5file.enable_undo()
        # Creating an attribute on a not-logged node is not undone.
        node._v_attrs.foo = 'bar'
        self.h5file.undo()
        self.assertEqual(node._v_attrs.foo, 'bar')
        # Neither is changing it...
        node._v_attrs.foo = 'baz'
        self.h5file.undo()
        self.assertEqual(node._v_attrs.foo, 'baz')
        # ...nor removing it.
        del node._v_attrs.foo
        self.h5file.undo()
        self.assertRaises(AttributeError, getattr, node._v_attrs, 'foo')
class CreateParentsTestCase(common.TempFileMixin, TestCase):
    """Test the ``createparents`` flag."""

    def setUp(self):
        super(CreateParentsTestCase, self).setUp()
        group1 = self.h5file.create_group('/', 'g1')
        self.h5file.create_group(group1, 'g2')

    def existing(self, paths):
        """Return a set of the existing paths in `paths`."""
        return frozenset(path for path in paths if path in self.h5file)

    def basetest(self, doit, pre, post):
        # Exercise `doit` against deeper and deeper target paths, checking
        # that each operation only ever adds nodes, and that `undo` gets
        # back exactly the set of paths that existed beforehand.
        pre()
        self.h5file.enable_undo()
        paths = ['/g1', '/g1/g2', '/g1/g2/g3', '/g1/g2/g3/g4']
        for newpath in paths:
            paths_before = self.existing(paths)
            doit(newpath)
            paths_after = self.existing(paths)
            self.assertTrue(paths_after.issuperset(paths_before))
            self.h5file.undo()
            post(newpath)
            paths_after = self.existing(paths)
            self.assertEqual(paths_after, paths_before)

    def test00_create(self):
        """Test creating a node."""
        def pre():
            pass

        def doit(newpath):
            self.h5file.create_array(newpath, 'array', [1], createparents=True)
            self.assertIn(join_path(newpath, 'array'), self.h5file)

        def post(newpath):
            self.assertNotIn(join_path(newpath, 'array'), self.h5file)

        self.basetest(doit, pre, post)

    def test01_move(self):
        """Test moving a node."""
        def pre():
            self.h5file.create_array('/', 'array', [1])

        def doit(newpath):
            self.h5file.move_node('/array', newpath, createparents=True)
            self.assertNotIn('/array', self.h5file)
            self.assertIn(join_path(newpath, 'array'), self.h5file)

        def post(newpath):
            self.assertIn('/array', self.h5file)
            self.assertNotIn(join_path(newpath, 'array'), self.h5file)

        self.basetest(doit, pre, post)

    def test02_copy(self):
        """Test copying a node."""
        def pre():
            self.h5file.create_array('/', 'array', [1])

        def doit(newpath):
            self.h5file.copy_node('/array', newpath, createparents=True)
            self.assertIn(join_path(newpath, 'array'), self.h5file)

        def post(newpath):
            self.assertNotIn(join_path(newpath, 'array'), self.h5file)

        self.basetest(doit, pre, post)

    def test03_copyChildren(self):
        """Test copying the children of a group."""
        def pre():
            group = self.h5file.create_group('/', 'group')
            self.h5file.create_array(group, 'array1', [1])
            self.h5file.create_array(group, 'array2', [1])

        def doit(newpath):
            self.h5file.copy_children('/group', newpath, createparents=True)
            self.assertIn(join_path(newpath, 'array1'), self.h5file)
            self.assertIn(join_path(newpath, 'array2'), self.h5file)

        def post(newpath):
            self.assertNotIn(join_path(newpath, 'array1'), self.h5file)
            self.assertNotIn(join_path(newpath, 'array2'), self.h5file)

        self.basetest(doit, pre, post)
def suite():
    """Return a suite with every test case defined in this module."""
    theSuite = unittest.TestSuite()
    niter = 1
    # common.heavy = 1  # uncomment this only for testing purposes

    # Test cases are added in this fixed order, once per iteration.
    test_cases = (
        BasicTestCase,
        PersistenceTestCase,
        CreateArrayTestCase,
        CreateGroupTestCase,
        RenameNodeTestCase,
        MoveNodeTestCase,
        RemoveNodeTestCase,
        CopyNodeTestCase,
        AttributesTestCase,
        ComplexTestCase,
        NotLoggedTestCase,
        CreateParentsTestCase,
    )
    for n in range(niter):
        for case in test_cases:
            theSuite.addTest(unittest.makeSuite(case))
    if common.heavy:
        pass
    return theSuite
# Script entry point: parse the common test options, report library
# versions, then run the full `suite` defined above.
if __name__ == '__main__':
    import sys
    common.parse_argv(sys.argv)
    common.print_versions()
    unittest.main(defaultTest='suite')
## Local Variables:
## mode: python
## End:
| 35.85381
| 79
| 0.612281
| 11,392
| 98,347
| 5.186096
| 0.03915
| 0.164861
| 0.066588
| 0.063896
| 0.886104
| 0.8511
| 0.820447
| 0.792417
| 0.764658
| 0.737339
| 0
| 0.037729
| 0.256449
| 98,347
| 2,742
| 80
| 35.866885
| 0.770191
| 0.154372
| 0
| 0.744841
| 0
| 0
| 0.144727
| 0.014186
| 0
| 0
| 0
| 0
| 0.387117
| 1
| 0.057536
| false
| 0.001876
| 0.005629
| 0
| 0.076923
| 0.077548
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0794b2411672826b91a88649d990d09b514526d3
| 2,390
|
py
|
Python
|
scripts/quest/q34931e.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 54
|
2019-04-16T23:24:48.000Z
|
2021-12-18T11:41:50.000Z
|
scripts/quest/q34931e.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 3
|
2019-05-19T15:19:41.000Z
|
2020-04-27T16:29:16.000Z
|
scripts/quest/q34931e.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 49
|
2020-11-25T23:29:16.000Z
|
2022-03-26T16:20:24.000Z
|
# Created by MechAviv
# Quest ID :: 34931
# Not coded yet
#
# Quest-end dialogue script: two NPC speakers each deliver one box-chat
# line, then the quest is completed, EXP is granted, and the player is
# warped to the next map.  The `sm.*` calls drive the game's script
# manager; their exact order defines the on-screen dialogue flow.

# First speaker: NPC 3001510, box chat shown with the player as speaker.
sm.setSpeakerID(3001510)
sm.setSpeakerType(3)
sm.flipDialogue()
sm.setBoxChat()
sm.boxChatPlayerAsSpeaker()
sm.setBoxOverrideSpeaker()
sm.flipBoxChat()
sm.flipBoxChatPlayerAsSpeaker()
sm.setColor(1)
sm.sendNext("#face1#Good work! I'm getting the signal again. We need to move quickly. Follow me.")
# Second speaker: NPC 3001509, same box-chat setup.
sm.setSpeakerID(3001509)
sm.setSpeakerType(3)
sm.flipDialogue()
sm.setBoxChat()
sm.boxChatPlayerAsSpeaker()
sm.setBoxOverrideSpeaker()
sm.flipBoxChat()
sm.flipBoxChatPlayerAsSpeaker()
sm.setColor(1)
sm.sendSay("#face3#Oh, we didn't let the sandstorm get us down!\r\nNow our trouble's behind us, and we're searchin' around!")
# Update Quest Record EX | Quest ID: [34995] | Data: 00=h1;10=h0;01=h0;11=h0;02=h0;12=h0;13=h0;04=h0;23=h0;14=h0;05=h0;24=h0;15=h0;06=h0;16=h0;07=h0;17=h0;09=h0
sm.completeQuest(34931)
# Unhandled Stat Changed [EXP] Packet: 00 00 00 00 01 00 00 00 00 00 EB 21 00 00 00 00 00 00 FF 00 00 00 00
sm.giveExp(7360)
# The packet dumps below were captured from the original game server and
# are kept as a reference for behavior this script does not yet replay.
# Update Quest Record EX | Quest ID: [34931] | Data: exp=1
# Update Quest Record EX | Quest ID: [34995] | Data: 00=h1;10=h0;01=h0;11=h1;02=h0;12=h0;13=h0;04=h0;23=h0;14=h0;05=h0;24=h0;15=h0;06=h0;16=h0;07=h0;17=h0;09=h0
# Unhandled Message [47] Packet: 2F 0A 00 00 00 40 9C 00 00 00 00 00 00 28 00 00 00 00 00 00 80 05 BB 46 E6 17 02 0C 00 75 73 65 72 5F 6C 76 75 70 3D 32 36 B8 58 08 00 00 00 00 00 23 02 00 00 00 00 00 80 05 BB 46 E6 17 02 0D 00 6D 6F 62 5F 6B 69 6C 6C 3D 34 39 35 38 58 68 08 00 00 00 00 00 27 02 00 00 00 00 00 80 05 BB 46 E6 17 02 0D 00 6D 6F 62 5F 6B 69 6C 6C 3D 34 39 35 38 B0 83 08 00 00 00 00 00 2E 02 00 00 00 00 00 80 05 BB 46 E6 17 02 0B 00 6D 6F 62 5F 6B 69 6C 6C 3D 31 38 70 5E 09 00 00 00 00 00 66 02 00 00 00 00 00 80 05 BB 46 E6 17 02 14 00 63 6F 6D 62 6F 6B 69 6C 6C 5F 69 6E 63 72 65 73 65 3D 32 32 E0 75 09 00 00 00 00 00 6C 02 00 00 00 00 00 80 05 BB 46 E6 17 02 0D 00 6D 75 6C 74 69 6B 69 6C 6C 3D 34 32 34 98 81 09 00 00 00 00 00 6F 02 00 00 00 00 00 80 05 BB 46 E6 17 02 0D 00 6D 75 6C 74 69 6B 69 6C 6C 3D 34 32 34 50 8D 09 00 00 00 00 00 72 02 00 00 00 00 00 80 05 BB 46 E6 17 02 0A 00 6D 6F 62 5F 6B 69 6C 6C 3D 38 08 99 09 00 00 00 00 00 75 02 00 00 00 00 00 80 05 BB 46 E6 17 02 0A 00 6D 6F 62 5F 6B 69 6C 6C 3D 38 C4 22 11 00 00 00 00 00 63 04 00 00 0C 02 A0 18 36 98 8A D6 D4 01 0D 00 66 69 65 6C 64 5F 65 6E 74 65 72 3D 31
sm.warp(402090005, 0)
| 64.594595
| 1,151
| 0.702092
| 607
| 2,390
| 2.764415
| 0.265239
| 0.228844
| 0.253874
| 0.224076
| 0.647199
| 0.63528
| 0.54708
| 0.54708
| 0.54708
| 0.54708
| 0
| 0.448827
| 0.215063
| 2,390
| 36
| 1,152
| 66.388889
| 0.445629
| 0.703766
| 0
| 0.695652
| 0
| 0.086957
| 0.277539
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
07d5737ffacba50ef4c83fc492e72bc9bc3b9ad9
| 47,003
|
py
|
Python
|
test/test_reassembly.py
|
lfntac/vppmirror
|
efbd124378b2a2c46d2747cab8eecf1b172f5c05
|
[
"Apache-2.0"
] | 2
|
2019-04-03T10:09:09.000Z
|
2020-09-23T08:56:24.000Z
|
test/test_reassembly.py
|
napatech/vpp
|
de146e5d5f7e919b423feeff3159c4ecd564c353
|
[
"Apache-2.0"
] | null | null | null |
test/test_reassembly.py
|
napatech/vpp
|
de146e5d5f7e919b423feeff3159c4ecd564c353
|
[
"Apache-2.0"
] | 1
|
2019-03-12T18:19:23.000Z
|
2019-03-12T18:19:23.000Z
|
#!/usr/bin/env python
from random import shuffle
import six
import unittest
from parameterized import parameterized
import scapy.compat
from scapy.packet import Raw
from scapy.layers.l2 import Ether, GRE
from scapy.layers.inet import IP, UDP, ICMP
from scapy.layers.inet6 import IPv6, IPv6ExtHdrFragment, ICMPv6ParamProblem,\
ICMPv6TimeExceeded
from framework import VppTestCase, VppTestRunner
from util import ppp, fragment_rfc791, fragment_rfc8200
from vpp_gre_interface import VppGreInterface
from vpp_ip import DpoProto
from vpp_ip_route import VppIpRoute, VppRoutePath
# 35 is enough to have >257 400-byte fragments
test_packet_count = 35
# <class 'scapy.layers.inet.IP'>
# <class 'scapy.layers.inet6.IPv6'>
_scapy_ip_family_types = (IP, IPv6)
def validate_scapy_ip_family(scapy_ip_family):
    """Raise ValueError unless `scapy_ip_family` is one of IP / IPv6."""
    if scapy_ip_family in _scapy_ip_family_types:
        return
    raise ValueError("'scapy_ip_family' must be of type: %s. Got %s" %
                     (_scapy_ip_family_types, scapy_ip_family))
class TestIPReassemblyMixin(object):
    """Shared driver/verification logic for the IPv4/IPv6 reassembly tests.

    Concrete subclasses provide ``src_if``/``dst_if`` packet-generator
    interfaces plus the fragment streams, and call these methods with the
    matching scapy IP family class (IP or IPv6).
    """

    def verify_capture(self, scapy_ip_family, capture,
                       dropped_packet_indexes=None):
        """Verify captured packet stream.

        :param scapy_ip_family: scapy class (IP or IPv6) used to index
            into each captured packet.
        :param list capture: Captured packet stream.
        :param dropped_packet_indexes: payload indexes that must NOT
            appear in the capture (defaults to none).
        """
        validate_scapy_ip_family(scapy_ip_family)
        if dropped_packet_indexes is None:
            dropped_packet_indexes = []
        info = None
        seen = set()
        for packet in capture:
            try:
                self.logger.debug(ppp("Got packet:", packet))
                ip = packet[scapy_ip_family]
                udp = packet[UDP]
                # Payload carries the index of the originally-sent packet.
                payload_info = self.payload_to_info(packet[Raw])
                packet_index = payload_info.index
                self.assertTrue(
                    packet_index not in dropped_packet_indexes,
                    ppp("Packet received, but should be dropped:", packet))
                if packet_index in seen:
                    raise Exception(ppp("Duplicate packet received", packet))
                seen.add(packet_index)
                self.assertEqual(payload_info.dst, self.src_if.sw_if_index)
                info = self._packet_infos[packet_index]
                self.assertTrue(info is not None)
                self.assertEqual(packet_index, info.index)
                saved_packet = info.data
                # The reassembled packet must match what was originally sent.
                self.assertEqual(ip.src, saved_packet[scapy_ip_family].src)
                self.assertEqual(ip.dst, saved_packet[scapy_ip_family].dst)
                self.assertEqual(udp.payload, saved_packet[UDP].payload)
            except Exception:
                self.logger.error(ppp("Unexpected or invalid packet:", packet))
                raise
        # Every sent packet must either have been received or be expected
        # to have been dropped.
        for index in self._packet_infos:
            self.assertTrue(index in seen or index in dropped_packet_indexes,
                            "Packet with packet_index %d not received" % index)

    def test_disabled(self, scapy_ip_family, stream,
                      dropped_packet_indexes):
        """ reassembly disabled """
        validate_scapy_ip_family(scapy_ip_family)
        is_ip6 = 1 if scapy_ip_family == IPv6 else 0
        # max_reassemblies=0 effectively disables reassembly; fragmented
        # packets are expected to be dropped.
        self.vapi.ip_reassembly_set(timeout_ms=1000, max_reassemblies=0,
                                    expire_walk_interval_ms=10000,
                                    is_ip6=is_ip6)
        self.pg_enable_capture()
        self.src_if.add_stream(stream)
        self.pg_start()
        packets = self.dst_if.get_capture(
            len(self.pkt_infos) - len(dropped_packet_indexes))
        self.verify_capture(scapy_ip_family, packets, dropped_packet_indexes)
        self.src_if.assert_nothing_captured()

    def test_duplicates(self, scapy_ip_family, stream):
        """ duplicate fragments """
        validate_scapy_ip_family(scapy_ip_family)
        self.pg_enable_capture()
        self.src_if.add_stream(stream)
        self.pg_start()
        packets = self.dst_if.get_capture(len(self.pkt_infos))
        self.verify_capture(scapy_ip_family, packets)
        self.src_if.assert_nothing_captured()

    def test_random(self, scapy_ip_family, stream):
        """ random order reassembly """
        validate_scapy_ip_family(scapy_ip_family)
        fragments = list(stream)
        shuffle(fragments)
        self.pg_enable_capture()
        self.src_if.add_stream(fragments)
        self.pg_start()
        # NOTE(review): this method (and test_reversed) counts via
        # self.packet_infos while the others use self.pkt_infos —
        # presumably packet_infos comes from the base framework; confirm.
        packets = self.dst_if.get_capture(len(self.packet_infos))
        self.verify_capture(scapy_ip_family, packets)
        self.src_if.assert_nothing_captured()
        # run it all again to verify correctness
        self.pg_enable_capture()
        self.src_if.add_stream(fragments)
        self.pg_start()
        packets = self.dst_if.get_capture(len(self.packet_infos))
        self.verify_capture(scapy_ip_family, packets)
        self.src_if.assert_nothing_captured()

    def test_reassembly(self, scapy_ip_family, stream):
        """ basic reassembly """
        validate_scapy_ip_family(scapy_ip_family)
        self.pg_enable_capture()
        self.src_if.add_stream(stream)
        self.pg_start()
        packets = self.dst_if.get_capture(len(self.pkt_infos))
        self.verify_capture(scapy_ip_family, packets)
        self.src_if.assert_nothing_captured()
        # run it all again to verify correctness
        self.pg_enable_capture()
        self.src_if.add_stream(stream)
        self.pg_start()
        packets = self.dst_if.get_capture(len(self.pkt_infos))
        self.verify_capture(scapy_ip_family, packets)
        self.src_if.assert_nothing_captured()

    def test_reversed(self, scapy_ip_family, stream):
        """ reverse order reassembly """
        validate_scapy_ip_family(scapy_ip_family)
        fragments = list(stream)
        fragments.reverse()
        self.pg_enable_capture()
        self.src_if.add_stream(fragments)
        self.pg_start()
        packets = self.dst_if.get_capture(len(self.packet_infos))
        self.verify_capture(scapy_ip_family, packets)
        self.src_if.assert_nothing_captured()
        # run it all again to verify correctness
        self.pg_enable_capture()
        self.src_if.add_stream(fragments)
        self.pg_start()
        packets = self.dst_if.get_capture(len(self.packet_infos))
        self.verify_capture(scapy_ip_family, packets)
        self.src_if.assert_nothing_captured()

    def test_timeout_inline(self, scapy_ip_family, stream,
                            dropped_packet_indexes):
        """ timeout (inline) """
        validate_scapy_ip_family(scapy_ip_family)
        is_ip6 = 1 if scapy_ip_family == IPv6 else 0
        # timeout_ms=0 makes every in-progress reassembly expire
        # immediately, so multi-fragment packets are dropped inline.
        self.vapi.ip_reassembly_set(timeout_ms=0, max_reassemblies=1000,
                                    expire_walk_interval_ms=10000,
                                    is_ip6=is_ip6)
        self.pg_enable_capture()
        self.src_if.add_stream(stream)
        self.pg_start()
        packets = self.dst_if.get_capture(
            len(self.pkt_infos) - len(dropped_packet_indexes))
        self.verify_capture(scapy_ip_family, packets,
                            dropped_packet_indexes)
class TestIPv4Reassembly(TestIPReassemblyMixin, VppTestCase):
    """ IPv4 Reassembly """

    @classmethod
    def setUpClass(cls):
        super(TestIPv4Reassembly, cls).setUpClass()
        cls.create_pg_interfaces([0, 1])
        cls.src_if = cls.pg0
        cls.dst_if = cls.pg1

        # setup all interfaces
        for i in cls.pg_interfaces:
            i.admin_up()
            i.config_ip4()
            i.resolve_arp()

        # packet sizes
        cls.packet_sizes = [64, 512, 1518, 9018]
        cls.padding = " abcdefghijklmn"
        cls.create_stream(cls.packet_sizes)
        cls.create_fragments()

    @classmethod
    def tearDownClass(cls):
        super(TestIPv4Reassembly, cls).tearDownClass()

    def setUp(self):
        """ Test setup - force timeout on existing reassemblies """
        super(TestIPv4Reassembly, self).setUp()
        self.vapi.ip_reassembly_enable_disable(
            sw_if_index=self.src_if.sw_if_index, enable_ip4=True)
        # Zero timeout + fast expire walk flushes any reassembly contexts
        # left over from a previous test...
        self.vapi.ip_reassembly_set(timeout_ms=0, max_reassemblies=1000,
                                    expire_walk_interval_ms=10)
        self.sleep(.25)
        # ...then restore an effectively infinite timeout for the test body.
        self.vapi.ip_reassembly_set(timeout_ms=1000000, max_reassemblies=1000,
                                    expire_walk_interval_ms=10000)

    def tearDown(self):
        super(TestIPv4Reassembly, self).tearDown()

    def show_commands_at_teardown(self):
        self.logger.debug(self.vapi.ppcli("show ip4-reassembly details"))
        self.logger.debug(self.vapi.ppcli("show buffers"))

    @classmethod
    def create_stream(cls, packet_sizes, packet_count=test_packet_count):
        """Create input packet stream

        :param list packet_sizes: Required packet sizes.
        """
        for i in range(0, packet_count):
            info = cls.create_packet_info(cls.src_if, cls.src_if)
            payload = cls.info_to_payload(info)
            p = (Ether(dst=cls.src_if.local_mac, src=cls.src_if.remote_mac) /
                 IP(id=info.index, src=cls.src_if.remote_ip4,
                    dst=cls.dst_if.remote_ip4) /
                 UDP(sport=1234, dport=5678) /
                 Raw(payload))
            # consecutive pairs of packets share a size, cycling the list
            size = packet_sizes[(i // 2) % len(packet_sizes)]
            cls.extend_packet(p, size, cls.padding)
            info.data = p

    @classmethod
    def create_fragments(cls):
        """Pre-fragment every created packet at 400/300/200-byte sizes."""
        infos = cls._packet_infos
        cls.pkt_infos = []
        for index, info in six.iteritems(infos):
            p = info.data
            # cls.logger.debug(ppp("Packet:",
            #                      p.__class__(scapy.compat.raw(p))))
            fragments_400 = fragment_rfc791(p, 400)
            fragments_300 = fragment_rfc791(p, 300)
            # 200-byte fragments are produced by re-fragmenting the
            # 400-byte ones.
            fragments_200 = [
                x for f in fragments_400 for x in fragment_rfc791(f, 200)]
            cls.pkt_infos.append(
                (index, fragments_400, fragments_300, fragments_200))
        cls.fragments_400 = [
            x for (_, frags, _, _) in cls.pkt_infos for x in frags]
        cls.fragments_300 = [
            x for (_, _, frags, _) in cls.pkt_infos for x in frags]
        cls.fragments_200 = [
            x for (_, _, _, frags) in cls.pkt_infos for x in frags]
        cls.logger.debug("Fragmented %s packets into %s 400-byte fragments, "
                         "%s 300-byte fragments and %s 200-byte fragments" %
                         (len(infos), len(cls.fragments_400),
                          len(cls.fragments_300), len(cls.fragments_200)))

    # The `stream` argument from @parameterized.expand is a placeholder
    # (None); the real stream is resolved at run time, after setUpClass
    # has built the fragment lists.
    @parameterized.expand([(IP, None)])
    def test_reassembly(self, family, stream):
        """ basic reassembly """
        stream = self.__class__.fragments_200
        super(TestIPv4Reassembly, self).test_reassembly(family, stream)

    @parameterized.expand([(IP, None)])
    def test_reversed(self, family, stream):
        """ reverse order reassembly """
        stream = self.__class__.fragments_200
        super(TestIPv4Reassembly, self).test_reversed(family, stream)

    @parameterized.expand([(IP, None)])
    def test_random(self, family, stream):
        """ random order reassembly """
        stream = self.__class__.fragments_200
        super(TestIPv4Reassembly, self).test_random(family, stream)

    def test_5737(self):
        """ fragment length + ip header size > 65535 """
        self.vapi.cli("clear errors")
        # Pre-built malformed IPv4 header (captured reproducer).
        raw = ('E\x00\x00\x88,\xf8\x1f\xfe@\x01\x98\x00\xc0\xa8\n-\xc0\xa8\n'
               '\x01\x08\x00\xf0J\xed\xcb\xf1\xf5Test-group: IPv4.IPv4.ipv4-'
               'message.Ethernet-Payload.IPv4-Packet.IPv4-Header.Fragment-Of'
               'fset; Test-case: 5737')
        malformed_packet = (Ether(dst=self.src_if.local_mac,
                                  src=self.src_if.remote_mac) /
                            IP(raw))
        p = (Ether(dst=self.src_if.local_mac, src=self.src_if.remote_mac) /
             IP(id=1000, src=self.src_if.remote_ip4,
                dst=self.dst_if.remote_ip4) /
             UDP(sport=1234, dport=5678) /
             Raw("X" * 1000))
        valid_fragments = fragment_rfc791(p, 400)

        self.pg_enable_capture()
        self.src_if.add_stream([malformed_packet] + valid_fragments)
        self.pg_start()

        # Only the valid packet makes it through.
        self.dst_if.get_capture(1)
        self.assert_packet_counter_equal("ip4-reassembly-feature", 1)
        # TODO remove above, uncomment below once clearing of counters
        # is supported
        # self.assert_packet_counter_equal(
        #     "/err/ip4-reassembly-feature/malformed packets", 1)

    def test_44924(self):
        """ compress tiny fragments """
        packets = [(Ether(dst=self.src_if.local_mac,
                          src=self.src_if.remote_mac) /
                    IP(id=24339, flags="MF", frag=0, ttl=64,
                       src=self.src_if.remote_ip4,
                       dst=self.dst_if.remote_ip4) /
                    ICMP(type="echo-request", code=0, id=0x1fe6, seq=0x2407) /
                    Raw(load='Test-group: IPv4')),
                   (Ether(dst=self.src_if.local_mac,
                          src=self.src_if.remote_mac) /
                    IP(id=24339, flags="MF", frag=3, ttl=64,
                       src=self.src_if.remote_ip4,
                       dst=self.dst_if.remote_ip4) /
                    ICMP(type="echo-request", code=0, id=0x1fe6, seq=0x2407) /
                    Raw(load='.IPv4.Fragmentation.vali')),
                   (Ether(dst=self.src_if.local_mac,
                          src=self.src_if.remote_mac) /
                    IP(id=24339, frag=6, ttl=64,
                       src=self.src_if.remote_ip4,
                       dst=self.dst_if.remote_ip4) /
                    ICMP(type="echo-request", code=0, id=0x1fe6, seq=0x2407) /
                    Raw(load='d; Test-case: 44924'))
                   ]

        self.pg_enable_capture()
        self.src_if.add_stream(packets)
        self.pg_start()
        self.dst_if.get_capture(1)

    def test_frag_1(self):
        """ fragment of size 1 """
        self.vapi.cli("clear errors")
        malformed_packets = [(Ether(dst=self.src_if.local_mac,
                                    src=self.src_if.remote_mac) /
                              IP(id=7, len=21, flags="MF", frag=0, ttl=64,
                                 src=self.src_if.remote_ip4,
                                 dst=self.dst_if.remote_ip4) /
                              ICMP(type="echo-request")),
                             (Ether(dst=self.src_if.local_mac,
                                    src=self.src_if.remote_mac) /
                              IP(id=7, len=21, frag=1, ttl=64,
                                 src=self.src_if.remote_ip4,
                                 dst=self.dst_if.remote_ip4) /
                              Raw(load='\x08')),
                             ]

        p = (Ether(dst=self.src_if.local_mac, src=self.src_if.remote_mac) /
             IP(id=1000, src=self.src_if.remote_ip4,
                dst=self.dst_if.remote_ip4) /
             UDP(sport=1234, dport=5678) /
             Raw("X" * 1000))
        valid_fragments = fragment_rfc791(p, 400)

        self.pg_enable_capture()
        self.src_if.add_stream(malformed_packets + valid_fragments)
        self.pg_start()

        self.dst_if.get_capture(1)
        self.assert_packet_counter_equal("ip4-reassembly-feature", 1)
        # TODO remove above, uncomment below once clearing of counters
        # is supported
        # self.assert_packet_counter_equal(
        #     "/err/ip4-reassembly-feature/malformed packets", 1)

    @parameterized.expand([(IP, None)])
    def test_duplicates(self, family, stream):
        """ duplicate fragments """
        fragments = [
            # IPv4 uses 4 fields in pkt_infos, IPv6 uses 3.
            x for (_, frags, _, _) in self.pkt_infos
            for x in frags
            for _ in range(0, min(2, len(frags)))
        ]
        super(TestIPv4Reassembly, self).test_duplicates(family, fragments)

    def test_overlap1(self):
        """ overlapping fragments case #1 """
        fragments = []
        for _, _, frags_300, frags_200 in self.pkt_infos:
            if len(frags_300) == 1:
                fragments.extend(frags_300)
            else:
                for i, j in zip(frags_200, frags_300):
                    fragments.extend(i)
                    fragments.extend(j)

        self.pg_enable_capture()
        self.src_if.add_stream(fragments)
        self.pg_start()
        packets = self.dst_if.get_capture(len(self.pkt_infos))
        self.verify_capture(IP, packets)
        self.src_if.assert_nothing_captured()

        # run it all to verify correctness
        self.pg_enable_capture()
        self.src_if.add_stream(fragments)
        self.pg_start()
        packets = self.dst_if.get_capture(len(self.pkt_infos))
        self.verify_capture(IP, packets)
        self.src_if.assert_nothing_captured()

    def test_overlap2(self):
        """ overlapping fragments case #2 """
        fragments = []
        for _, _, frags_300, frags_200 in self.pkt_infos:
            if len(frags_300) == 1:
                fragments.extend(frags_300)
            else:
                # care must be taken here so that there are no fragments
                # received by vpp after reassembly is finished, otherwise
                # new reassemblies will be started and packet generator will
                # freak out when it detects unfreed buffers
                # FIX: zip() returns a lazy, non-subscriptable iterator on
                # Python 3, so materialize it before slicing/indexing.
                zipped = list(zip(frags_300, frags_200))
                for i, j in zipped[:-1]:
                    fragments.extend(i)
                    fragments.extend(j)
                fragments.append(zipped[-1][0])

        self.pg_enable_capture()
        self.src_if.add_stream(fragments)
        self.pg_start()
        packets = self.dst_if.get_capture(len(self.pkt_infos))
        self.verify_capture(IP, packets)
        self.src_if.assert_nothing_captured()

        # run it all to verify correctness
        self.pg_enable_capture()
        self.src_if.add_stream(fragments)
        self.pg_start()
        packets = self.dst_if.get_capture(len(self.pkt_infos))
        self.verify_capture(IP, packets)
        self.src_if.assert_nothing_captured()

    @parameterized.expand([(IP, None, None)])
    def test_timeout_inline(self, family, stream, dropped_packet_indexes):
        """ timeout (inline) """
        stream = self.fragments_400
        # every multi-fragment packet is expected to time out and be dropped
        dropped_packet_indexes = set(
            index for (index, frags, _, _) in self.pkt_infos if len(frags) > 1
        )
        super(TestIPv4Reassembly, self).test_timeout_inline(
            family, stream, dropped_packet_indexes)
        self.src_if.assert_nothing_captured()

    def test_timeout_cleanup(self):
        """ timeout (cleanup) """
        # whole packets + fragmented packets sans last fragment
        fragments = [
            x for (_, frags_400, _, _) in self.pkt_infos
            for x in frags_400[:-1 if len(frags_400) > 1 else None]
        ]
        # last fragments for fragmented packets
        fragments2 = [frags_400[-1]
                      for (_, frags_400, _, _) in self.pkt_infos
                      if len(frags_400) > 1]
        dropped_packet_indexes = set(
            index for (index, frags_400, _, _) in self.pkt_infos
            if len(frags_400) > 1)
        self.vapi.ip_reassembly_set(timeout_ms=100, max_reassemblies=1000,
                                    expire_walk_interval_ms=50)
        self.pg_enable_capture()
        self.src_if.add_stream(fragments)
        self.pg_start()
        # wait long enough for the partial reassemblies to expire before
        # delivering the final fragments
        self.sleep(.25, "wait before sending rest of fragments")
        self.src_if.add_stream(fragments2)
        self.pg_start()
        packets = self.dst_if.get_capture(
            len(self.pkt_infos) - len(dropped_packet_indexes))
        self.verify_capture(IP, packets, dropped_packet_indexes)
        self.src_if.assert_nothing_captured()

    @parameterized.expand([(IP, None, None)])
    def test_disabled(self, family, stream, dropped_packet_indexes):
        """ reassembly disabled """
        stream = self.__class__.fragments_400
        dropped_packet_indexes = set(
            index for (index, frags_400, _, _) in self.pkt_infos
            if len(frags_400) > 1)
        super(TestIPv4Reassembly, self).test_disabled(
            family, stream, dropped_packet_indexes)
class TestIPv6Reassembly(TestIPReassemblyMixin, VppTestCase):
""" IPv6 Reassembly """
@classmethod
def setUpClass(cls):
    # Bring up two packet-generator interfaces: pg0 is the fragment
    # source, pg1 receives the reassembled packets.
    super(TestIPv6Reassembly, cls).setUpClass()
    cls.create_pg_interfaces([0, 1])
    cls.src_if = cls.pg0
    cls.dst_if = cls.pg1

    # setup all interfaces
    for i in cls.pg_interfaces:
        i.admin_up()
        i.config_ip6()
        i.resolve_ndp()

    # packet sizes
    cls.packet_sizes = [64, 512, 1518, 9018]
    cls.padding = " abcdefghijklmn"
    # Build the packet stream once per class, then pre-fragment it.
    cls.create_stream(cls.packet_sizes)
    cls.create_fragments()
@classmethod
def tearDownClass(cls):
    # No class-level state of our own to release; defer to the framework.
    super(TestIPv6Reassembly, cls).tearDownClass()
def setUp(self):
    """ Test setup - force timeout on existing reassemblies """
    super(TestIPv6Reassembly, self).setUp()
    self.vapi.ip_reassembly_enable_disable(
        sw_if_index=self.src_if.sw_if_index, enable_ip6=True)
    # Zero timeout + fast expire walk flushes any IPv6 reassembly
    # contexts left over from a previous test...
    self.vapi.ip_reassembly_set(timeout_ms=0, max_reassemblies=1000,
                                expire_walk_interval_ms=10, is_ip6=1)
    self.sleep(.25)
    # ...then restore an effectively infinite timeout for the test body.
    self.vapi.ip_reassembly_set(timeout_ms=1000000, max_reassemblies=1000,
                                expire_walk_interval_ms=10000, is_ip6=1)
    self.logger.debug(self.vapi.ppcli("show ip6-reassembly details"))
    self.logger.debug(self.vapi.ppcli("show buffers"))
def tearDown(self):
    # Nothing instance-specific to clean up; defer to the framework.
    super(TestIPv6Reassembly, self).tearDown()
def show_commands_at_teardown(self):
    # Dump reassembly and buffer state to the log for post-mortem debugging.
    self.logger.debug(self.vapi.ppcli("show ip6-reassembly details"))
    self.logger.debug(self.vapi.ppcli("show buffers"))
@classmethod
def create_stream(cls, packet_sizes, packet_count=test_packet_count):
    """Create input packet stream for defined interface.

    :param list packet_sizes: Required packet sizes.
    """
    n_sizes = len(packet_sizes)
    for idx in range(0, packet_count):
        pkt_info = cls.create_packet_info(cls.src_if, cls.src_if)
        payload = cls.info_to_payload(pkt_info)
        pkt = (Ether(dst=cls.src_if.local_mac, src=cls.src_if.remote_mac) /
               IPv6(src=cls.src_if.remote_ip6,
                    dst=cls.dst_if.remote_ip6) /
               UDP(sport=1234, dport=5678) /
               Raw(payload))
        # Consecutive pairs of packets share a size, cycling the size list.
        target_size = packet_sizes[(idx // 2) % n_sizes]
        cls.extend_packet(pkt, target_size, cls.padding)
        pkt_info.data = pkt
@classmethod
def create_fragments(cls):
    # Pre-fragment every created packet at 400- and 300-byte sizes and
    # publish both the per-packet tuples (pkt_infos) and the flattened
    # streams (fragments_400 / fragments_300) as class attributes.
    infos = cls._packet_infos
    cls.pkt_infos = []
    for index, info in six.iteritems(infos):
        p = info.data
        # cls.logger.debug(ppp("Packet:",
        #                      p.__class__(scapy.compat.raw(p))))
        fragments_400 = fragment_rfc8200(p, info.index, 400)
        fragments_300 = fragment_rfc8200(p, info.index, 300)
        cls.pkt_infos.append((index, fragments_400, fragments_300))
    cls.fragments_400 = [
        x for _, frags, _ in cls.pkt_infos for x in frags]
    cls.fragments_300 = [
        x for _, _, frags in cls.pkt_infos for x in frags]
    cls.logger.debug("Fragmented %s packets into %s 400-byte fragments, "
                     "and %s 300-byte fragments" %
                     (len(infos), len(cls.fragments_400),
                      len(cls.fragments_300)))
@parameterized.expand([(IPv6, None)])
def test_reassembly(self, family, stream):
    """ basic reassembly """
    # `stream` from the decorator is a placeholder (None); the real
    # stream is only available after setUpClass has built the fragments.
    stream = self.__class__.fragments_400
    super(TestIPv6Reassembly, self).test_reassembly(family, stream)
@parameterized.expand([(IPv6, None)])
def test_reversed(self, family, stream):
    """ reverse order reassembly """
    # Placeholder `stream` replaced with the class-built fragment list.
    stream = self.__class__.fragments_400
    super(TestIPv6Reassembly, self).test_reversed(family, stream)
@parameterized.expand([(IPv6, None)])
def test_random(self, family, stream):
    """ random order reassembly """
    # Placeholder `stream` replaced with the class-built fragment list.
    stream = self.__class__.fragments_400
    super(TestIPv6Reassembly, self).test_random(family, stream)
@parameterized.expand([(IPv6, None)])
def test_duplicates(self, family, stream):
    """ duplicate fragments """
    # Send each fragment twice (once for single-fragment packets).
    fragments = [
        # IPv4 uses 4 fields in pkt_infos, IPv6 uses 3.
        x for (_, frags, _) in self.pkt_infos
        for x in frags
        for _ in range(0, min(2, len(frags)))
    ]
    super(TestIPv6Reassembly, self).test_duplicates(family, fragments)
def test_overlap1(self):
    """ overlapping fragments case #1 (differs from IP test case)"""
    # Interleave 300- and 400-byte fragments of the same packet; unlike
    # IPv4, IPv6 forbids overlapping fragments, so multi-fragment
    # packets are expected to be dropped.
    fragments = []
    for _, frags_400, frags_300 in self.pkt_infos:
        if len(frags_300) == 1:
            fragments.extend(frags_400)
        else:
            for i, j in zip(frags_300, frags_400):
                fragments.extend(i)
                fragments.extend(j)

    dropped_packet_indexes = set(
        index for (index, _, frags) in self.pkt_infos if len(frags) > 1
    )

    self.pg_enable_capture()
    self.src_if.add_stream(fragments)
    self.pg_start()
    packets = self.dst_if.get_capture(
        len(self.pkt_infos) - len(dropped_packet_indexes))
    self.verify_capture(IPv6, packets, dropped_packet_indexes)
    self.src_if.assert_nothing_captured()
def test_overlap2(self):
    """ overlapping fragments case #2 (differs from IP test case)"""
    # Interleave 400- and 300-byte fragments of the same packet; IPv6
    # forbids overlapping fragments, so multi-fragment packets are
    # expected to be dropped.
    fragments = []
    for _, frags_400, frags_300 in self.pkt_infos:
        if len(frags_400) == 1:
            fragments.extend(frags_400)
        else:
            # care must be taken here so that there are no fragments
            # received by vpp after reassembly is finished, otherwise
            # new reassemblies will be started and packet generator will
            # freak out when it detects unfreed buffers
            # FIX: zip() returns a lazy, non-subscriptable iterator on
            # Python 3, so materialize it before slicing/indexing.
            zipped = list(zip(frags_400, frags_300))
            for i, j in zipped[:-1]:
                fragments.extend(i)
                fragments.extend(j)
            fragments.append(zipped[-1][0])

    dropped_packet_indexes = set(
        index for (index, _, frags) in self.pkt_infos if len(frags) > 1
    )

    self.pg_enable_capture()
    self.src_if.add_stream(fragments)
    self.pg_start()
    packets = self.dst_if.get_capture(
        len(self.pkt_infos) - len(dropped_packet_indexes))
    self.verify_capture(IPv6, packets, dropped_packet_indexes)
    self.src_if.assert_nothing_captured()
@parameterized.expand([(IPv6, None, None)])
def test_timeout_inline(self, family, stream, dropped_packets_index):
    """ timeout (inline) """
    # NOTE(review): the decorator-supplied `stream` and
    # `dropped_packets_index` (sic) are placeholders and are rebuilt
    # locally below; the parameter name differs from the twin IPv4 test.
    stream = self.__class__.fragments_400

    dropped_packet_indexes = set(
        index for (index, frags, _) in self.pkt_infos if len(frags) > 1
    )
    super(TestIPv6Reassembly, self).test_timeout_inline(
        family, stream, dropped_packet_indexes)

    # Each timed-out reassembly must produce an ICMPv6 time-exceeded
    # reply carrying the fragment header of the dropped packet.
    pkts = self.src_if.get_capture(
        expected_count=len(dropped_packet_indexes))
    for icmp in pkts:
        self.assertIn(ICMPv6TimeExceeded, icmp)
        self.assertIn(IPv6ExtHdrFragment, icmp)
        self.assertIn(icmp[IPv6ExtHdrFragment].id, dropped_packet_indexes)
        dropped_packet_indexes.remove(icmp[IPv6ExtHdrFragment].id)
def test_timeout_cleanup(self):
    """ timeout (cleanup) """
    # whole packets + fragmented packets sans last fragment
    fragments = [
        x for (_, frags_400, _) in self.pkt_infos
        for x in frags_400[:-1 if len(frags_400) > 1 else None]
    ]
    # last fragments for fragmented packets
    fragments2 = [frags_400[-1]
                  for (_, frags_400, _) in self.pkt_infos
                  if len(frags_400) > 1]
    dropped_packet_indexes = set(
        index for (index, frags_400, _) in self.pkt_infos
        if len(frags_400) > 1)
    # Short timeout so partial reassemblies expire before the final
    # fragments arrive (first call configures IPv4, second IPv6).
    self.vapi.ip_reassembly_set(timeout_ms=100, max_reassemblies=1000,
                                expire_walk_interval_ms=50)
    self.vapi.ip_reassembly_set(timeout_ms=100, max_reassemblies=1000,
                                expire_walk_interval_ms=50, is_ip6=1)
    self.pg_enable_capture()
    self.src_if.add_stream(fragments)
    self.pg_start()
    self.sleep(.25, "wait before sending rest of fragments")
    self.src_if.add_stream(fragments2)
    self.pg_start()
    packets = self.dst_if.get_capture(
        len(self.pkt_infos) - len(dropped_packet_indexes))
    self.verify_capture(IPv6, packets, dropped_packet_indexes)
    # Each expired reassembly must yield an ICMPv6 time-exceeded reply
    # carrying the fragment header of the dropped packet.
    pkts = self.src_if.get_capture(
        expected_count=len(dropped_packet_indexes))
    for icmp in pkts:
        self.assertIn(ICMPv6TimeExceeded, icmp)
        self.assertIn(IPv6ExtHdrFragment, icmp)
        self.assertIn(icmp[IPv6ExtHdrFragment].id, dropped_packet_indexes)
        dropped_packet_indexes.remove(icmp[IPv6ExtHdrFragment].id)
@parameterized.expand([(IPv6, None, None)])
def test_disabled(self, family, stream, dropped_packet_indexes):
    """ reassembly disabled """
    # Decorator-supplied placeholders are rebuilt locally: with
    # reassembly disabled every multi-fragment packet must be dropped.
    stream = self.__class__.fragments_400
    dropped_packet_indexes = set(
        index for (index, frags_400, _) in self.pkt_infos
        if len(frags_400) > 1)
    super(TestIPv6Reassembly, self).test_disabled(
        family, stream, dropped_packet_indexes)
    self.src_if.assert_nothing_captured()
def test_missing_upper(self):
    """ missing upper layer """
    # Build a packet, fragment it, then doctor the second fragment so it
    # claims to be the first fragment (offset 0) while carrying no
    # upper-layer header at all.
    pkt = (Ether(dst=self.src_if.local_mac, src=self.src_if.remote_mac) /
           IPv6(src=self.src_if.remote_ip6,
                dst=self.src_if.local_ip6) /
           UDP(sport=1234, dport=5678) /
           Raw())
    self.extend_packet(pkt, 1000, self.padding)
    frags = fragment_rfc8200(pkt, 1, 500)
    malformed = pkt.__class__(scapy.compat.raw(frags[1]))
    malformed[IPv6ExtHdrFragment].nh = 59  # 59 = IPv6 "no next header"
    malformed[IPv6ExtHdrFragment].offset = 0
    self.pg_enable_capture()
    self.src_if.add_stream([malformed])
    self.pg_start()
    replies = self.src_if.get_capture(expected_count=1)
    reply = replies[0]
    # expect an ICMPv6 parameter problem, code 3
    self.assertIn(ICMPv6ParamProblem, reply)
    self.assert_equal(reply[ICMPv6ParamProblem].code, 3, "ICMP code")
def test_invalid_frag_size(self):
    """ fragment size not a multiple of 8 """
    # Create a fragmented packet, then pad a non-last fragment by an odd
    # 5 bytes so its payload length is not a multiple of 8.
    pkt = (Ether(dst=self.src_if.local_mac, src=self.src_if.remote_mac) /
           IPv6(src=self.src_if.remote_ip6,
                dst=self.src_if.local_ip6) /
           UDP(sport=1234, dport=5678) /
           Raw())
    self.extend_packet(pkt, 1000, self.padding)
    frags = fragment_rfc8200(pkt, 1, 500)
    malformed = frags[0]
    self.extend_packet(malformed, len(malformed) + 5)
    self.pg_enable_capture()
    self.src_if.add_stream([malformed])
    self.pg_start()
    replies = self.src_if.get_capture(expected_count=1)
    reply = replies[0]
    # expect an ICMPv6 parameter problem, code 0
    self.assertIn(ICMPv6ParamProblem, reply)
    self.assert_equal(reply[ICMPv6ParamProblem].code, 0, "ICMP code")
def test_invalid_packet_size(self):
    """ total packet size > 65535 """
    # Create a fragmented packet, then push the second fragment's offset
    # so far out that the reassembled size would exceed 65535 bytes.
    pkt = (Ether(dst=self.src_if.local_mac, src=self.src_if.remote_mac) /
           IPv6(src=self.src_if.remote_ip6,
                dst=self.src_if.local_ip6) /
           UDP(sport=1234, dport=5678) /
           Raw())
    self.extend_packet(pkt, 1000, self.padding)
    frags = fragment_rfc8200(pkt, 1, 500)
    malformed = frags[1]
    malformed[IPv6ExtHdrFragment].offset = 65500
    self.pg_enable_capture()
    self.src_if.add_stream([malformed])
    self.pg_start()
    replies = self.src_if.get_capture(expected_count=1)
    reply = replies[0]
    # expect an ICMPv6 parameter problem, code 0
    self.assertIn(ICMPv6ParamProblem, reply)
    self.assert_equal(reply[ICMPv6ParamProblem].code, 0, "ICMP code")
class TestIPv4Reassembly1LocalNode(VppTestCase):
    """ IPv4 Reassembly for packets coming to ip4-local node """

    @classmethod
    def setUpClass(cls):
        super(TestIPv4ReassemblyLocalNode, cls).setUpClass()

        # single interface acts as both source and destination, since the
        # packets are addressed to VPP itself (ip4-local path)
        cls.create_pg_interfaces([0])
        cls.src_dst_if = cls.pg0

        # setup all interfaces
        for i in cls.pg_interfaces:
            i.admin_up()
            i.config_ip4()
            i.resolve_arp()

        cls.padding = " abcdefghijklmn"
        cls.create_stream()
        cls.create_fragments()

    @classmethod
    def tearDownClass(cls):
        super(TestIPv4ReassemblyLocalNode, cls).tearDownClass()

    def setUp(self):
        """ Test setup - force timeout on existing reassemblies """
        super(TestIPv4ReassemblyLocalNode, self).setUp()
        # zero timeout + fast expiry walk flushes any leftover
        # reassemblies from a previous test...
        self.vapi.ip_reassembly_set(timeout_ms=0, max_reassemblies=1000,
                                    expire_walk_interval_ms=10)
        self.sleep(.25)
        # ...then restore an effectively-infinite timeout for this test
        self.vapi.ip_reassembly_set(timeout_ms=1000000, max_reassemblies=1000,
                                    expire_walk_interval_ms=10000)

    def tearDown(self):
        super(TestIPv4ReassemblyLocalNode, self).tearDown()

    def show_commands_at_teardown(self):
        self.logger.debug(self.vapi.ppcli("show ip4-reassembly details"))
        self.logger.debug(self.vapi.ppcli("show buffers"))

    @classmethod
    def create_stream(cls, packet_count=test_packet_count):
        """Create input packet stream for defined interface.

        :param list packet_sizes: Required packet sizes.
        """
        for i in range(0, packet_count):
            info = cls.create_packet_info(cls.src_dst_if, cls.src_dst_if)
            payload = cls.info_to_payload(info)
            # ICMP echo requests addressed to VPP's own address, so the
            # fragments hit the ip4-local reassembly path
            p = (Ether(dst=cls.src_dst_if.local_mac,
                       src=cls.src_dst_if.remote_mac) /
                 IP(id=info.index, src=cls.src_dst_if.remote_ip4,
                    dst=cls.src_dst_if.local_ip4) /
                 ICMP(type='echo-request', id=1234) /
                 Raw(payload))
            cls.extend_packet(p, 1518, cls.padding)
            info.data = p

    @classmethod
    def create_fragments(cls):
        # split every stored packet into 300-byte fragments and keep both
        # the per-packet grouping and the flat fragment list
        infos = cls._packet_infos
        cls.pkt_infos = []
        for index, info in six.iteritems(infos):
            p = info.data
            # cls.logger.debug(ppp("Packet:",
            #                      p.__class__(scapy.compat.raw(p))))
            fragments_300 = fragment_rfc791(p, 300)
            cls.pkt_infos.append((index, fragments_300))
        cls.fragments_300 = [x for (_, frags) in cls.pkt_infos for x in frags]
        cls.logger.debug("Fragmented %s packets into %s 300-byte fragments" %
                         (len(infos), len(cls.fragments_300)))

    def verify_capture(self, capture):
        """Verify captured packet stream.

        :param list capture: Captured packet stream.
        """
        info = None
        seen = set()
        for packet in capture:
            try:
                self.logger.debug(ppp("Got packet:", packet))
                ip = packet[IP]
                icmp = packet[ICMP]
                payload_info = self.payload_to_info(packet[Raw])
                packet_index = payload_info.index
                if packet_index in seen:
                    raise Exception(ppp("Duplicate packet received", packet))
                seen.add(packet_index)
                self.assertEqual(payload_info.dst, self.src_dst_if.sw_if_index)
                info = self._packet_infos[packet_index]
                self.assertIsNotNone(info)
                self.assertEqual(packet_index, info.index)
                saved_packet = info.data
                # VPP answers the echo request, so src/dst are swapped
                # relative to the packet we sent
                self.assertEqual(ip.src, saved_packet[IP].dst)
                self.assertEqual(ip.dst, saved_packet[IP].src)
                self.assertEqual(icmp.type, 0)  # echo reply
                self.assertEqual(icmp.id, saved_packet[ICMP].id)
                self.assertEqual(icmp.payload, saved_packet[ICMP].payload)
            except Exception:
                self.logger.error(ppp("Unexpected or invalid packet:", packet))
                raise
        for index in self._packet_infos:
            self.assertIn(index, seen,
                          "Packet with packet_index %d not received" % index)

    def test_reassembly(self):
        """ basic reassembly """
        self.pg_enable_capture()
        self.src_dst_if.add_stream(self.fragments_300)
        self.pg_start()

        packets = self.src_dst_if.get_capture(len(self.pkt_infos))
        self.verify_capture(packets)

        # run it all again to verify correctness
        self.pg_enable_capture()
        self.src_dst_if.add_stream(self.fragments_300)
        self.pg_start()

        packets = self.src_dst_if.get_capture(len(self.pkt_infos))
        self.verify_capture(packets)
class TestFIFReassembly(VppTestCase):
    """ Fragments in fragments reassembly """

    @classmethod
    def setUpClass(cls):
        super(TestFIFReassembly, cls).setUpClass()

        cls.create_pg_interfaces([0, 1])
        cls.src_if = cls.pg0
        cls.dst_if = cls.pg1
        for i in cls.pg_interfaces:
            i.admin_up()
            i.config_ip4()
            i.resolve_arp()
            i.config_ip6()
            i.resolve_ndp()

        cls.packet_sizes = [64, 512, 1518, 9018]
        cls.padding = " abcdefghijklmn"

    @classmethod
    def tearDownClass(cls):
        super(TestFIFReassembly, cls).tearDownClass()

    def setUp(self):
        """ Test setup - force timeout on existing reassemblies """
        super(TestFIFReassembly, self).setUp()
        # enable reassembly for both address families on both interfaces
        self.vapi.ip_reassembly_enable_disable(
            sw_if_index=self.src_if.sw_if_index, enable_ip4=True,
            enable_ip6=True)
        self.vapi.ip_reassembly_enable_disable(
            sw_if_index=self.dst_if.sw_if_index, enable_ip4=True,
            enable_ip6=True)
        # zero timeout + fast expiry walk flushes leftover reassemblies...
        self.vapi.ip_reassembly_set(timeout_ms=0, max_reassemblies=1000,
                                    expire_walk_interval_ms=10)
        self.vapi.ip_reassembly_set(timeout_ms=0, max_reassemblies=1000,
                                    expire_walk_interval_ms=10, is_ip6=1)
        self.sleep(.25)
        # ...then restore an effectively-infinite timeout for this test
        self.vapi.ip_reassembly_set(timeout_ms=1000000, max_reassemblies=1000,
                                    expire_walk_interval_ms=10000)
        self.vapi.ip_reassembly_set(timeout_ms=1000000, max_reassemblies=1000,
                                    expire_walk_interval_ms=10000, is_ip6=1)

    def tearDown(self):
        super(TestFIFReassembly, self).tearDown()

    def show_commands_at_teardown(self):
        self.logger.debug(self.vapi.ppcli("show ip4-reassembly details"))
        self.logger.debug(self.vapi.ppcli("show ip6-reassembly details"))
        self.logger.debug(self.vapi.ppcli("show buffers"))

    def verify_capture(self, capture, ip_class, dropped_packet_indexes=[]):
        """Verify captured packet stream.

        :param list capture: Captured packet stream.
        """
        # NOTE(review): mutable default argument; harmless here since it
        # is never mutated, but a None default would be safer - confirm.
        info = None
        seen = set()
        for packet in capture:
            try:
                self.logger.debug(ppp("Got packet:", packet))
                ip = packet[ip_class]
                udp = packet[UDP]
                payload_info = self.payload_to_info(packet[Raw])
                packet_index = payload_info.index
                self.assertTrue(
                    packet_index not in dropped_packet_indexes,
                    ppp("Packet received, but should be dropped:", packet))
                if packet_index in seen:
                    raise Exception(ppp("Duplicate packet received", packet))
                seen.add(packet_index)
                self.assertEqual(payload_info.dst, self.dst_if.sw_if_index)
                info = self._packet_infos[packet_index]
                self.assertTrue(info is not None)
                self.assertEqual(packet_index, info.index)
                saved_packet = info.data
                self.assertEqual(ip.src, saved_packet[ip_class].src)
                self.assertEqual(ip.dst, saved_packet[ip_class].dst)
                self.assertEqual(udp.payload, saved_packet[UDP].payload)
            except Exception:
                self.logger.error(ppp("Unexpected or invalid packet:", packet))
                raise
        # every packet must either arrive or be deliberately dropped
        for index in self._packet_infos:
            self.assertTrue(index in seen or index in dropped_packet_indexes,
                            "Packet with packet_index %d not received" % index)

    def test_fif4(self):
        """ Fragments in fragments (4o4) """

        # TODO this should be ideally in setUpClass, but then we hit a bug
        # with VppIpRoute incorrectly reporting it's present when it's not
        # so we need to manually remove the vpp config, thus we cannot have
        # it shared for multiple test cases
        self.tun_ip4 = "1.1.1.2"

        self.gre4 = VppGreInterface(self, self.src_if.local_ip4, self.tun_ip4)
        self.gre4.add_vpp_config()
        self.gre4.admin_up()
        self.gre4.config_ip4()

        self.vapi.ip_reassembly_enable_disable(
            sw_if_index=self.gre4.sw_if_index, enable_ip4=True)

        self.route4 = VppIpRoute(self, self.tun_ip4, 32,
                                 [VppRoutePath(self.src_if.remote_ip4,
                                               self.src_if.sw_if_index)])
        self.route4.add_vpp_config()

        self.reset_packet_infos()
        for i in range(test_packet_count):
            info = self.create_packet_info(self.src_if, self.dst_if)
            payload = self.info_to_payload(info)
            # Ethernet header here is only for size calculation, thus it
            # doesn't matter how it's initialized. This is to ensure that
            # reassembled packet is not > 9000 bytes, so that it's not dropped
            p = (Ether() /
                 IP(id=i, src=self.src_if.remote_ip4,
                    dst=self.dst_if.remote_ip4) /
                 UDP(sport=1234, dport=5678) /
                 Raw(payload))
            size = self.packet_sizes[(i // 2) % len(self.packet_sizes)]
            self.extend_packet(p, size, self.padding)
            info.data = p[IP]  # use only IP part, without ethernet header

        # inner fragmentation: split the payload packets into 400-byte
        # fragments
        fragments = [x for _, p in six.iteritems(self._packet_infos)
                     for x in fragment_rfc791(p.data, 400)]

        # encapsulate each inner fragment in GRE towards the tunnel
        encapped_fragments = \
            [Ether(dst=self.src_if.local_mac, src=self.src_if.remote_mac) /
             IP(src=self.tun_ip4, dst=self.src_if.local_ip4) /
             GRE() /
             p
             for p in fragments]

        # outer fragmentation: fragment the GRE carrier packets too
        fragmented_encapped_fragments = \
            [x for p in encapped_fragments
             for x in fragment_rfc791(p, 200)]

        self.src_if.add_stream(fragmented_encapped_fragments)

        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()

        self.src_if.assert_nothing_captured()
        packets = self.dst_if.get_capture(len(self._packet_infos))
        self.verify_capture(packets, IP)

        # TODO remove gre vpp config by hand until VppIpRoute gets fixed
        # so that it's query_vpp_config() works as it should
        self.gre4.remove_vpp_config()
        self.logger.debug(self.vapi.ppcli("show interface"))

    def test_fif6(self):
        """ Fragments in fragments (6o6) """
        # TODO this should be ideally in setUpClass, but then we hit a bug
        # with VppIpRoute incorrectly reporting it's present when it's not
        # so we need to manually remove the vpp config, thus we cannot have
        # it shared for multiple test cases
        self.tun_ip6 = "1002::1"

        self.gre6 = VppGreInterface(self, self.src_if.local_ip6, self.tun_ip6)
        self.gre6.add_vpp_config()
        self.gre6.admin_up()
        self.gre6.config_ip6()

        self.vapi.ip_reassembly_enable_disable(
            sw_if_index=self.gre6.sw_if_index, enable_ip6=True)

        self.route6 = VppIpRoute(self, self.tun_ip6, 128,
                                 [VppRoutePath(self.src_if.remote_ip6,
                                               self.src_if.sw_if_index,
                                               proto=DpoProto.DPO_PROTO_IP6)],
                                 is_ip6=1)
        self.route6.add_vpp_config()

        self.reset_packet_infos()
        for i in range(test_packet_count):
            info = self.create_packet_info(self.src_if, self.dst_if)
            payload = self.info_to_payload(info)
            # Ethernet header here is only for size calculation, thus it
            # doesn't matter how it's initialized. This is to ensure that
            # reassembled packet is not > 9000 bytes, so that it's not dropped
            p = (Ether() /
                 IPv6(src=self.src_if.remote_ip6, dst=self.dst_if.remote_ip6) /
                 UDP(sport=1234, dport=5678) /
                 Raw(payload))
            size = self.packet_sizes[(i // 2) % len(self.packet_sizes)]
            self.extend_packet(p, size, self.padding)
            info.data = p[IPv6]  # use only IPv6 part, without ethernet header

        # inner fragmentation: per-packet unique fragment id (the index)
        fragments = [x for _, i in six.iteritems(self._packet_infos)
                     for x in fragment_rfc8200(
                         i.data, i.index, 400)]

        encapped_fragments = \
            [Ether(dst=self.src_if.local_mac, src=self.src_if.remote_mac) /
             IPv6(src=self.tun_ip6, dst=self.src_if.local_ip6) /
             GRE() /
             p
             for p in fragments]

        # outer fragmentation; fragment ids offset past the inner ids so
        # the two reassembly contexts cannot collide; packets too small
        # to carry a fragment header pass through unfragmented
        fragmented_encapped_fragments = \
            [x for p in encapped_fragments for x in (
                fragment_rfc8200(
                    p,
                    2 * len(self._packet_infos) + p[IPv6ExtHdrFragment].id,
                    200)
                if IPv6ExtHdrFragment in p else [p]
            )
            ]

        self.src_if.add_stream(fragmented_encapped_fragments)

        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()

        self.src_if.assert_nothing_captured()
        packets = self.dst_if.get_capture(len(self._packet_infos))
        self.verify_capture(packets, IPv6)

        # TODO remove gre vpp config by hand until VppIpRoute gets fixed
        # so that it's query_vpp_config() works as it should
        self.gre6.remove_vpp_config()
if __name__ == '__main__':
    # allow running this test module directly via the VPP test runner
    unittest.main(testRunner=VppTestRunner)
| 39.432047
| 79
| 0.600643
| 5,725
| 47,003
| 4.682795
| 0.070917
| 0.022194
| 0.035585
| 0.019135
| 0.876944
| 0.847924
| 0.826924
| 0.80473
| 0.7869
| 0.773546
| 0
| 0.03495
| 0.303002
| 47,003
| 1,191
| 80
| 39.465155
| 0.78337
| 0.096909
| 0
| 0.714122
| 0
| 0.002296
| 0.035157
| 0.005522
| 0
| 0
| 0.000857
| 0.001679
| 0.06659
| 1
| 0.072331
| false
| 0
| 0.016073
| 0
| 0.094145
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
07e73b52c9805b7fabebfaf13301bd4f44398c78
| 19,032
|
py
|
Python
|
unit_tests/monitoring/test_metric.py
|
Ofekmeister/google-cloud-python
|
07dd51bc447beca67b8da1c66f1dfb944ef70418
|
[
"Apache-2.0"
] | null | null | null |
unit_tests/monitoring/test_metric.py
|
Ofekmeister/google-cloud-python
|
07dd51bc447beca67b8da1c66f1dfb944ef70418
|
[
"Apache-2.0"
] | null | null | null |
unit_tests/monitoring/test_metric.py
|
Ofekmeister/google-cloud-python
|
07dd51bc447beca67b8da1c66f1dfb944ef70418
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
class TestMetricKind(unittest.TestCase):
    """Tests for the ``MetricKind`` constants class."""

    def _getTargetClass(self):
        from google.cloud.monitoring.metric import MetricKind
        return MetricKind

    def test_one(self):
        # the class must expose at least the GAUGE kind
        self.assertTrue(hasattr(self._getTargetClass(), 'GAUGE'))

    def test_names(self):
        # every public attribute is expected to equal its own name
        klass = self._getTargetClass()
        for name in klass.__dict__:
            if not name.startswith('_'):
                self.assertEqual(getattr(klass, name), name)
class TestValueType(unittest.TestCase):
    """Tests for the ``ValueType`` constants class."""

    def _getTargetClass(self):
        from google.cloud.monitoring.metric import ValueType
        return ValueType

    def test_one(self):
        # the class must expose at least the DISTRIBUTION value type
        self.assertTrue(hasattr(self._getTargetClass(), 'DISTRIBUTION'))

    def test_names(self):
        # every public attribute is expected to equal its own name
        klass = self._getTargetClass()
        for name in klass.__dict__:
            if not name.startswith('_'):
                self.assertEqual(getattr(klass, name), name)
class TestMetricDescriptor(unittest.TestCase):
    """Unit tests for ``MetricDescriptor`` construction, (de)serialization
    and its API operations (create/delete/fetch/list)."""

    def _getTargetClass(self):
        from google.cloud.monitoring.metric import MetricDescriptor
        return MetricDescriptor

    def _makeOne(self, *args, **kwargs):
        # helper: instantiate the class under test
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        from google.cloud.monitoring.label import LabelDescriptor
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        NAME = 'projects/my-project/metricDescriptors/' + TYPE
        LABELS = [
            LabelDescriptor(key='loading', value_type='BOOL',
                            description='Loaded a new instance?'),
            LabelDescriptor(key='response_code', value_type='INT64',
                            description='HTTP status code for the request.'),
        ]
        METRIC_KIND = 'DELTA'
        VALUE_TYPE = 'INT64'
        UNIT = '{responses}/s'
        DESCRIPTION = 'Delta HTTP response count.'
        DISPLAY_NAME = 'Response count'
        client = object()
        descriptor = self._makeOne(
            client=client,
            name=NAME,
            type_=TYPE,
            labels=LABELS,
            metric_kind=METRIC_KIND,
            value_type=VALUE_TYPE,
            unit=UNIT,
            description=DESCRIPTION,
            display_name=DISPLAY_NAME,
        )
        self.assertIs(descriptor.client, client)
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.labels, LABELS)
        self.assertEqual(descriptor.metric_kind, METRIC_KIND)
        self.assertEqual(descriptor.value_type, VALUE_TYPE)
        self.assertEqual(descriptor.unit, UNIT)
        self.assertEqual(descriptor.description, DESCRIPTION)
        self.assertEqual(descriptor.display_name, DISPLAY_NAME)

    def test_constructor_defaults(self):
        # omitting optional arguments yields documented default values
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        client = object()
        descriptor = self._makeOne(client=client, type_=TYPE)
        self.assertIs(descriptor.client, client)
        self.assertIsNone(descriptor.name)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.labels, ())
        self.assertEqual(descriptor.metric_kind, 'METRIC_KIND_UNSPECIFIED')
        self.assertEqual(descriptor.value_type, 'VALUE_TYPE_UNSPECIFIED')
        self.assertEqual(descriptor.unit, '')
        self.assertEqual(descriptor.description, '')
        self.assertEqual(descriptor.display_name, '')

    def test_from_dict(self):
        # _from_dict parses the camelCase JSON representation
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        NAME = 'projects/my-project/metricDescriptors/' + TYPE
        LABEL1 = {'key': 'loading', 'valueType': 'BOOL',
                  'description': 'Loaded a new instance?'}
        LABEL2 = {'key': 'response_code', 'valueType': 'INT64',
                  'description': 'HTTP status code for the request.'}
        METRIC_KIND = 'DELTA'
        VALUE_TYPE = 'INT64'
        UNIT = '{responses}/s'
        DESCRIPTION = 'Delta HTTP response count.'
        DISPLAY_NAME = 'Response count'
        info = {
            'name': NAME,
            'type': TYPE,
            'labels': [LABEL1, LABEL2],
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'unit': UNIT,
            'description': DESCRIPTION,
            'displayName': DISPLAY_NAME,
        }
        client = object()
        descriptor = self._getTargetClass()._from_dict(client, info)
        self.assertIs(descriptor.client, client)
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(len(descriptor.labels), 2)
        label1, label2 = descriptor.labels
        self.assertEqual(label1.key, LABEL1['key'])
        self.assertEqual(label2.key, LABEL2['key'])
        self.assertEqual(descriptor.metric_kind, METRIC_KIND)
        self.assertEqual(descriptor.value_type, VALUE_TYPE)
        self.assertEqual(descriptor.unit, UNIT)
        self.assertEqual(descriptor.description, DESCRIPTION)
        self.assertEqual(descriptor.display_name, DISPLAY_NAME)

    def test_from_dict_defaults(self):
        # minimal payload: optional fields fall back to defaults
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        NAME = 'projects/my-project/metricDescriptors/' + TYPE
        METRIC_KIND = 'CUMULATIVE'
        VALUE_TYPE = 'DOUBLE'
        info = {
            'name': NAME,
            'type': TYPE,
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
        }
        client = object()
        descriptor = self._getTargetClass()._from_dict(client, info)
        self.assertIs(descriptor.client, client)
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.labels, ())
        self.assertEqual(descriptor.metric_kind, METRIC_KIND)
        self.assertEqual(descriptor.value_type, VALUE_TYPE)
        self.assertEqual(descriptor.unit, '')
        self.assertEqual(descriptor.description, '')
        self.assertEqual(descriptor.display_name, '')

    def test_to_dict(self):
        # round-trip: _to_dict emits the same payload minus 'name'
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        NAME = 'projects/my-project/metricDescriptors/' + TYPE
        LABEL1 = {'key': 'loading', 'valueType': 'BOOL',
                  'description': 'Loaded a new instance?'}
        LABEL2 = {'key': 'response_code', 'valueType': 'INT64',
                  'description': 'HTTP status code for the request.'}
        METRIC_KIND = 'DELTA'
        VALUE_TYPE = 'INT64'
        UNIT = '{responses}/s'
        DESCRIPTION = 'Delta HTTP response count.'
        DISPLAY_NAME = 'Response count'
        info = {
            'name': NAME,
            'type': TYPE,
            'labels': [LABEL1, LABEL2],
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'unit': UNIT,
            'description': DESCRIPTION,
            'displayName': DISPLAY_NAME,
        }
        client = object()
        descriptor = self._getTargetClass()._from_dict(client, info)
        del info['name']
        self.assertEqual(descriptor._to_dict(), info)

    def test_to_dict_defaults(self):
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        NAME = 'projects/my-project/metricDescriptors/' + TYPE
        METRIC_KIND = 'DELTA'
        VALUE_TYPE = 'INT64'
        info = {
            'name': NAME,
            'type': TYPE,
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
        }
        client = object()
        descriptor = self._getTargetClass()._from_dict(client, info)
        del info['name']
        self.assertEqual(descriptor._to_dict(), info)

    def test_create(self):
        # create() POSTs the serialized descriptor and absorbs the
        # server-assigned 'name' from the response
        PROJECT = 'my-project'
        TYPE = 'custom.googleapis.com/my_metric'
        PATH = 'projects/{project}/metricDescriptors/'.format(project=PROJECT)
        NAME = PATH + TYPE
        METRIC_KIND = 'GAUGE'
        VALUE_TYPE = 'DOUBLE'
        DESCRIPTION = 'This is my metric.'
        REQUEST = {
            'type': TYPE,
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'description': DESCRIPTION,
        }
        RESPONSE = dict(REQUEST, name=NAME)
        connection = _Connection(RESPONSE)
        client = _Client(project=PROJECT, connection=connection)
        descriptor = self._makeOne(
            client=client,
            type_=TYPE,
            metric_kind=METRIC_KIND,
            value_type=VALUE_TYPE,
            description=DESCRIPTION,
        )
        descriptor.create()
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.labels, ())
        self.assertEqual(descriptor.metric_kind, METRIC_KIND)
        self.assertEqual(descriptor.value_type, VALUE_TYPE)
        self.assertEqual(descriptor.unit, '')
        self.assertEqual(descriptor.description, DESCRIPTION)
        self.assertEqual(descriptor.display_name, '')
        request, = connection._requested
        expected_request = {'method': 'POST', 'path': '/' + PATH,
                            'data': REQUEST}
        self.assertEqual(request, expected_request)

    def test_delete(self):
        # delete() issues a DELETE against the descriptor's path; the
        # kind/value-type arguments are required but not sent
        PROJECT = 'my-project'
        TYPE = 'custom.googleapis.com/my_metric'
        NAME = 'projects/{project}/metricDescriptors/{type}'.format(
            project=PROJECT, type=TYPE)
        connection = _Connection({})
        client = _Client(project=PROJECT, connection=connection)
        descriptor = self._makeOne(
            client=client,
            type_=TYPE,
            metric_kind='NOTUSED',
            value_type='NOTUSED',
        )
        descriptor.delete()
        request, = connection._requested
        expected_request = {'method': 'DELETE', 'path': '/' + NAME}
        self.assertEqual(request, expected_request)

    def test_fetch(self):
        PROJECT = 'my-project'
        TYPE = 'custom.googleapis.com/my_metric'
        NAME = 'projects/{project}/metricDescriptors/{type}'.format(
            project=PROJECT, type=TYPE)
        DESCRIPTION = 'This is my metric.'
        METRIC_DESCRIPTOR = {
            'name': NAME,
            'type': TYPE,
            'metricKind': 'GAUGE',
            'valueType': 'DOUBLE',
            'description': DESCRIPTION,
        }
        connection = _Connection(METRIC_DESCRIPTOR)
        client = _Client(project=PROJECT, connection=connection)
        descriptor = self._getTargetClass()._fetch(client, TYPE)
        self.assertIs(descriptor.client, client)
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.description, DESCRIPTION)
        request, = connection._requested
        expected_request = {'method': 'GET', 'path': '/' + NAME}
        self.assertEqual(request, expected_request)

    def test_list(self):
        PROJECT = 'my-project'
        PATH = 'projects/{project}/metricDescriptors/'.format(project=PROJECT)
        TYPE1 = 'custom.googleapis.com/my_metric_1'
        DESCRIPTION1 = 'This is my first metric.'
        NAME1 = PATH + TYPE1
        METRIC_DESCRIPTOR1 = {
            'name': NAME1,
            'type': TYPE1,
            'metricKind': 'GAUGE',
            'valueType': 'DOUBLE',
            'description': DESCRIPTION1,
        }
        TYPE2 = 'custom.googleapis.com/my_metric_2'
        DESCRIPTION2 = 'This is my second metric.'
        NAME2 = PATH + TYPE2
        METRIC_DESCRIPTOR2 = {
            'name': NAME2,
            'type': TYPE2,
            'metricKind': 'GAUGE',
            'valueType': 'DOUBLE',
            'description': DESCRIPTION2,
        }
        RESPONSE = {
            'metricDescriptors': [METRIC_DESCRIPTOR1, METRIC_DESCRIPTOR2],
        }
        connection = _Connection(RESPONSE)
        client = _Client(project=PROJECT, connection=connection)
        descriptors = self._getTargetClass()._list(client)
        self.assertEqual(len(descriptors), 2)
        descriptor1, descriptor2 = descriptors
        self.assertIs(descriptor1.client, client)
        self.assertEqual(descriptor1.name, NAME1)
        self.assertEqual(descriptor1.type, TYPE1)
        self.assertEqual(descriptor1.description, DESCRIPTION1)
        self.assertIs(descriptor2.client, client)
        self.assertEqual(descriptor2.name, NAME2)
        self.assertEqual(descriptor2.type, TYPE2)
        self.assertEqual(descriptor2.description, DESCRIPTION2)
        request, = connection._requested
        expected_request = {'method': 'GET', 'path': '/' + PATH,
                            'query_params': {}}
        self.assertEqual(request, expected_request)

    def test_list_paged(self):
        from google.cloud.exceptions import NotFound
        PROJECT = 'my-project'
        PATH = 'projects/{project}/metricDescriptors/'.format(project=PROJECT)
        TYPE1 = 'custom.googleapis.com/my_metric_1'
        DESCRIPTION1 = 'This is my first metric.'
        NAME1 = PATH + TYPE1
        METRIC_DESCRIPTOR1 = {
            'name': NAME1,
            'type': TYPE1,
            'metricKind': 'GAUGE',
            'valueType': 'DOUBLE',
            'description': DESCRIPTION1,
        }
        TYPE2 = 'custom.googleapis.com/my_metric_2'
        DESCRIPTION2 = 'This is my second metric.'
        NAME2 = PATH + TYPE2
        METRIC_DESCRIPTOR2 = {
            'name': NAME2,
            'type': TYPE2,
            'metricKind': 'GAUGE',
            'valueType': 'DOUBLE',
            'description': DESCRIPTION2,
        }
        TOKEN = 'second-page-please'
        # two responses: the first carries a nextPageToken, forcing a
        # second request with that token as a query parameter
        RESPONSE1 = {
            'metricDescriptors': [METRIC_DESCRIPTOR1],
            'nextPageToken': TOKEN,
        }
        RESPONSE2 = {
            'metricDescriptors': [METRIC_DESCRIPTOR2],
        }
        connection = _Connection(RESPONSE1, RESPONSE2)
        client = _Client(project=PROJECT, connection=connection)
        descriptors = self._getTargetClass()._list(client)
        self.assertEqual(len(descriptors), 2)
        descriptor1, descriptor2 = descriptors
        self.assertEqual(descriptor1.name, NAME1)
        self.assertEqual(descriptor1.type, TYPE1)
        self.assertEqual(descriptor1.description, DESCRIPTION1)
        self.assertEqual(descriptor2.name, NAME2)
        self.assertEqual(descriptor2.type, TYPE2)
        self.assertEqual(descriptor2.description, DESCRIPTION2)
        request1, request2 = connection._requested
        expected_request1 = {'method': 'GET', 'path': '/' + PATH,
                             'query_params': {}}
        expected_request2 = {'method': 'GET', 'path': '/' + PATH,
                             'query_params': {'pageToken': TOKEN}}
        self.assertEqual(request1, expected_request1)
        self.assertEqual(request2, expected_request2)
        # connection's canned responses are exhausted now
        with self.assertRaises(NotFound):
            self._getTargetClass()._list(client)

    def test_list_filtered(self):
        PROJECT = 'my-project'
        PATH = 'projects/{project}/metricDescriptors/'.format(project=PROJECT)

        # Request only custom metrics.
        FILTER = 'metric.type = starts_with("custom.googleapis.com/")'

        # But let's say there are no custom metrics.
        RESPONSE = {'metricDescriptors': []}

        connection = _Connection(RESPONSE)
        client = _Client(project=PROJECT, connection=connection)
        descriptors = self._getTargetClass()._list(client, FILTER)
        self.assertEqual(len(descriptors), 0)
        request, = connection._requested
        expected_request = {'method': 'GET', 'path': '/' + PATH,
                            'query_params': {'filter': FILTER}}
        self.assertEqual(request, expected_request)

    def test_list_filtered_by_type_prefix(self):
        PROJECT = 'my-project'
        PATH = 'projects/{project}/metricDescriptors/'.format(project=PROJECT)

        # Request only custom metrics.
        PREFIX = 'custom.googleapis.com/'
        FILTER = 'metric.type = starts_with("{prefix}")'.format(prefix=PREFIX)

        # But let's say there are no custom metrics.
        RESPONSE = {'metricDescriptors': []}

        connection = _Connection(RESPONSE)
        client = _Client(project=PROJECT, connection=connection)
        descriptors = self._getTargetClass()._list(client, type_prefix=PREFIX)
        self.assertEqual(len(descriptors), 0)
        request, = connection._requested
        expected_request = {'method': 'GET', 'path': '/' + PATH,
                            'query_params': {'filter': FILTER}}
        self.assertEqual(request, expected_request)
class TestMetric(unittest.TestCase):
    """Tests for the ``Metric`` value class."""

    def _getTargetClass(self):
        from google.cloud.monitoring.metric import Metric
        return Metric

    def _makeOne(self, *args, **kwargs):
        klass = self._getTargetClass()
        return klass(*args, **kwargs)

    def test_constructor(self):
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        LABELS = {
            'response_code': 200,
            'loading': False,
        }
        instance = self._makeOne(type=TYPE, labels=LABELS)
        self.assertEqual(instance.type, TYPE)
        self.assertEqual(instance.labels, LABELS)

    def test_from_dict(self):
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        LABELS = {
            'response_code': 200,
            'loading': False,
        }
        info = {'type': TYPE, 'labels': LABELS}
        instance = self._getTargetClass()._from_dict(info)
        self.assertEqual(instance.type, TYPE)
        self.assertEqual(instance.labels, LABELS)

    def test_from_dict_defaults(self):
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        instance = self._getTargetClass()._from_dict({'type': TYPE})
        # labels default to an empty mapping when absent from the payload
        self.assertEqual(instance.type, TYPE)
        self.assertEqual(instance.labels, {})
class _Connection(object):
    """Test double recording ``api_request`` calls and replaying a fixed
    sequence of canned responses, raising ``NotFound`` once exhausted."""

    def __init__(self, *responses):
        self._responses = list(responses)
        self._requested = []

    def api_request(self, **kwargs):
        from google.cloud.exceptions import NotFound
        # record the call before replying, so callers can assert on it
        self._requested.append(kwargs)
        if not self._responses:
            raise NotFound('miss')
        return self._responses.pop(0)
class _Client(object):
def __init__(self, project, connection):
self.project = project
self.connection = connection
| 34.353791
| 78
| 0.615017
| 1,809
| 19,032
| 6.322278
| 0.117192
| 0.1023
| 0.093993
| 0.02046
| 0.800909
| 0.778701
| 0.751596
| 0.734721
| 0.709802
| 0.681123
| 0
| 0.009407
| 0.273855
| 19,032
| 553
| 79
| 34.415913
| 0.818162
| 0.037253
| 0
| 0.70922
| 0
| 0
| 0.160994
| 0.069489
| 0
| 0
| 0
| 0
| 0.210402
| 1
| 0.068558
| false
| 0
| 0.018913
| 0.004728
| 0.118203
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
580323503389fcedaa7b7264e100d8b10dfddb6b
| 124
|
py
|
Python
|
stable_baselines3/cql/__init__.py
|
mjyoo2/stable-baselines3
|
ef7a580219df6d977b56fb99e503890bd5211195
|
[
"MIT"
] | null | null | null |
stable_baselines3/cql/__init__.py
|
mjyoo2/stable-baselines3
|
ef7a580219df6d977b56fb99e503890bd5211195
|
[
"MIT"
] | null | null | null |
stable_baselines3/cql/__init__.py
|
mjyoo2/stable-baselines3
|
ef7a580219df6d977b56fb99e503890bd5211195
|
[
"MIT"
] | null | null | null |
from stable_baselines3.cql.policies import CnnPolicy, MlpPolicy, MultiInputPolicy
from stable_baselines3.cql.cql import CQL
| 41.333333
| 81
| 0.870968
| 16
| 124
| 6.625
| 0.5625
| 0.188679
| 0.377358
| 0.433962
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.080645
| 124
| 2
| 82
| 62
| 0.912281
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ed162cf23bb14376fc73244dc010cb29b7165712
| 89
|
py
|
Python
|
src/commands/__init__.py
|
AAU-PSix/canary
|
93b07d23cd9380adc03a6aa1291a13eaa3b3008c
|
[
"MIT"
] | null | null | null |
src/commands/__init__.py
|
AAU-PSix/canary
|
93b07d23cd9380adc03a6aa1291a13eaa3b3008c
|
[
"MIT"
] | null | null | null |
src/commands/__init__.py
|
AAU-PSix/canary
|
93b07d23cd9380adc03a6aa1291a13eaa3b3008c
|
[
"MIT"
] | null | null | null |
from .generate_tests import *
from .create_cfg import *
from .mutation_analysis import *
| 22.25
| 32
| 0.797753
| 12
| 89
| 5.666667
| 0.666667
| 0.294118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134831
| 89
| 3
| 33
| 29.666667
| 0.883117
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ed264f2843b4478c9017a67b89051904ff2b56db
| 93
|
py
|
Python
|
kyujipy/__init__.py
|
DrTurnon/KyujiPy
|
8f358de3c3d9f88dad45eabeb30f0befa37728e3
|
[
"MIT"
] | 8
|
2020-05-17T11:57:15.000Z
|
2022-02-26T21:45:23.000Z
|
kyujipy/__init__.py
|
DrTurnon/KyujiPy
|
8f358de3c3d9f88dad45eabeb30f0befa37728e3
|
[
"MIT"
] | null | null | null |
kyujipy/__init__.py
|
DrTurnon/KyujiPy
|
8f358de3c3d9f88dad45eabeb30f0befa37728e3
|
[
"MIT"
] | 2
|
2020-10-11T15:05:22.000Z
|
2021-04-03T14:02:35.000Z
|
from kyujipy.converter import BasicConverter
from kyujipy.converter import KyujitaiConverter
| 31
| 47
| 0.892473
| 10
| 93
| 8.3
| 0.6
| 0.26506
| 0.481928
| 0.626506
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086022
| 93
| 2
| 48
| 46.5
| 0.976471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ed623216d5c887835d9dfb0bdda9fedf9366020f
| 15,799
|
py
|
Python
|
tests/commands/test_grow.py
|
kapb14/hatch
|
e7f7e094571780d6499d41960999134966ae699d
|
[
"Apache-2.0",
"MIT"
] | 1
|
2018-03-15T17:27:37.000Z
|
2018-03-15T17:27:37.000Z
|
tests/commands/test_grow.py
|
anmolsrivastava05/hatch
|
df2c9d46ee7713a1bc156c361cfd0f78e5935297
|
[
"Apache-2.0"
] | null | null | null |
tests/commands/test_grow.py
|
anmolsrivastava05/hatch
|
df2c9d46ee7713a1bc156c361cfd0f78e5935297
|
[
"Apache-2.0"
] | null | null | null |
import os
import shutil
from click.testing import CliRunner
from hatch.cli import hatch
from hatch.env import install_packages
from hatch.settings import (
SETTINGS_FILE, copy_default_settings, save_settings
)
from hatch.utils import basepath, temp_chdir, temp_move_path
from hatch.venv import create_venv, venv
from ..utils import read_file, wait_for_os
def test_invalid_part():
    """Growing an unknown version part exits 2 and leaves the version file untouched."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        res = cli.invoke(hatch, ['grow', 'big'])
        version_file = os.path.join(work_dir, 'ok', '__init__.py')
        version_text = read_file(version_file)
        assert res.exit_code == 2
        assert version_text == "__version__ = '0.0.1'\n"
        assert 'invalid choice' in res.output
def test_package_cwd():
    """`grow minor` from the project root bumps the package found under the cwd."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        res = cli.invoke(hatch, ['grow', 'minor'])
        version_file = os.path.join(work_dir, 'ok', '__init__.py')
        version_text = read_file(version_file)
        assert res.exit_code == 0
        assert version_text == "__version__ = '0.1.0'\n"
        assert 'Updated {}'.format(version_file) in res.output
        assert '0.0.1 -> 0.1.0' in res.output
def test_package_cwd_about():
    """An `__about__.py` is preferred over `__init__.py` when growing the version."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        pkg_dir = os.path.join(work_dir, 'ok')
        init_file = os.path.join(pkg_dir, '__init__.py')
        about_file = os.path.join(pkg_dir, '__about__.py')
        shutil.copyfile(init_file, about_file)
        res = cli.invoke(hatch, ['grow', 'minor'])
        assert res.exit_code == 0
        assert read_file(init_file) == "__version__ = '0.0.1'\n"
        assert read_file(about_file) == "__version__ = '0.1.0'\n"
        assert 'Updated {}'.format(about_file) in res.output
        assert '0.0.1 -> 0.1.0' in res.output
def test_package_cwd_version():
    """A `__version__.py` is preferred over both `__about__.py` and `__init__.py`."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        pkg_dir = os.path.join(work_dir, 'ok')
        init_file = os.path.join(pkg_dir, '__init__.py')
        about_file = os.path.join(pkg_dir, '__about__.py')
        version_file = os.path.join(pkg_dir, '__version__.py')
        shutil.copyfile(init_file, about_file)
        shutil.copyfile(init_file, version_file)
        res = cli.invoke(hatch, ['grow', 'minor'])
        assert res.exit_code == 0
        assert read_file(init_file) == "__version__ = '0.0.1'\n"
        assert read_file(about_file) == "__version__ = '0.0.1'\n"
        assert read_file(version_file) == "__version__ = '0.1.0'\n"
        assert 'Updated {}'.format(version_file) in res.output
        assert '0.0.1 -> 0.1.0' in res.output
def test_package_path():
    """The package dir named after the project wins over an alphabetically-earlier dir."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['new', 'zzz', '--basic', '-ne'])
        origin = os.path.join(work_dir, 'zzz', 'zzz')
        package_dir = os.path.join(work_dir, basepath(work_dir))
        priority_dir = os.path.join(work_dir, 'aaa')
        package_file = os.path.join(package_dir, '__init__.py')
        priority_file = os.path.join(priority_dir, '__init__.py')
        shutil.copytree(origin, package_dir)
        shutil.copytree(origin, priority_dir)
        res = cli.invoke(hatch, ['grow', 'minor'])
        wait_for_os()
        assert res.exit_code == 0
        assert read_file(priority_file) == "__version__ = '0.0.1'\n"
        assert read_file(package_file) == "__version__ = '0.1.0'\n"
        assert 'Updated {}'.format(package_file) in res.output
        assert '0.0.1 -> 0.1.0' in res.output
def test_src_package_path():
    """A `src/<project>` layout wins over both flat package directories."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['new', 'zzz', '--basic', '-ne'])
        origin = os.path.join(work_dir, 'zzz', 'zzz')
        project_name = basepath(work_dir)
        src_package_dir = os.path.join(work_dir, 'src', project_name)
        package_dir = os.path.join(work_dir, project_name)
        priority_dir = os.path.join(work_dir, 'aaa')
        src_package_file = os.path.join(src_package_dir, '__init__.py')
        package_file = os.path.join(package_dir, '__init__.py')
        priority_file = os.path.join(priority_dir, '__init__.py')
        shutil.copytree(origin, src_package_dir)
        shutil.copytree(origin, package_dir)
        shutil.copytree(origin, priority_dir)
        res = cli.invoke(hatch, ['grow', 'minor'])
        wait_for_os()
        assert res.exit_code == 0
        assert read_file(priority_file) == "__version__ = '0.0.1'\n"
        assert read_file(package_file) == "__version__ = '0.0.1'\n"
        assert read_file(src_package_file) == "__version__ = '0.1.0'\n"
        assert 'Updated {}'.format(src_package_file) in res.output
        assert '0.0.1 -> 0.1.0' in res.output
def test_init_cwd():
    """`grow patch` from inside the package directory bumps that package."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        os.chdir(os.path.join(work_dir, 'ok'))
        res = cli.invoke(hatch, ['grow', 'patch'])
        version_file = os.path.join(work_dir, 'ok', '__init__.py')
        version_text = read_file(version_file)
        assert res.exit_code == 0
        assert version_text == "__version__ = '0.0.2'\n"
        assert 'Updated {}'.format(version_file) in res.output
        assert '0.0.1 -> 0.0.2' in res.output
def test_package():
    """A package installed editable into a venv can be grown by name from anywhere."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['new', 'ok', '--basic', '-ne'])
        venv_dir = os.path.join(work_dir, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir):
            os.chdir(os.path.join(work_dir, 'ok'))
            install_packages(['-e', '.'])
            os.chdir(work_dir)
            res = cli.invoke(hatch, ['grow', 'fix', 'ok'])
            version_file = os.path.join(work_dir, 'ok', 'ok', '__init__.py')
            version_text = read_file(version_file)
        assert res.exit_code == 0
        assert version_text == "__version__ = '0.0.2'\n"
        assert 'Updated {}'.format(version_file) in res.output
        assert '0.0.1 -> 0.0.2' in res.output
def test_package_not_exist():
    """Growing a package that is not installed as editable fails with exit code 1."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        venv_dir = os.path.join(work_dir, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir):
            res = cli.invoke(hatch, ['grow', 'fix', 'ok'])
        assert res.exit_code == 1
        assert '`{}` is not an editable package.'.format('ok') in res.output
def test_local():
    """With exactly one editable package installed, `-l` selects it automatically."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['new', 'ok', '--basic', '-ne'])
        venv_dir = os.path.join(work_dir, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir):
            install_packages(['-e', os.path.join(work_dir, 'ok')])
            res = cli.invoke(hatch, ['grow', 'fix', '-l'])
            version_file = os.path.join(work_dir, 'ok', 'ok', '__init__.py')
            version_text = read_file(version_file)
        assert res.exit_code == 0
        assert version_text == "__version__ = '0.0.2'\n"
        assert 'Package `ok` has been selected.' in res.output
        assert 'Updated {}'.format(version_file) in res.output
        assert '0.0.1 -> 0.0.2' in res.output
def test_local_not_exist():
    """`-l` with no editable packages installed fails with exit code 1."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        venv_dir = os.path.join(work_dir, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir):
            res = cli.invoke(hatch, ['grow', 'fix', '-l'])
        assert res.exit_code == 1
        assert 'There are no local packages available.' in res.output
def test_local_multiple():
    """`-l` is ambiguous, and fails, when several editable packages are installed."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['new', 'ok', '--basic', '-ne'])
        cli.invoke(hatch, ['new', 'ko', '--basic', '-ne'])
        venv_dir = os.path.join(work_dir, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir):
            install_packages(['-e', os.path.join(work_dir, 'ok')])
            install_packages(['-e', os.path.join(work_dir, 'ko')])
            res = cli.invoke(hatch, ['grow', 'fix', '-l'])
        assert res.exit_code == 1
        assert (
            'There are multiple local packages available. '
            'Select one with the optional argument.'
        ) in res.output
def test_path_relative():
    """`-p` accepts a path relative to the current directory."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['new', 'ok', '--basic', '-ne'])
        res = cli.invoke(hatch, ['grow', 'major', '-p', 'ok'])
        version_file = os.path.join(work_dir, 'ok', 'ok', '__init__.py')
        version_text = read_file(version_file)
        assert res.exit_code == 0
        assert version_text == "__version__ = '1.0.0'\n"
        assert 'Updated {}'.format(version_file) in res.output
        assert '0.0.1 -> 1.0.0' in res.output
def test_path_full():
    """`-p` accepts an absolute path to a different project."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['new', 'ok', '--basic', '-ne'])
        cli.invoke(hatch, ['new', 'ko', '--basic', '-ne'])
        os.chdir(os.path.join(work_dir, 'ko'))
        res = cli.invoke(
            hatch,
            ['grow', 'fix', '-p', os.path.join(work_dir, 'ok')]
        )
        version_file = os.path.join(work_dir, 'ok', 'ok', '__init__.py')
        version_text = read_file(version_file)
        assert res.exit_code == 0
        assert version_text == "__version__ = '0.0.2'\n"
        assert 'Updated {}'.format(version_file) in res.output
        assert '0.0.1 -> 0.0.2' in res.output
def test_path_full_not_exist():
    """A `-p` path that does not exist fails and changes nothing."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['new', 'ok', '--basic', '-ne'])
        missing_path = os.path.join(work_dir, 'ko')
        res = cli.invoke(hatch, ['grow', 'fix', '-p', missing_path])
        version_file = os.path.join(work_dir, 'ok', 'ok', '__init__.py')
        version_text = read_file(version_file)
        assert res.exit_code == 1
        assert version_text == "__version__ = '0.0.1'\n"
        assert 'Directory `{}` does not exist.'.format(missing_path) in res.output
def test_path_file():
    """`-p` may point directly at a version file instead of a directory."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['new', 'ok', '--basic', '-ne'])
        version_file = os.path.join(work_dir, 'ok', 'ok', '__init__.py')
        res = cli.invoke(hatch, ['grow', 'major', '-p', version_file])
        version_text = read_file(version_file)
        assert res.exit_code == 0
        assert version_text == "__version__ = '1.0.0'\n"
        assert 'Updated {}'.format(version_file) in res.output
        assert '0.0.1 -> 1.0.0' in res.output
def test_no_init():
    """With no version file reachable from the cwd, `grow` fails and changes nothing."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['new', 'ok', '--basic', '-ne'])
        res = cli.invoke(hatch, ['grow', 'fix'])
        version_file = os.path.join(work_dir, 'ok', 'ok', '__init__.py')
        version_text = read_file(version_file)
        assert res.exit_code == 1
        assert version_text == "__version__ = '0.0.1'\n"
        assert 'No version files found.' in res.output
def test_no_version():
    """Version files lacking a version specifier are listed, then rejected."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        os.remove(os.path.join(work_dir, 'ok', '__init__.py'))
        res = cli.invoke(hatch, ['grow', 'fix'])
        assert res.exit_code == 1
        assert 'Found version files:' in res.output
        assert os.path.join(work_dir, 'tests', '__init__.py') in res.output
        assert 'Unable to find a version specifier.' in res.output
def test_multi_line_init():
    """A version assignment followed by extra lines is not a valid specifier."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        with open(os.path.join(work_dir, 'ok', '__init__.py'), 'w') as f:
            f.write('__version__ = "123"\nok\n')
        res = cli.invoke(hatch, ['grow', 'fix'])
        assert res.exit_code == 1
        assert 'Unable to find a version specifier.' in res.output
def test_no_match():
    """A version string that is not semver-shaped cannot be grown."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        with open(os.path.join(work_dir, 'ok', '__init__.py'), 'w') as f:
            f.write('__version__ = "123"')
        res = cli.invoke(hatch, ['grow', 'fix'])
        assert res.exit_code == 1
        assert 'Unable to find a version specifier.' in res.output
def test_pre_config():
    """The semver `pre` token from settings is used for pre-release bumps."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        version_file = os.path.join(work_dir, 'ok', '__init__.py')
        with temp_move_path(SETTINGS_FILE, work_dir):
            settings = copy_default_settings()
            settings['semver']['pre'] = 'dev'
            save_settings(settings)
            res = cli.invoke(hatch, ['grow', 'pre'])
            version_text = read_file(version_file)
        assert res.exit_code == 0
        assert version_text == "__version__ = '0.0.1-dev.1'\n"
        assert 'Updated {}'.format(version_file) in res.output
        assert '0.0.1 -> 0.0.1-dev.1' in res.output
def test_pre_option():
    """The `--pre` option overrides the configured pre-release token."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        version_file = os.path.join(work_dir, 'ok', '__init__.py')
        with temp_move_path(SETTINGS_FILE, work_dir):
            settings = copy_default_settings()
            settings['semver']['pre'] = 'rc'
            save_settings(settings)
            res = cli.invoke(hatch, ['grow', 'pre', '--pre', 'dev'])
            version_text = read_file(version_file)
        assert res.exit_code == 0
        assert version_text == "__version__ = '0.0.1-dev.1'\n"
        assert 'Updated {}'.format(version_file) in res.output
        assert '0.0.1 -> 0.0.1-dev.1' in res.output
def test_build_config():
    """The semver `build` token from settings is used for build bumps."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        version_file = os.path.join(work_dir, 'ok', '__init__.py')
        with temp_move_path(SETTINGS_FILE, work_dir):
            settings = copy_default_settings()
            settings['semver']['build'] = 'nightly'
            save_settings(settings)
            res = cli.invoke(hatch, ['grow', 'build'])
            version_text = read_file(version_file)
        assert res.exit_code == 0
        assert version_text == "__version__ = '0.0.1+nightly.1'\n"
        assert 'Updated {}'.format(version_file) in res.output
        assert '0.0.1 -> 0.0.1+nightly.1' in res.output
def test_build_option():
    """The `--build` option overrides the configured build token."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        version_file = os.path.join(work_dir, 'ok', '__init__.py')
        with temp_move_path(SETTINGS_FILE, work_dir):
            settings = copy_default_settings()
            settings['semver']['build'] = 'rc'
            save_settings(settings)
            res = cli.invoke(hatch, ['grow', 'build', '--build', 'nightly'])
            version_text = read_file(version_file)
        assert res.exit_code == 0
        assert version_text == "__version__ = '0.0.1+nightly.1'\n"
        assert 'Updated {}'.format(version_file) in res.output
        assert '0.0.1 -> 0.0.1+nightly.1' in res.output
def test_no_config():
    """Without a settings file, pre-release bumps fall back to the `rc` token."""
    with temp_chdir() as work_dir:
        cli = CliRunner()
        cli.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        version_file = os.path.join(work_dir, 'ok', '__init__.py')
        with temp_move_path(SETTINGS_FILE, work_dir):
            res = cli.invoke(hatch, ['grow', 'pre'])
            version_text = read_file(version_file)
        assert res.exit_code == 0
        assert version_text == "__version__ = '0.0.1-rc.1'\n"
        assert 'Updated {}'.format(version_file) in res.output
        assert '0.0.1 -> 0.0.1-rc.1' in res.output
| 34.87638
| 82
| 0.581429
| 2,116
| 15,799
| 4.094518
| 0.061909
| 0.069252
| 0.098107
| 0.055863
| 0.897853
| 0.893698
| 0.86265
| 0.836103
| 0.826985
| 0.789705
| 0
| 0.018015
| 0.255143
| 15,799
| 452
| 83
| 34.95354
| 0.718219
| 0
| 0
| 0.723529
| 0
| 0
| 0.164124
| 0
| 0
| 0
| 0
| 0
| 0.276471
| 1
| 0.073529
| false
| 0
| 0.026471
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ed7b0eb96944fa5b11d1b7bac5fc6cd2a3539e8b
| 3,642
|
py
|
Python
|
unittests/tools/test_netsparker_parser.py
|
mtcolman/django-DefectDojo
|
76175aca446e077884bdb5e1d8e2a671a0840775
|
[
"BSD-3-Clause"
] | 249
|
2016-09-06T21:04:40.000Z
|
2018-01-19T15:59:44.000Z
|
unittests/tools/test_netsparker_parser.py
|
mtcolman/django-DefectDojo
|
76175aca446e077884bdb5e1d8e2a671a0840775
|
[
"BSD-3-Clause"
] | 255
|
2016-09-06T21:36:37.000Z
|
2018-01-19T19:57:57.000Z
|
unittests/tools/test_netsparker_parser.py
|
mtcolman/django-DefectDojo
|
76175aca446e077884bdb5e1d8e2a671a0840775
|
[
"BSD-3-Clause"
] | 152
|
2016-09-06T21:04:54.000Z
|
2018-01-18T08:52:24.000Z
|
from ..dojo_test_case import DojoTestCase
from dojo.models import Test
from dojo.tools.netsparker.parser import NetsparkerParser
class TestNetsparkerParser(DojoTestCase):
    """Tests for NetsparkerParser against sample JSON scan reports."""

    def test_parse_file_with_one_finding(self):
        """A report with a single vulnerability yields one fully-populated finding."""
        # Context manager ensures the report file is closed; the original
        # opened it and leaked the handle.
        with open("unittests/scans/netsparker/netsparker_one_finding.json") as testfile:
            parser = NetsparkerParser()
            findings = parser.get_findings(testfile, Test())
        self.assertEqual(1, len(findings))
        for finding in findings:
            for endpoint in finding.unsaved_endpoints:
                endpoint.clean()
        with self.subTest(i=0):
            finding = findings[0]
            self.assertEqual("Medium", finding.severity)
            self.assertEqual(16, finding.cwe)
            self.assertEqual("25/06/2021", finding.date.strftime("%d/%m/%Y"))
            self.assertIsNotNone(finding.description)
            self.assertGreater(len(finding.description), 0)
            self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:N/A:N/E:H/RL:O/RC:C", finding.cvssv3)
            self.assertEqual(1, len(finding.unsaved_endpoints))
            endpoint = finding.unsaved_endpoints[0]
            self.assertEqual(str(endpoint), "http://php.testsparker.com/auth/login.php")

    def test_parse_file_with_multiple_finding(self):
        """A report with many vulnerabilities yields 16 findings; spot-check three."""
        # Same leak fix as above: close the report file deterministically.
        with open("unittests/scans/netsparker/netsparker_many_findings.json") as testfile:
            parser = NetsparkerParser()
            findings = parser.get_findings(testfile, Test())
        self.assertEqual(16, len(findings))
        for finding in findings:
            for endpoint in finding.unsaved_endpoints:
                endpoint.clean()
        with self.subTest(i=0):
            finding = findings[0]
            self.assertEqual("Medium", finding.severity)
            self.assertEqual(16, finding.cwe)
            self.assertEqual("25/06/2021", finding.date.strftime("%d/%m/%Y"))
            self.assertIsNotNone(finding.description)
            self.assertGreater(len(finding.description), 0)
            self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:N/A:N/E:H/RL:O/RC:C", finding.cvssv3)
            self.assertEqual(1, len(finding.unsaved_endpoints))
            endpoint = finding.unsaved_endpoints[0]
            self.assertEqual(str(endpoint), "http://php.testsparker.com/auth/login.php")
        with self.subTest(i=1):
            finding = findings[1]
            self.assertEqual("Critical", finding.severity)
            self.assertEqual(89, finding.cwe)
            self.assertEqual("25/06/2021", finding.date.strftime("%d/%m/%Y"))
            self.assertIsNotNone(finding.description)
            self.assertGreater(len(finding.description), 0)
            self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", finding.cvssv3)
            self.assertEqual(1, len(finding.unsaved_endpoints))
            endpoint = finding.unsaved_endpoints[0]
            self.assertEqual(str(endpoint), "http://php.testsparker.com/artist.php?id=-1%20OR%2017-7=10")
        with self.subTest(i=2):
            finding = findings[2]
            self.assertEqual("Medium", finding.severity)
            self.assertEqual(205, finding.cwe)
            self.assertEqual("25/06/2021", finding.date.strftime("%d/%m/%Y"))
            self.assertIsNotNone(finding.description)
            self.assertGreater(len(finding.description), 0)
            self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:N/S:U/C:N/I:L/A:N/E:H/RL:O/RC:C", finding.cvssv3)
            self.assertEqual(1, len(finding.unsaved_endpoints))
            endpoint = finding.unsaved_endpoints[0]
            self.assertEqual(str(endpoint), "http://php.testsparker.com")
| 51.295775
| 106
| 0.637562
| 465
| 3,642
| 4.933333
| 0.197849
| 0.170009
| 0.100262
| 0.081081
| 0.8483
| 0.830863
| 0.830863
| 0.808631
| 0.758936
| 0.758936
| 0
| 0.03041
| 0.223504
| 3,642
| 70
| 107
| 52.028571
| 0.780764
| 0
| 0
| 0.671875
| 0
| 0.078125
| 0.162548
| 0.09006
| 0
| 0
| 0
| 0
| 0.53125
| 1
| 0.03125
| false
| 0
| 0.046875
| 0
| 0.09375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c3b3f2d937e4612a91680d993ac557e569c3c6d
| 216
|
py
|
Python
|
stroylux/main/export_import/__init__.py
|
vladkoblynsky/shop
|
aaf027f4111605772624a868a0243b221b97c857
|
[
"BSD-3-Clause"
] | null | null | null |
stroylux/main/export_import/__init__.py
|
vladkoblynsky/shop
|
aaf027f4111605772624a868a0243b221b97c857
|
[
"BSD-3-Clause"
] | 7
|
2020-09-19T16:24:46.000Z
|
2022-01-13T03:19:46.000Z
|
stroylux/main/export_import/__init__.py
|
vladkoblynsky/shop
|
aaf027f4111605772624a868a0243b221b97c857
|
[
"BSD-3-Clause"
] | null | null | null |
class ExportObjStatus:
    """Status values for an export job, with Django-style display choices."""

    SUCCESS = 'success'
    ERROR = 'error'
    IN_PROGRESS = 'in_progress'
    # (value, human-readable label) pairs, derived from the status constants.
    CHOICES = [
        (status, status.replace('_', ' ').capitalize())
        for status in (SUCCESS, ERROR, IN_PROGRESS)
    ]
| 19.636364
| 37
| 0.546296
| 19
| 216
| 6.052632
| 0.368421
| 0.347826
| 0.330435
| 0.417391
| 0.765217
| 0.765217
| 0.765217
| 0.765217
| 0
| 0
| 0
| 0
| 0.310185
| 216
| 10
| 38
| 21.6
| 0.771812
| 0
| 0
| 0
| 0
| 0
| 0.212963
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
92c0ee370f5d7d8dc167154a973a5c647466e52a
| 23,574
|
py
|
Python
|
sdk/lusid/api/data_types_api.py
|
fossabot/lusid-sdk-python
|
154a0232a00026d79379aec7196555f24d742ade
|
[
"MIT"
] | null | null | null |
sdk/lusid/api/data_types_api.py
|
fossabot/lusid-sdk-python
|
154a0232a00026d79379aec7196555f24d742ade
|
[
"MIT"
] | null | null | null |
sdk/lusid/api/data_types_api.py
|
fossabot/lusid-sdk-python
|
154a0232a00026d79379aec7196555f24d742ade
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.11.2321
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from lusid.api_client import ApiClient
from lusid.exceptions import (
ApiTypeError,
ApiValueError
)
class DataTypesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
    def __init__(self, api_client=None):
        # NOTE(review): auto-generated class ("Do not edit the class manually") — comments only.
        # Fall back to a default ApiClient when the caller does not supply one.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
    def get_data_type(self, scope, code, **kwargs): # noqa: E501
        """[EARLY ACCESS] Get data type definition # noqa: E501
        Get the definition of a specified data type # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_data_type(scope, code, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str scope: The scope of the data type (required)
        :param str code: The code of the data type (required)
        :param datetime as_at: The asAt datetime at which to retrieve the data type definition. Defaults to return the latest version of the instrument definition if not specified.
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: DataType
            If the method is called asynchronously,
            returns the request thread.
        """
        # NOTE(review): auto-generated method — comments only; logic untouched.
        # Request only the deserialized body; the *_with_http_info sibling also
        # returns status code and headers.
        kwargs['_return_http_data_only'] = True
        return self.get_data_type_with_http_info(scope, code, **kwargs) # noqa: E501
    def get_data_type_with_http_info(self, scope, code, **kwargs): # noqa: E501
        """[EARLY ACCESS] Get data type definition # noqa: E501
        Get the definition of a specified data type # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_data_type_with_http_info(scope, code, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str scope: The scope of the data type (required)
        :param str code: The code of the data type (required)
        :param datetime as_at: The asAt datetime at which to retrieve the data type definition. Defaults to return the latest version of the instrument definition if not specified.
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(DataType, status_code(int), headers(HTTPHeaderDict))
            If the method is called asynchronously,
            returns the request thread.
        """
        # NOTE(review): auto-generated method — comments only; logic untouched.
        # Collect all arguments (positional + kwargs) and reject unknown keywords.
        local_var_params = locals()
        all_params = ['scope', 'code', 'as_at'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_data_type" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # Enforce the API's path-parameter constraints: 1-64 chars, [a-zA-Z0-9-_].
        if ('scope' in local_var_params and
                len(local_var_params['scope']) > 64):
            raise ApiValueError("Invalid value for parameter `scope` when calling `get_data_type`, length must be less than or equal to `64`") # noqa: E501
        if ('scope' in local_var_params and
                len(local_var_params['scope']) < 1):
            raise ApiValueError("Invalid value for parameter `scope` when calling `get_data_type`, length must be greater than or equal to `1`") # noqa: E501
        if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
            raise ApiValueError("Invalid value for parameter `scope` when calling `get_data_type`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
        if ('code' in local_var_params and
                len(local_var_params['code']) > 64):
            raise ApiValueError("Invalid value for parameter `code` when calling `get_data_type`, length must be less than or equal to `64`") # noqa: E501
        if ('code' in local_var_params and
                len(local_var_params['code']) < 1):
            raise ApiValueError("Invalid value for parameter `code` when calling `get_data_type`, length must be greater than or equal to `1`") # noqa: E501
        if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
            raise ApiValueError("Invalid value for parameter `code` when calling `get_data_type`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
        # Assemble path, query and header parameters for the request.
        collection_formats = {}
        path_params = {}
        if 'scope' in local_var_params:
            path_params['scope'] = local_var_params['scope'] # noqa: E501
        if 'code' in local_var_params:
            path_params['code'] = local_var_params['code'] # noqa: E501
        query_params = []
        if 'as_at' in local_var_params:
            query_params.append(('asAt', local_var_params['as_at'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2'] # noqa: E501
        # set the LUSID header
        header_params['X-LUSID-SDK-Language'] = 'Python'
        header_params['X-LUSID-SDK-Version'] = '0.11.2321'
        # Hand off to the shared ApiClient, which handles auth, (de)serialization
        # and optional async execution.
        return self.api_client.call_api(
            '/api/datatypes/{scope}/{code}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='DataType', # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
    def get_units_from_data_type(self, scope, code, **kwargs): # noqa: E501
        """[EARLY ACCESS] Get units from data type # noqa: E501
        Get the definitions of the specified units associated bound to a specific data type # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_units_from_data_type(scope, code, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str scope: The scope of the data type (required)
        :param str code: The code of the data type (required)
        :param list[str] units: One or more unit identifiers for which the definition is being requested
        :param str filter: Optional. Expression to filter the result set. For example, to filter on the Schema, use \"schema eq 'string'\" Read more about filtering results from LUSID here https://support.lusid.com/filtering-results-from-lusid.
        :param datetime as_at: Optional. The as at of the requested data type
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: ResourceListOfIUnitDefinitionDto
            If the method is called asynchronously,
            returns the request thread.
        """
        # NOTE(review): auto-generated method — comments only; logic untouched.
        # Request only the deserialized body; the *_with_http_info sibling also
        # returns status code and headers.
        kwargs['_return_http_data_only'] = True
        return self.get_units_from_data_type_with_http_info(scope, code, **kwargs) # noqa: E501
    def get_units_from_data_type_with_http_info(self, scope, code, **kwargs): # noqa: E501
        """[EARLY ACCESS] Get units from data type # noqa: E501
        Get the definitions of the specified units associated bound to a specific data type # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_units_from_data_type_with_http_info(scope, code, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str scope: The scope of the data type (required)
        :param str code: The code of the data type (required)
        :param list[str] units: One or more unit identifiers for which the definition is being requested
        :param str filter: Optional. Expression to filter the result set. For example, to filter on the Schema, use \"schema eq 'string'\" Read more about filtering results from LUSID here https://support.lusid.com/filtering-results-from-lusid.
        :param datetime as_at: Optional. The as at of the requested data type
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(ResourceListOfIUnitDefinitionDto, status_code(int), headers(HTTPHeaderDict))
            If the method is called asynchronously,
            returns the request thread.
        """
        # NOTE(review): auto-generated method — comments only; logic untouched.
        # Collect all arguments (positional + kwargs) and reject unknown keywords.
        local_var_params = locals()
        all_params = ['scope', 'code', 'units', 'filter', 'as_at'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_units_from_data_type" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # Enforce the API's path-parameter constraints: 1-64 chars, [a-zA-Z0-9-_].
        if ('scope' in local_var_params and
                len(local_var_params['scope']) > 64):
            raise ApiValueError("Invalid value for parameter `scope` when calling `get_units_from_data_type`, length must be less than or equal to `64`") # noqa: E501
        if ('scope' in local_var_params and
                len(local_var_params['scope']) < 1):
            raise ApiValueError("Invalid value for parameter `scope` when calling `get_units_from_data_type`, length must be greater than or equal to `1`") # noqa: E501
        if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
            raise ApiValueError("Invalid value for parameter `scope` when calling `get_units_from_data_type`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
        if ('code' in local_var_params and
                len(local_var_params['code']) > 64):
            raise ApiValueError("Invalid value for parameter `code` when calling `get_units_from_data_type`, length must be less than or equal to `64`") # noqa: E501
        if ('code' in local_var_params and
                len(local_var_params['code']) < 1):
            raise ApiValueError("Invalid value for parameter `code` when calling `get_units_from_data_type`, length must be greater than or equal to `1`") # noqa: E501
        if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
            raise ApiValueError("Invalid value for parameter `code` when calling `get_units_from_data_type`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
        # Assemble path, query and header parameters; `units` repeats as a
        # multi-value query parameter.
        collection_formats = {}
        path_params = {}
        if 'scope' in local_var_params:
            path_params['scope'] = local_var_params['scope'] # noqa: E501
        if 'code' in local_var_params:
            path_params['code'] = local_var_params['code'] # noqa: E501
        query_params = []
        if 'units' in local_var_params:
            query_params.append(('units', local_var_params['units'])) # noqa: E501
            collection_formats['units'] = 'multi' # noqa: E501
        if 'filter' in local_var_params:
            query_params.append(('filter', local_var_params['filter'])) # noqa: E501
        if 'as_at' in local_var_params:
            query_params.append(('asAt', local_var_params['as_at'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2'] # noqa: E501
        # set the LUSID header
        header_params['X-LUSID-SDK-Language'] = 'Python'
        header_params['X-LUSID-SDK-Version'] = '0.11.2321'
        # Hand off to the shared ApiClient, which handles auth, (de)serialization
        # and optional async execution.
        return self.api_client.call_api(
            '/api/datatypes/{scope}/{code}/units', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResourceListOfIUnitDefinitionDto', # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def list_data_types(self, scope, **kwargs):  # noqa: E501
    """[EARLY ACCESS] List data types  # noqa: E501

    List all data types in a specified scope.
    Synchronous by default; pass async_req=True to get a request thread:

    >>> thread = api.list_data_types(scope, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: The requested scope of the data types (required)
    :param datetime as_at: The as at of the requested data types
    :param bool include_system: Whether to additionally include those data types in the "system" scope
    :param list[str] sort_by: Optional. Fields to order the results by; prefix a field with '-' for descending order e.g. -MyFieldName
    :param int start: Optional. When paginating, skip this number of results
    :param int limit: Optional. When paginating, limit the number of returned results to this many.
    :param str filter: Optional. Expression to filter the result set, e.g. "displayName eq 'string'". See https://support.lusid.com/filtering-results-from-lusid.
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: ResourceListOfDataType
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, asking it to strip the
    # (status_code, headers) envelope and return only the payload.
    options = dict(kwargs, _return_http_data_only=True)
    return self.list_data_types_with_http_info(scope, **options)  # noqa: E501
def list_data_types_with_http_info(self, scope, **kwargs):  # noqa: E501
    """[EARLY ACCESS] List data types  # noqa: E501

    List all data types in a specified scope.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_data_types_with_http_info(scope, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: The requested scope of the data types (required)
    :param datetime as_at: The as at of the requested data types
    :param bool include_system: Whether to additionally include those data types in the "system" scope
    :param list[str] sort_by: Optional. Order the results by these fields. Use the '-' sign to denote descending order e.g. -MyFieldName
    :param int start: Optional. When paginating, skip this number of results
    :param int limit: Optional. When paginating, limit the number of returned results to this many.
    :param str filter: Optional. Expression to filter the result set, e.g. "displayName eq 'string'". See https://support.lusid.com/filtering-results-from-lusid.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(ResourceListOfDataType, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() snapshots {self, scope, kwargs}; validated kwargs entries
    # are flattened into this dict below so every caller option becomes a
    # top-level key for the lookups that follow.
    local_var_params = locals()

    # Names this endpoint accepts, plus the generic transport options.
    all_params = ['scope', 'as_at', 'include_system', 'sort_by', 'start', 'limit', 'filter']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments early instead of silently
    # dropping a caller's typo.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_data_types" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    # Path parameter substituted into '/api/datatypes/{scope}'.
    path_params = {}
    if 'scope' in local_var_params:
        path_params['scope'] = local_var_params['scope']  # noqa: E501

    # Only options the caller actually supplied go on the query string.
    query_params = []
    if 'as_at' in local_var_params:
        query_params.append(('asAt', local_var_params['as_at']))  # noqa: E501
    if 'include_system' in local_var_params:
        query_params.append(('includeSystem', local_var_params['include_system']))  # noqa: E501
    if 'sort_by' in local_var_params:
        query_params.append(('sortBy', local_var_params['sort_by']))  # noqa: E501
        # 'multi' => repeat the sortBy key once per list element.
        collection_formats['sortBy'] = 'multi'  # noqa: E501
    if 'start' in local_var_params:
        query_params.append(('start', local_var_params['start']))  # noqa: E501
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))  # noqa: E501
    if 'filter' in local_var_params:
        query_params.append(('filter', local_var_params['filter']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/plain', 'application/json', 'text/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # set the LUSID header
    header_params['X-LUSID-SDK-Language'] = 'Python'
    header_params['X-LUSID-SDK-Version'] = '0.11.2321'

    return self.api_client.call_api(
        '/api/datatypes/{scope}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResourceListOfDataType',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
| 52.620536
| 281
| 0.624544
| 2,935
| 23,574
| 4.815332
| 0.088245
| 0.047548
| 0.077266
| 0.030567
| 0.9233
| 0.922663
| 0.920045
| 0.905399
| 0.904762
| 0.904762
| 0
| 0.017495
| 0.289556
| 23,574
| 447
| 282
| 52.738255
| 0.826367
| 0.453593
| 0
| 0.712264
| 1
| 0.056604
| 0.263625
| 0.052346
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033019
| false
| 0
| 0.023585
| 0
| 0.089623
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
92c106a0f22157ed7abf3a3750ddca7d861d3ece
| 32
|
py
|
Python
|
__init__.py
|
dzianissokalau/SRM_checker
|
0f275a945316c6caf9ca7508ef7936c76fb43a52
|
[
"MIT"
] | null | null | null |
__init__.py
|
dzianissokalau/SRM_checker
|
0f275a945316c6caf9ca7508ef7936c76fb43a52
|
[
"MIT"
] | null | null | null |
__init__.py
|
dzianissokalau/SRM_checker
|
0f275a945316c6caf9ca7508ef7936c76fb43a52
|
[
"MIT"
] | null | null | null |
from .check_srm import check_srm
| 32
| 32
| 0.875
| 6
| 32
| 4.333333
| 0.666667
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09375
| 32
| 1
| 32
| 32
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
92c1b81caf62fcfeca75b22a6a6ebad7701f9266
| 6,755
|
py
|
Python
|
day19/Python/day19.py
|
prabhigupta/instaBlog
|
02f219ea278d85c7799d739294c664aa5a47719a
|
[
"Apache-2.0"
] | null | null | null |
day19/Python/day19.py
|
prabhigupta/instaBlog
|
02f219ea278d85c7799d739294c664aa5a47719a
|
[
"Apache-2.0"
] | 1
|
2021-12-25T19:22:09.000Z
|
2021-12-25T19:22:09.000Z
|
day19/Python/day19.py
|
prabhigupta/AOC2021
|
02f219ea278d85c7799d739294c664aa5a47719a
|
[
"Apache-2.0"
] | null | null | null |
# Advent of Code 2021, day 19 ("Beacon Scanner"): merge beacon point clouds
# reported by scanners with unknown positions/orientations into one frame.
import numpy as np
from collections import Counter
from termcolor import colored
from pyfiglet import *

# Banner output.
print(colored("Advent of Code - Day 19", "yellow").center(80, "-"))
print(colored(figlet_format("Beacon Scanner",font="small",justify="center"), 'green'))
print(colored("Output","yellow").center(80, "-"))

# Candidate orientations as lambdas: the parameter order permutes the
# incoming coordinates, the array literal applies the sign flips.
r = []
r.append(lambda x,y,z: np.array([x,y,z]))
r.append(lambda x,y,z: np.array([x,-z,y]))
r.append(lambda x,y,z: np.array([x,-y,-z]))
r.append(lambda x,y,z: np.array([x,z,-y]))
r.append(lambda x,y,z: np.array([-x,y,-z]))
r.append(lambda x,y,z: np.array([-x,-z,-y]))
r.append(lambda x,y,z: np.array([-x,-y,z]))
r.append(lambda x,y,z: np.array([-x,z,y]))
r.append(lambda y,z,x: np.array([x,y,z]))
r.append(lambda y,z,x: np.array([x,-z,y]))
r.append(lambda y,z,x: np.array([x,-y,-z]))
r.append(lambda y,z,x: np.array([x,z,-y]))
r.append(lambda y,z,x: np.array([-x,y,-z]))
r.append(lambda y,z,x: np.array([-x,-z,-y]))
r.append(lambda y,z,x: np.array([-x,-y,z]))
r.append(lambda y,z,x: np.array([-x,z,y]))
r.append(lambda z,x,y: np.array([x,y,z]))
r.append(lambda z,x,y: np.array([x,-z,y]))
r.append(lambda z,x,y: np.array([x,-y,-z]))
r.append(lambda z,x,y: np.array([x,z,-y]))
r.append(lambda z,x,y: np.array([-x,y,-z]))
r.append(lambda z,x,y: np.array([-x,-z,-y]))
r.append(lambda z,x,y: np.array([-x,-y,z]))
r.append(lambda z,x,y: np.array([-x,z,y]))

# Parse the input: each block starts with a 'scanner' header line, holds
# one comma-separated coordinate triple per line, blocks end at blank lines.
data = [x for x in open("../Input/day19.txt", "r").read().splitlines()]
scanners = []
for d in data:
    if 'scanner' in d:
        beacons = []
    if ',' in d:
        beacons.append([int(x) for x in d.split(',')])
    if len(d) == 0:
        scanners.append(beacons)
scanners.append(beacons)  # last block has no trailing blank line

# Scanner 0 anchors the global frame at the origin.
signal_id = 0
signal_pos = [0,0,0]
beacons = set()
signals = {}          # scanner index -> position in the global frame
threshold = 12        # minimum number of shared beacons to accept a match
signals[signal_id] = signal_pos
beacons.update([tuple(x) for x in scanners[0]])

# Repeatedly try to align an unplaced scanner against any placed one.
while len(signals) < len(scanners):
    for signal_id, reference in enumerate(scanners):
        if signal_id in signals:
            # Pairwise Manhattan distances within the placed cloud;
            # invariant under the rotations/translations tried below.
            dist1 = np.array([[np.abs(np.subtract(x,y)).sum() for x in reference] for y in reference])
            for i, s in enumerate(scanners):
                if i not in signals and i != signal_id:
                    # print(f'Checking sensor {i}')
                    dist2 = np.array([[np.abs(np.subtract(x,y)).sum() for x in s] for y in s])
                    overlaps = []
                    # Pair beacons whose distance profiles share >= threshold entries.
                    for n1, row1 in enumerate(dist1):
                        for n2, row2 in enumerate(dist2):
                            o = list((Counter(row1) & Counter(row2)).elements())
                            if len(o) >= threshold:
                                overlaps.append((n1, n2, len(o)))
                                break
                    if len(overlaps) >= threshold:
                        originals = [reference[x[0]] for x in overlaps]
                        # Find the orientation under which every matched pair
                        # differs by one common offset: that offset is the
                        # unplaced scanner's position.
                        for rot in r:
                            trans = [rot(*s[x[1]]) for x in overlaps]
                            diff = [tuple(x-y) for x,y in zip(originals, trans)]
                            if len(set(diff)) == 1:
                                signal_pos = list(diff[0])
                                signals[i] = signal_pos
                                # Rebase the whole cloud into the global frame.
                                scanners[i] = [list(signal_pos + rot(*x)) for x in s]
                                beacons.update([tuple(x) for x in scanners[i]])
                                break
print('\nPuzzle 1: ', len(beacons))

# NOTE(review): everything below re-reads the input and repeats the whole
# alignment verbatim just to recover the scanner positions for puzzle 2;
# the `signals` dict computed above already holds them.
r = []
r.append(lambda x,y,z: np.array([x,y,z]))
r.append(lambda x,y,z: np.array([x,-z,y]))
r.append(lambda x,y,z: np.array([x,-y,-z]))
r.append(lambda x,y,z: np.array([x,z,-y]))
r.append(lambda x,y,z: np.array([-x,y,-z]))
r.append(lambda x,y,z: np.array([-x,-z,-y]))
r.append(lambda x,y,z: np.array([-x,-y,z]))
r.append(lambda x,y,z: np.array([-x,z,y]))
r.append(lambda y,z,x: np.array([x,y,z]))
r.append(lambda y,z,x: np.array([x,-z,y]))
r.append(lambda y,z,x: np.array([x,-y,-z]))
r.append(lambda y,z,x: np.array([x,z,-y]))
r.append(lambda y,z,x: np.array([-x,y,-z]))
r.append(lambda y,z,x: np.array([-x,-z,-y]))
r.append(lambda y,z,x: np.array([-x,-y,z]))
r.append(lambda y,z,x: np.array([-x,z,y]))
r.append(lambda z,x,y: np.array([x,y,z]))
r.append(lambda z,x,y: np.array([x,-z,y]))
r.append(lambda z,x,y: np.array([x,-y,-z]))
r.append(lambda z,x,y: np.array([x,z,-y]))
r.append(lambda z,x,y: np.array([-x,y,-z]))
r.append(lambda z,x,y: np.array([-x,-z,-y]))
r.append(lambda z,x,y: np.array([-x,-y,z]))
r.append(lambda z,x,y: np.array([-x,z,y]))
data = [x for x in open("../Input/day19.txt", "r").read().splitlines()]
scanners = []
for d in data:
    if 'scanner' in d:
        beacons = []
    if ',' in d:
        beacons.append([int(x) for x in d.split(',')])
    if len(d) == 0:
        scanners.append(beacons)
scanners.append(beacons)
signal_id = 0
signal_pos = [0,0,0]
beacons = set()
signals = {}
threshold = 12
signals[signal_id] = signal_pos
beacons.update([tuple(x) for x in scanners[0]])
while len(signals) < len(scanners):
    for signal_id, reference in enumerate(scanners):
        if signal_id in signals:
            dist1 = np.array([[np.abs(np.subtract(x,y)).sum() for x in reference] for y in reference])
            for i, s in enumerate(scanners):
                if i not in signals and i != signal_id:
                    # print(f'Checking sensor {i}')
                    dist2 = np.array([[np.abs(np.subtract(x,y)).sum() for x in s] for y in s])
                    overlaps = []
                    for n1, row1 in enumerate(dist1):
                        for n2, row2 in enumerate(dist2):
                            o = list((Counter(row1) & Counter(row2)).elements())
                            if len(o) >= threshold:
                                overlaps.append((n1, n2, len(o)))
                                break
                    if len(overlaps) >= threshold:
                        originals = [reference[x[0]] for x in overlaps]
                        for rot in r:
                            trans = [rot(*s[x[1]]) for x in overlaps]
                            diff = [tuple(x-y) for x,y in zip(originals, trans)]
                            if len(set(diff)) == 1:
                                signal_pos = list(diff[0])
                                signals[i] = signal_pos
                                scanners[i] = [list(signal_pos + rot(*x)) for x in s]
                                beacons.update([tuple(x) for x in scanners[i]])
                                break

# Puzzle 2: largest Manhattan distance between any two scanner positions.
max_dist = 0
for signal1 in signals.values():
    for signal2 in signals.values():
        max_dist = max(max_dist, sum([abs(x-y) for x,y in zip(signal1, signal2)]))
print('Puzzle 2: ', max_dist,end='\n\n')
print(colored("=".center(71, "="), "yellow"))
| 39.273256
| 102
| 0.519467
| 1,111
| 6,755
| 3.135914
| 0.091809
| 0.037887
| 0.179104
| 0.061998
| 0.886912
| 0.886912
| 0.886912
| 0.883467
| 0.883467
| 0.883467
| 0
| 0.013877
| 0.28527
| 6,755
| 172
| 103
| 39.273256
| 0.707746
| 0.008734
| 0
| 0.905405
| 0
| 0
| 0.02435
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027027
| 0
| 0.027027
| 0.040541
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
92d28cd75dd9a8b7f662534c23c819536efe13ce
| 6,411
|
py
|
Python
|
sdk/monitor/azure-monitor-query/tests/test_exceptions.py
|
lambertpan/azure-sdk-for-python
|
cc7e454fc04ec61cf021adeaac4fc0fc0a0805b2
|
[
"MIT"
] | 1
|
2021-09-16T02:33:52.000Z
|
2021-09-16T02:33:52.000Z
|
sdk/monitor/azure-monitor-query/tests/test_exceptions.py
|
lambertpan/azure-sdk-for-python
|
cc7e454fc04ec61cf021adeaac4fc0fc0a0805b2
|
[
"MIT"
] | null | null | null |
sdk/monitor/azure-monitor-query/tests/test_exceptions.py
|
lambertpan/azure-sdk-for-python
|
cc7e454fc04ec61cf021adeaac4fc0fc0a0805b2
|
[
"MIT"
] | null | null | null |
from datetime import timedelta, datetime
import pytest
import os
from azure.identity import ClientSecretCredential
from azure.core.exceptions import HttpResponseError
from azure.monitor.query import LogsQueryClient, LogsBatchQuery, LogsQueryError,LogsQueryResult, QueryPartialErrorException
def _credential():
    """Build a service-principal credential from the AZURE_* environment variables."""
    return ClientSecretCredential(
        client_id=os.environ['AZURE_CLIENT_ID'],
        client_secret=os.environ['AZURE_CLIENT_SECRET'],
        tenant_id=os.environ['AZURE_TENANT_ID'],
    )
@pytest.mark.live_test_only
def test_logs_single_query_fatal_exception():
    """A query against a nonexistent workspace must fail with HttpResponseError."""
    logs_client = LogsQueryClient(_credential())
    with pytest.raises(HttpResponseError):
        logs_client.query('bad_workspace_id', 'AppRequests', timespan=None)
@pytest.mark.live_test_only
def test_logs_single_query_partial_exception_not_allowed():
    """When partial errors are not allowed, a partially-failing query must raise
    QueryPartialErrorException instead of returning a result."""
    credential = _credential()
    client = LogsQueryClient(credential)
    # Query expected to produce a partial error (see the allowed-variant test,
    # which asserts response.partial_error for this same query).
    query = """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)"""
    # Fix: the exception was captured `as err` but never inspected (F841);
    # drop the unused binding.
    with pytest.raises(QueryPartialErrorException):
        client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))
@pytest.mark.live_test_only
def test_logs_single_query_partial_exception_allowed():
    """With allow_partial_errors=True a partially-failing query returns a
    response carrying a LogsQueryError in .partial_error instead of raising."""
    credential = _credential()
    client = LogsQueryClient(credential)
    query = """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)"""
    response = client.query(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1), allow_partial_errors=True)
    # The partial failure is surfaced on the response, not thrown.
    assert response.partial_error is not None
    assert response.partial_error.code == 'PartialError'
    assert response.partial_error.__class__ == LogsQueryError
@pytest.mark.live_test_only
def test_logs_batch_query_fatal_exception():
    """A batch issued with a bad client secret must fail wholesale with
    HttpResponseError, even though allow_partial_errors is set."""
    # Deliberately broken credential: valid client/tenant ids, wrong secret.
    credential = ClientSecretCredential(
        client_id = os.environ['AZURE_CLIENT_ID'],
        client_secret = 'bad_secret',
        tenant_id = os.environ['AZURE_TENANT_ID']
    )
    client = LogsQueryClient(credential)
    requests = [
        LogsBatchQuery(
            query="AzureActivity | summarize count()",
            timespan=timedelta(hours=1),
            workspace_id= os.environ['LOG_WORKSPACE_ID']
        ),
        LogsBatchQuery(
            query= """AppRequestsss | take 10""",
            timespan=(datetime(2021, 6, 2), timedelta(days=1)),
            workspace_id= os.environ['LOG_WORKSPACE_ID']
        ),
        LogsBatchQuery(
            query= """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)""",
            workspace_id= os.environ['LOG_WORKSPACE_ID'],
            timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)),
            include_statistics=True
        ),
    ]
    # NOTE(review): `responses` is assigned but unused inside pytest.raises;
    # the call is expected to throw before producing a value.
    with pytest.raises(HttpResponseError):
        responses = client.query_batch(requests, allow_partial_errors=True)
@pytest.mark.live_test_only
def test_logs_batch_query_partial_exception_not_allowed():
    """Without allow_partial_errors, a batch still returns per-item results,
    and the partially-failing item comes back as a LogsQueryError entry."""
    credential = _credential()
    client = LogsQueryClient(credential)
    requests = [
        LogsBatchQuery(
            query="AzureActivity | summarize count()",
            timespan=timedelta(hours=1),
            workspace_id= os.environ['LOG_WORKSPACE_ID']
        ),
        LogsBatchQuery(
            query= """AppRequests | take 10""",
            timespan=(datetime(2021, 6, 2), timedelta(days=1)),
            workspace_id= os.environ['LOG_WORKSPACE_ID']
        ),
        LogsBatchQuery(
            query= """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)""",
            workspace_id= os.environ['LOG_WORKSPACE_ID'],
            timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)),
            include_statistics=True
        ),
    ]
    responses = client.query_batch(requests)
    r1, r2, r3 = responses[0], responses[1], responses[2]
    # First two queries succeed; the third surfaces as an error object.
    assert r1.__class__ == LogsQueryResult
    assert r2.__class__ == LogsQueryResult
    assert r3.__class__ == LogsQueryError
@pytest.mark.live_test_only
def test_logs_batch_query_partial_exception_allowed():
    """With allow_partial_errors=True the partially-failing batch item is a
    LogsQueryResult whose .partial_error is populated, not a LogsQueryError."""
    credential = _credential()
    client = LogsQueryClient(credential)
    requests = [
        LogsBatchQuery(
            query="AzureActivity | summarize count()",
            timespan=timedelta(hours=1),
            workspace_id= os.environ['LOG_WORKSPACE_ID']
        ),
        LogsBatchQuery(
            query= """AppRequests | take 10""",
            timespan=(datetime(2021, 6, 2), timedelta(days=1)),
            workspace_id= os.environ['LOG_WORKSPACE_ID']
        ),
        LogsBatchQuery(
            query= """let Weight = 92233720368547758;
range x from 1 to 3 step 1
| summarize percentilesw(x, Weight * 100, 50)""",
            workspace_id= os.environ['LOG_WORKSPACE_ID'],
            timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)),
            include_statistics=True
        ),
    ]
    responses = client.query_batch(requests, allow_partial_errors=True)
    r1, r2, r3 = responses[0], responses[1], responses[2]
    assert r1.__class__ == LogsQueryResult
    assert r2.__class__ == LogsQueryResult
    # Third item is still a result object, with the failure attached.
    assert r3.__class__ == LogsQueryResult
    assert r3.partial_error is not None
@pytest.mark.live_test_only
def test_logs_batch_query_non_fatal_exception():
    """A syntactically invalid query in a batch yields a LogsQueryError for
    that item while the rest of the batch succeeds."""
    credential = _credential()
    client = LogsQueryClient(credential)
    requests = [
        LogsBatchQuery(
            query="AzureActivity | summarize count()",
            timespan=timedelta(hours=1),
            workspace_id= os.environ['LOG_WORKSPACE_ID']
        ),
        LogsBatchQuery(
            query= """AppRequests | take 10""",
            timespan=(datetime(2021, 6, 2), timedelta(days=1)),
            workspace_id= os.environ['LOG_WORKSPACE_ID']
        ),
        LogsBatchQuery(
            query= """Bad Query""",
            workspace_id= os.environ['LOG_WORKSPACE_ID'],
            timespan=(datetime(2021, 6, 2), datetime(2021, 6, 3)),
            include_statistics=True
        ),
    ]
    responses = client.query_batch(requests)
    r1, r2, r3 = responses[0], responses[1], responses[2]
    assert r1.__class__ == LogsQueryResult
    assert r2.__class__ == LogsQueryResult
    # Only the malformed query fails; it does not abort the batch.
    assert r3.__class__ == LogsQueryError
| 39.091463
| 123
| 0.656528
| 690
| 6,411
| 5.83913
| 0.131884
| 0.073716
| 0.043683
| 0.072971
| 0.8414
| 0.831968
| 0.831968
| 0.81236
| 0.79449
| 0.773393
| 0
| 0.049703
| 0.237404
| 6,411
| 163
| 124
| 39.331288
| 0.774391
| 0
| 0
| 0.780645
| 0
| 0
| 0.185151
| 0
| 0
| 0
| 0
| 0
| 0.083871
| 1
| 0.051613
| false
| 0
| 0.03871
| 0
| 0.096774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
92ffb78a1cbd206add3df2707962335b3b855059
| 12,085
|
py
|
Python
|
tests/integration/test_grant_and_revoke/test.py
|
johnskopis/ClickHouse
|
318f14b95e0bbc02d1a5f07241d8f2c1c3d4d281
|
[
"Apache-2.0"
] | 8
|
2019-06-04T02:50:13.000Z
|
2022-02-10T06:46:51.000Z
|
tests/integration/test_grant_and_revoke/test.py
|
johnskopis/ClickHouse
|
318f14b95e0bbc02d1a5f07241d8f2c1c3d4d281
|
[
"Apache-2.0"
] | 16
|
2021-06-07T21:32:30.000Z
|
2022-03-31T21:08:29.000Z
|
tests/integration/test_grant_and_revoke/test.py
|
johnskopis/ClickHouse
|
318f14b95e0bbc02d1a5f07241d8f2c1c3d4d281
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV
import re
# Single ClickHouse test cluster; `instance` is the node every test queries.
cluster = ClickHouseCluster(__file__)
instance = cluster.add_instance('instance')
@pytest.fixture(scope="module", autouse=True)
def start_cluster():
    """Start the cluster once per module and seed test.table with two rows."""
    try:
        cluster.start()

        instance.query("CREATE DATABASE test")
        instance.query("CREATE TABLE test.table(x UInt32, y UInt32) ENGINE = MergeTree ORDER BY tuple()")
        instance.query("INSERT INTO test.table VALUES (1,5), (2,10)")

        yield cluster

    finally:
        # Tear the cluster down even if setup failed part-way.
        cluster.shutdown()
@pytest.fixture(autouse=True)
def cleanup_after_test():
    """After every test, drop the users and view the test may have created."""
    try:
        yield
    finally:
        instance.query("DROP USER IF EXISTS A, B")
        instance.query("DROP TABLE IF EXISTS test.view_1")
def test_smoke():
    """GRANT SELECT enables reads for a user; REVOKE takes them away again."""
    instance.query("CREATE USER A")
    # Fresh user has no privileges on test.table.
    assert "Not enough privileges" in instance.query_and_get_error("SELECT * FROM test.table", user='A')

    instance.query('GRANT SELECT ON test.table TO A')
    assert instance.query("SELECT * FROM test.table", user='A') == "1\t5\n2\t10\n"

    instance.query('REVOKE SELECT ON test.table FROM A')
    assert "Not enough privileges" in instance.query_and_get_error("SELECT * FROM test.table", user='A')
def test_grant_option():
    """A user can re-grant a privilege only after receiving it WITH GRANT OPTION."""
    instance.query("CREATE USER A")
    instance.query("CREATE USER B")

    # Plain grant: A can read but cannot pass the privilege on to B.
    instance.query('GRANT SELECT ON test.table TO A')
    assert instance.query("SELECT * FROM test.table", user='A') == "1\t5\n2\t10\n"
    assert "Not enough privileges" in instance.query_and_get_error("GRANT SELECT ON test.table TO B", user='A')

    # With grant option: A can now grant to B, and B can read.
    instance.query('GRANT SELECT ON test.table TO A WITH GRANT OPTION')
    instance.query("GRANT SELECT ON test.table TO B", user='A')
    assert instance.query("SELECT * FROM test.table", user='B') == "1\t5\n2\t10\n"

    instance.query('REVOKE SELECT ON test.table FROM A, B')
def test_revoke_requires_grant_option():
    """Revoking another user's privilege requires holding it WITH GRANT OPTION,
    and a grant-option on a wider scope (test.*, *.*) covers narrower revokes."""
    instance.query("CREATE USER A")
    instance.query("CREATE USER B")

    instance.query("GRANT SELECT ON test.table TO B")
    assert instance.query("SHOW GRANTS FOR B") == "GRANT SELECT ON test.table TO B\n"

    # A has no privilege at all: revoke is rejected outright.
    expected_error = "Not enough privileges"
    assert expected_error in instance.query_and_get_error("REVOKE SELECT ON test.table FROM B", user='A')
    assert instance.query("SHOW GRANTS FOR B") == "GRANT SELECT ON test.table TO B\n"

    # A has the privilege but not the grant option: still rejected.
    instance.query("GRANT SELECT ON test.table TO A")
    expected_error = "privileges have been granted, but without grant option"
    assert expected_error in instance.query_and_get_error("REVOKE SELECT ON test.table FROM B", user='A')
    assert instance.query("SHOW GRANTS FOR B") == "GRANT SELECT ON test.table TO B\n"

    # Exact-scope grant option: revoke succeeds.
    instance.query("GRANT SELECT ON test.table TO A WITH GRANT OPTION")
    assert instance.query("SHOW GRANTS FOR B") == "GRANT SELECT ON test.table TO B\n"
    instance.query("REVOKE SELECT ON test.table FROM B", user='A')
    assert instance.query("SHOW GRANTS FOR B") == ""

    # The table-level grant option also covers wider revoke statements.
    instance.query("GRANT SELECT ON test.table TO B")
    assert instance.query("SHOW GRANTS FOR B") == "GRANT SELECT ON test.table TO B\n"
    instance.query("REVOKE SELECT ON test.* FROM B", user='A')
    assert instance.query("SHOW GRANTS FOR B") == ""

    instance.query("GRANT SELECT ON test.table TO B")
    assert instance.query("SHOW GRANTS FOR B") == "GRANT SELECT ON test.table TO B\n"
    instance.query("REVOKE ALL ON test.* FROM B", user='A')
    assert instance.query("SHOW GRANTS FOR B") == ""

    instance.query("GRANT SELECT ON test.table TO B")
    assert instance.query("SHOW GRANTS FOR B") == "GRANT SELECT ON test.table TO B\n"
    instance.query("REVOKE ALL ON *.* FROM B", user='A')
    assert instance.query("SHOW GRANTS FOR B") == ""

    # Stripping the grant option from A makes revokes fail again.
    instance.query("REVOKE GRANT OPTION FOR ALL ON *.* FROM A")
    instance.query("GRANT SELECT ON test.table TO B")
    assert instance.query("SHOW GRANTS FOR B") == "GRANT SELECT ON test.table TO B\n"
    expected_error = "privileges have been granted, but without grant option"
    assert expected_error in instance.query_and_get_error("REVOKE SELECT ON test.table FROM B", user='A')
    assert instance.query("SHOW GRANTS FOR B") == "GRANT SELECT ON test.table TO B\n"

    # A database-level grant option covers a table-level revoke.
    instance.query("GRANT SELECT ON test.* TO A WITH GRANT OPTION")
    instance.query("GRANT SELECT ON test.table TO B")
    assert instance.query("SHOW GRANTS FOR B") == "GRANT SELECT ON test.table TO B\n"
    instance.query("REVOKE SELECT ON test.table FROM B", user='A')
    assert instance.query("SHOW GRANTS FOR B") == ""
def test_implicit_show_grants():
    """Granting SELECT implicitly exposes the matching rows of the system
    tables (databases/tables/columns) to the grantee."""
    instance.query("CREATE USER A")
    # No grants: A cannot see the database, table or columns at all.
    assert instance.query("select count() FROM system.databases WHERE name='test'", user="A") == "0\n"
    assert instance.query("select count() FROM system.tables WHERE database='test' AND name='table'", user="A") == "0\n"
    assert instance.query("select count() FROM system.columns WHERE database='test' AND table='table'", user="A") == "0\n"

    # Column-level grant: only the granted column x becomes visible.
    instance.query("GRANT SELECT(x) ON test.table TO A")
    assert instance.query("SHOW GRANTS FOR A") == "GRANT SELECT(x) ON test.table TO A\n"
    assert instance.query("select count() FROM system.databases WHERE name='test'", user="A") == "1\n"
    assert instance.query("select count() FROM system.tables WHERE database='test' AND name='table'", user="A") == "1\n"
    assert instance.query("select count() FROM system.columns WHERE database='test' AND table='table'", user="A") == "1\n"

    # Table-level grant: both columns are now visible.
    instance.query("GRANT SELECT ON test.table TO A")
    assert instance.query("SHOW GRANTS FOR A") == "GRANT SELECT ON test.table TO A\n"
    assert instance.query("select count() FROM system.databases WHERE name='test'", user="A") == "1\n"
    assert instance.query("select count() FROM system.tables WHERE database='test' AND name='table'", user="A") == "1\n"
    assert instance.query("select count() FROM system.columns WHERE database='test' AND table='table'", user="A") == "2\n"

    # Database-level grant: same visibility.
    instance.query("GRANT SELECT ON test.* TO A")
    assert instance.query("SHOW GRANTS FOR A") == "GRANT SELECT ON test.* TO A\n"
    assert instance.query("select count() FROM system.databases WHERE name='test'", user="A") == "1\n"
    assert instance.query("select count() FROM system.tables WHERE database='test' AND name='table'", user="A") == "1\n"
    assert instance.query("select count() FROM system.columns WHERE database='test' AND table='table'", user="A") == "2\n"

    # Global grant: same visibility.
    instance.query("GRANT SELECT ON *.* TO A")
    assert instance.query("SHOW GRANTS FOR A") == "GRANT SELECT ON *.* TO A\n"
    assert instance.query("select count() FROM system.databases WHERE name='test'", user="A") == "1\n"
    assert instance.query("select count() FROM system.tables WHERE database='test' AND name='table'", user="A") == "1\n"
    assert instance.query("select count() FROM system.columns WHERE database='test' AND table='table'", user="A") == "2\n"

    # Revoking everything hides the system-table rows again.
    instance.query("REVOKE ALL ON *.* FROM A")
    assert instance.query("select count() FROM system.databases WHERE name='test'", user="A") == "0\n"
    assert instance.query("select count() FROM system.tables WHERE database='test' AND name='table'", user="A") == "0\n"
    assert instance.query("select count() FROM system.columns WHERE database='test' AND table='table'", user="A") == "0\n"
def test_implicit_create_view_grant():
    """CREATE TABLE on a database implicitly allows CREATE VIEW there."""
    instance.query("CREATE USER A")
    expected_error = "Not enough privileges"
    assert expected_error in instance.query_and_get_error("CREATE VIEW test.view_1 AS SELECT 1", user="A")

    instance.query("GRANT CREATE TABLE ON test.* TO A")
    instance.query("CREATE VIEW test.view_1 AS SELECT 1", user="A")
    assert instance.query("SELECT * FROM test.view_1") == "1\n"

    # Revoking CREATE TABLE removes the implicit CREATE VIEW too.
    instance.query("REVOKE CREATE TABLE ON test.* FROM A")
    instance.query("DROP TABLE test.view_1")
    assert expected_error in instance.query_and_get_error("CREATE VIEW test.view_1 AS SELECT 1", user="A")
def test_implicit_create_temporary_table_grant():
    """CREATE TABLE on any database implicitly allows temporary tables."""
    instance.query("CREATE USER A")
    expected_error = "Not enough privileges"
    assert expected_error in instance.query_and_get_error("CREATE TEMPORARY TABLE tmp(name String)", user="A")

    instance.query("GRANT CREATE TABLE ON test.* TO A")
    instance.query("CREATE TEMPORARY TABLE tmp(name String)", user="A")

    instance.query("REVOKE CREATE TABLE ON *.* FROM A")
    assert expected_error in instance.query_and_get_error("CREATE TEMPORARY TABLE tmp(name String)", user="A")
def test_introspection():
    """SHOW USERS / SHOW CREATE USERS / SHOW GRANTS / SHOW ACCESS and the
    system.users / system.grants tables must reflect the created users and
    their grants."""
    instance.query("CREATE USER A")
    instance.query("CREATE USER B")
    instance.query('GRANT SELECT ON test.table TO A')
    instance.query('GRANT CREATE ON *.* TO B WITH GRANT OPTION')

    assert instance.query("SHOW USERS") == TSV([ "A", "B", "default" ])
    assert instance.query("SHOW CREATE USERS A") == TSV([ "CREATE USER A" ])
    assert instance.query("SHOW CREATE USERS B") == TSV([ "CREATE USER B" ])
    assert instance.query("SHOW CREATE USERS A,B") == TSV([ "CREATE USER A", "CREATE USER B" ])
    assert instance.query("SHOW CREATE USERS") == TSV([ "CREATE USER A", "CREATE USER B", "CREATE USER default IDENTIFIED WITH plaintext_password SETTINGS PROFILE default" ])

    assert instance.query("SHOW GRANTS FOR A") == TSV([ "GRANT SELECT ON test.table TO A" ])
    assert instance.query("SHOW GRANTS FOR B") == TSV([ "GRANT CREATE ON *.* TO B WITH GRANT OPTION" ])
    assert instance.query("SHOW GRANTS FOR A,B") == TSV([ "GRANT SELECT ON test.table TO A", "GRANT CREATE ON *.* TO B WITH GRANT OPTION" ])
    # Output order is fixed regardless of how the users are listed.
    assert instance.query("SHOW GRANTS FOR B,A") == TSV([ "GRANT SELECT ON test.table TO A", "GRANT CREATE ON *.* TO B WITH GRANT OPTION" ])
    assert instance.query("SHOW GRANTS FOR ALL") == TSV([ "GRANT SELECT ON test.table TO A", "GRANT CREATE ON *.* TO B WITH GRANT OPTION", "GRANT ALL ON *.* TO default WITH GRANT OPTION" ])

    # Without FOR, SHOW GRANTS reports the current user's own grants.
    assert instance.query("SHOW GRANTS", user='A') == TSV([ "GRANT SELECT ON test.table TO A" ])
    assert instance.query("SHOW GRANTS", user='B') == TSV([ "GRANT CREATE ON *.* TO B WITH GRANT OPTION" ])

    expected_access1 = "CREATE USER A\n"\
                       "CREATE USER B\n"\
                       "CREATE USER default IDENTIFIED WITH plaintext_password SETTINGS PROFILE default"
    expected_access2 = "GRANT SELECT ON test.table TO A\n"\
                       "GRANT CREATE ON *.* TO B WITH GRANT OPTION\n"\
                       "GRANT ALL ON *.* TO default WITH GRANT OPTION\n"
    assert expected_access1 in instance.query("SHOW ACCESS")
    assert expected_access2 in instance.query("SHOW ACCESS")

    assert instance.query("SELECT name, storage, auth_type, auth_params, host_ip, host_names, host_names_regexp, host_names_like, default_roles_all, default_roles_list, default_roles_except from system.users WHERE name IN ('A', 'B') ORDER BY name") ==\
           TSV([[ "A", "local directory", "no_password", "{}", "['::/0']", "[]", "[]", "[]", 1, "[]", "[]" ],
                [ "B", "local directory", "no_password", "{}", "['::/0']", "[]", "[]", "[]", 1, "[]", "[]" ]])

    # Fix: the TSV NULL marker is the two characters backslash-N, i.e. "\\N";
    # a bare "\N" is a SyntaxError in Python 3 (\N escapes require {name}).
    assert instance.query("SELECT * from system.grants WHERE user_name IN ('A', 'B') ORDER BY user_name, access_type, grant_option") ==\
           TSV([[ "A", "\\N", "SELECT", "test", "table", "\\N", 0, 0 ],
                [ "B", "\\N", "CREATE", "\\N", "\\N", "\\N", 0, 1 ]])
def test_current_database():
    """Check that an unqualified table name in GRANT / SELECT resolves
    against the session's current database.

    Uses the `instance` ClickHouse fixture and `TSV` helper defined at
    module level (outside this view).
    """
    instance.query("CREATE USER A")
    # GRANT on bare "table" with database="test" must be stored as the
    # fully-qualified grant on test.table.
    instance.query("GRANT SELECT ON table TO A", database="test")
    assert instance.query("SHOW GRANTS FOR A") == TSV([ "GRANT SELECT ON test.table TO A" ])
    assert instance.query("SHOW GRANTS FOR A", database="test") == TSV([ "GRANT SELECT ON test.table TO A" ])
    # The grant works whether the table is referenced qualified or via the
    # session's current database.
    assert instance.query("SELECT * FROM test.table", user='A') == "1\t5\n2\t10\n"
    assert instance.query("SELECT * FROM table", user='A', database='test') == "1\t5\n2\t10\n"
    # A table of the same name in another database must NOT be covered by
    # the grant: with current database = default, bare "table" is denied.
    instance.query("CREATE TABLE default.table(x UInt32, y UInt32) ENGINE = MergeTree ORDER BY tuple()")
    assert "Not enough privileges" in instance.query_and_get_error("SELECT * FROM table", user='A')
| 54.192825
| 252
| 0.670749
| 1,804
| 12,085
| 4.433481
| 0.068182
| 0.203176
| 0.14016
| 0.087147
| 0.854714
| 0.84146
| 0.804701
| 0.784321
| 0.759315
| 0.73831
| 0
| 0.007881
| 0.181051
| 12,085
| 222
| 253
| 54.436937
| 0.800243
| 0
| 0
| 0.487952
| 0
| 0.012048
| 0.504427
| 0
| 0
| 0
| 0
| 0
| 0.433735
| 0
| null | null | 0.024096
| 0.024096
| null | null | 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1318748a15fdc63a88de41d290fef356d0533a84
| 467
|
py
|
Python
|
ports/unix/variants/manifest.py
|
ljk53/micropython
|
838f3a1ff6e291f9ee287d3c2152097946d4fe8c
|
[
"MIT"
] | 1
|
2021-05-29T13:44:20.000Z
|
2021-05-29T13:44:20.000Z
|
ports/unix/variants/manifest.py
|
ljk53/micropython
|
838f3a1ff6e291f9ee287d3c2152097946d4fe8c
|
[
"MIT"
] | 1
|
2021-05-31T22:39:54.000Z
|
2021-05-31T22:39:54.000Z
|
ports/unix/variants/manifest.py
|
ljk53/micropython
|
838f3a1ff6e291f9ee287d3c2152097946d4fe8c
|
[
"MIT"
] | 1
|
2021-06-22T07:39:06.000Z
|
2021-06-22T07:39:06.000Z
|
# MicroPython board/variant manifest: freeze_as_mpy()/freeze() are provided
# by the manifest build machinery, and $(MPY_DIR) is expanded by the build.
# upip package manager and its tar helper (opt=3 strips debug info).
freeze_as_mpy('$(MPY_DIR)/tools', 'upip.py')
freeze_as_mpy('$(MPY_DIR)/tools', 'upip_utarfile.py', opt=3)
# LVGL binding support modules frozen into the firmware image.
freeze('$(MPY_DIR)/lib/lv_bindings/driver/linux', 'evdev.py')
freeze('$(MPY_DIR)/lib/lv_bindings/lib', 'lv_colors.py')
freeze('$(MPY_DIR)/lib/lv_bindings/lib', 'async_utils.py')
freeze('$(MPY_DIR)/lib/lv_bindings/lib', 'display_driver_utils.py')
freeze('$(MPY_DIR)/lib/lv_bindings/lib', 'imagetools.py')
freeze('$(MPY_DIR)/lib/lv_bindings/lib', 'fs_driver.py')
| 51.888889
| 67
| 0.72591
| 79
| 467
| 3.987342
| 0.278481
| 0.152381
| 0.228571
| 0.285714
| 0.752381
| 0.752381
| 0.673016
| 0.507937
| 0.222222
| 0
| 0
| 0.002222
| 0.036403
| 467
| 8
| 68
| 58.375
| 0.697778
| 0
| 0
| 0
| 0
| 0
| 0.698073
| 0.453961
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1375c1f52b862fbfc864160fccce7c0540d4d8d1
| 1,693
|
py
|
Python
|
blog/httphelp.py
|
hsiang-ever/django_blog
|
eb863d6495638692ddd4de58f063533ceb2de111
|
[
"MIT"
] | null | null | null |
blog/httphelp.py
|
hsiang-ever/django_blog
|
eb863d6495638692ddd4de58f063533ceb2de111
|
[
"MIT"
] | null | null | null |
blog/httphelp.py
|
hsiang-ever/django_blog
|
eb863d6495638692ddd4de58f063533ceb2de111
|
[
"MIT"
] | null | null | null |
from urllib.request import urlopen, Request
import json
def getPostList():
    """Fetch every post from the remote posts API.

    Returns:
        list: the decoded ``posts`` array from the JSON response.

    Raises:
        urllib.error.URLError: on network/HTTP failure.
    """
    # url = 'http://0.0.0.0:5000/posts/'  # local dev endpoint
    url = 'https://shorten-url-1491815099304.appspot.com/posts/'
    headers = {'Content-Type': 'application/json'}
    req = Request(url=url, headers=headers)
    # Use a context manager so the HTTP response/socket is always closed
    # (the original leaked it).
    with urlopen(req) as res:
        posts = json.loads(res.read())['posts']
    return posts
def getPostDetail(pk):
    """Fetch a single post by primary key from the remote posts API.

    Args:
        pk: post identifier interpolated into the URL.

    Returns:
        dict: the decoded ``post`` object from the JSON response.

    Raises:
        urllib.error.URLError: on network/HTTP failure.
    """
    # url = 'http://0.0.0.0:5000/posts/{}/'.format(pk)  # local dev endpoint
    url = 'https://shorten-url-1491815099304.appspot.com/posts/{}/'.format(pk)
    headers = {'Content-Type': 'application/json'}
    req = Request(url=url, headers=headers)
    # Context manager closes the response; the original leaked the socket.
    with urlopen(req) as res:
        post = json.loads(res.read())['post']
    return post
def postPostDetail(data):
    """Create a post by POSTing ``data`` to the remote posts API.

    Args:
        data (bytes): JSON-encoded request body (presence of ``data``
            makes urllib issue a POST).

    Returns:
        dict: the decoded ``post`` object from the JSON response.

    Raises:
        urllib.error.URLError: on network/HTTP failure.
    """
    # url = 'http://0.0.0.0:5000/posts/'  # local dev endpoint
    url = 'https://shorten-url-1491815099304.appspot.com/posts/'
    headers = {'Content-Type': 'application/json'}
    req = Request(url=url, headers=headers, data=data)
    # Context manager closes the response; the original leaked the socket.
    with urlopen(req) as res:
        post = json.loads(res.read())['post']
    return post
def putPostDetail(pk, data):
    """Update post ``pk`` by PUTting ``data`` to the remote posts API.

    Args:
        pk: post identifier interpolated into the URL.
        data (bytes): JSON-encoded request body.

    Returns:
        dict: the decoded ``post`` object from the JSON response.

    Raises:
        urllib.error.URLError: on network/HTTP failure.
    """
    # url = 'http://0.0.0.0:5000/posts/{}/'.format(pk)  # local dev endpoint
    url = 'https://shorten-url-1491815099304.appspot.com/posts/{}/'.format(pk)
    headers = {'Content-Type': 'application/json'}
    # Request(method=...) (Python 3.3+) replaces the fragile
    # ``req.get_method = lambda: 'PUT'`` monkey-patch the original used.
    req = Request(url=url, headers=headers, data=data, method='PUT')
    # Context manager closes the response; the original leaked the socket.
    with urlopen(req) as res:
        post = json.loads(res.read())['post']
    return post
def deletePostDetail(pk):
    """Delete post ``pk`` via the remote posts API.

    Args:
        pk: post identifier interpolated into the URL.

    Returns:
        None: the original implementation discards the response body
        (the parsing lines were commented out), and that behavior is kept.

    Raises:
        urllib.error.URLError: on network/HTTP failure.
    """
    # url = 'http://0.0.0.0:5000/posts/{}/'.format(pk)  # local dev endpoint
    url = 'https://shorten-url-1491815099304.appspot.com/posts/{}/'.format(pk)
    headers = {'Content-Type': 'application/json'}
    # Request(method=...) (Python 3.3+) replaces the fragile
    # ``req.get_method = lambda: 'DELETE'`` monkey-patch the original used.
    req = Request(url=url, headers=headers, method='DELETE')
    # Context manager closes the response; the original leaked the socket.
    with urlopen(req) as res:
        pass
    # post = json.loads(res.read())['post']
    # return post
| 30.232143
| 75
| 0.679268
| 242
| 1,693
| 4.743802
| 0.165289
| 0.026132
| 0.026132
| 0.039199
| 0.810976
| 0.810976
| 0.810976
| 0.810976
| 0.810976
| 0.804007
| 0
| 0.070047
| 0.114589
| 1,693
| 55
| 76
| 30.781818
| 0.695797
| 0.16775
| 0
| 0.702703
| 0
| 0
| 0.311828
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.135135
| false
| 0
| 0.054054
| 0
| 0.297297
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
137c7495e1b1686700844a777c25af6b9c94e279
| 15,279
|
py
|
Python
|
genome_designer/variants/tests/test_vcf_parser.py
|
churchlab/millstone
|
ddb5d003a5b8a7675e5a56bafd5c432d9642b473
|
[
"MIT"
] | 45
|
2015-09-30T14:55:33.000Z
|
2021-06-28T02:33:30.000Z
|
genome_designer/variants/tests/test_vcf_parser.py
|
churchlab/millstone
|
ddb5d003a5b8a7675e5a56bafd5c432d9642b473
|
[
"MIT"
] | 261
|
2015-06-03T20:41:56.000Z
|
2022-03-07T08:46:10.000Z
|
genome_designer/variants/tests/test_vcf_parser.py
|
churchlab/millstone
|
ddb5d003a5b8a7675e5a56bafd5c432d9642b473
|
[
"MIT"
] | 22
|
2015-06-04T20:43:10.000Z
|
2022-02-27T08:27:34.000Z
|
"""
Tests for vcf_parser.py
"""
import os
from django.conf import settings
from django.test import TestCase
import vcf
from main.models import AlignmentGroup
from main.models import Chromosome
from main.models import Dataset
from main.models import ExperimentSample
from main.models import Variant
from main.models import VariantAlternate
from main.models import VariantCallerCommonData
from main.testing_util import create_common_entities
from main.testing_util import create_recoli_sv_data_from_vcf
from main.testing_util import create_sample_and_alignment
from main.testing_util import TEST_DATA_DIR
from utils.import_util import copy_and_add_dataset_source
from utils.import_util import import_reference_genome_from_local_file
from variants.vcf_parser import parse_alignment_group_vcf
from variants.vcf_parser import parse_vcf
# Paths to on-disk fixtures used by the tests below.
# Reference genome FASTA for the fake test genome.
TEST_FASTA = os.path.join(settings.PWD, 'test_data', 'fake_genome_and_reads',
        'test_genome.fa')
# Freebayes-style VCF of SNP calls against the fake genome (diploid).
TEST_GENOME_SNPS = os.path.join(settings.PWD, 'test_data', 'fake_genome_and_reads',
        'test_genome_snps.vcf')
# Same calls made in haploid mode.
TEST_GENOME_HAPLOID_SNPS = os.path.join(settings.PWD, 'test_data',
        'fake_genome_and_reads', 'test_genome_snps_haploid.vcf')
# Directory of vcf-parser-specific fixtures, including a lumpy SV vcf.
VCF_PARSER_TEST_DATA_DIR = os.path.join(TEST_DATA_DIR, 'vcf_parser_test_data')
LUMPY_4_SAMPLES_2_DELETIONS_VCF = os.path.join(
        VCF_PARSER_TEST_DATA_DIR, 'lumpy_4_samples_2_deletions.vcf')
class TestVCFParser(TestCase):
    """Integration tests for variants.vcf_parser: parsing vcf Datasets
    attached to an AlignmentGroup into Variant/VariantAlternate/
    VariantCallerCommonData/VariantEvidence model objects.
    """

    def setUp(self):
        # Common project/user entities plus a reference genome imported
        # from the fixture FASTA.
        self.common_data = create_common_entities()
        self.project = self.common_data['project']
        self.reference_genome = import_reference_genome_from_local_file(
                self.project, 'ref_genome', TEST_FASTA, 'fasta')

    def test_parser(self):
        """Basic tests for the parser.
        """
        VCF_DATATYPE = Dataset.TYPE.VCF_FREEBAYES
        alignment_group = AlignmentGroup.objects.create(
                label='test alignment', reference_genome=self.reference_genome)
        copy_and_add_dataset_source(alignment_group, VCF_DATATYPE,
                VCF_DATATYPE, TEST_GENOME_SNPS)
        Chromosome.objects.create(
                reference_genome=self.reference_genome,
                label='Chromosome',
                num_bases=9001)
        # Create experiment sample objects having UIDs that correspond to those
        # in the vcf file. This is a bit "fake" in that the actual pipeline we
        # will be generating the vcf file from the samples (see add_groups()
        # stage of pipeline.
        with open(TEST_GENOME_SNPS) as fh:
            reader = vcf.Reader(fh)
            experiment_sample_uids = reader.samples
        num_experiment_samples = len(experiment_sample_uids)
        for sample_uid in experiment_sample_uids:
            ExperimentSample.objects.create(
                    uid=sample_uid,
                    project=self.project,
                    label='fakename:' + sample_uid
            )
        # Count the number of records in the vcf file for testing.
        record_count = 0
        with open(TEST_GENOME_SNPS) as fh:
            for record in vcf.Reader(fh):
                record_count += 1
        # Parse the vcf
        parse_alignment_group_vcf(alignment_group, VCF_DATATYPE)
        variant_list = Variant.objects.filter(
                reference_genome=self.reference_genome)
        # There should be one Variant object for each record.
        self.assertEqual(record_count, len(variant_list))
        # Spot-check a few variants.
        self.assertEqual(1, len(Variant.objects.filter(
                reference_genome=self.reference_genome,
                position=376)))
        v_453 = Variant.objects.get(reference_genome=self.reference_genome,
                position=453)
        self.assertEqual(['G'], v_453.get_alternates())
        # Check false negatives.
        self.assertEqual(0, len(Variant.objects.filter(
                reference_genome=self.reference_genome,
                position=454)))
        # There should be one VariantCallerCommonData object for each record.
        self.assertEqual(record_count,
                len(VariantCallerCommonData.objects.filter(
                        variant__reference_genome=self.reference_genome)))
        # There should also be one VariantEvidence object per Variant x Sample.
        for variant in variant_list:
            vccd = variant.variantcallercommondata_set.all()[0]
            self.assertEqual(num_experiment_samples,
                    len(vccd.variantevidence_set.all()))
        # Check that alternate data is populated.
        # The raw vcf record under test (multi-allelic del/mnp/snp at 1330):
        #Chromosome	1330	.	CG	C,GC,AG	126.036	.	AB=0.5,0.5,1;ABP=3.0103,3.0103,7.35324;AC=1,1,1;AF=0.0833333,0.0833333,0.0833333;AN=12;AO=1,1,2;CIGAR=1M1D,2X,1X1M;DP=10;DPRA=1.33333,1.33333,1.33333;EPP=5.18177,5.18177,3.0103;EPPR=4.45795;HWE=-16.5861;LEN=1,2,1;MEANALT=2,2,1;MQM=60,37,48.5;MQMR=40.8333;NS=6;NUMALT=3;ODDS=1.50408;PAIRED=1,0,0.5;PAIREDR=0.166667;RO=6;RPP=5.18177,5.18177,7.35324;RPPR=16.0391;RUN=1,1,1;SAP=5.18177,5.18177,3.0103;SRP=4.45795;TYPE=del,mnp,snp;XAI=0,0.0102041,0.00515464;XAM=0,0.0102041,0.0253649;XAS=0,0,0.0202103;XRI=0.0016835;XRM=0.00835084;XRS=0.00666733;technology.illumina=1,1,1;BVAR	GT:DP:RO:QR:AO:QA:GL	.	0/0:1:1:36:0,0,0:0,0,0:0,-0.30103,-3.6,-0.30103,-3.6,-3.6,-0.30103,-3.6,-3.6,-3.6	0/0:2:2:76:0,0,0:0,0,0:0,-0.60206,-7.03,-0.60206,-7.03,-7.03,-0.60206,-7.03,-7.03,-7.03	1/2:2:0:0:1,1,0:108,31,0:-8.645,-3.40103,-3.1,-6.30103,-0.30103,-6,-8.645,-3.40103,-6.30103,-8.645	.	0/3:2:0:0:0,0,2:0,0,73:-6.935,-6.935,-6.935,-6.935,-6.935,-6.935,-0.60206,-0.60206,-0.60206,0	0/0:2:2:72:0,0,0:0,0,0:0,-0.60206,-6.84,-0.60206,-6.84,-6.84,-0.60206,-6.84,-6.84,-6.84	.	0/0:1:1:34:0,0,0:0,0,0:0,-0.30103,-3.4,-0.30103,-3.4,-3.4,-0.30103,-3.4,-3.4,-3.4	.
        v_1330 = Variant.objects.get(reference_genome=self.reference_genome,
                position=1330)
        self.assertEqual(set(v_1330.get_alternates()), set(['C', 'GC', 'AG']))
        v_1330_c = VariantAlternate.objects.get(variant=v_1330, alt_value='C')
        self.assertTrue(len(v_1330_c.variantevidence_set.all()))
        v_1330_gc = VariantAlternate.objects.get(variant=v_1330, alt_value='GC')
        self.assertTrue(len(v_1330_gc.variantevidence_set.all()))
        # Per-alternate INFO fields are split out onto each VariantAlternate.
        self.assertEqual(v_1330_c.data['INFO_ABP'], v_1330_gc.data['INFO_ABP'])

    def test_parser_skip_het(self):
        """Test that skipping het_only variants works.
        """
        VCF_DATATYPE = Dataset.TYPE.VCF_FREEBAYES
        alignment_group = AlignmentGroup.objects.create(
                label='test alignment', reference_genome=self.reference_genome)
        copy_and_add_dataset_source(alignment_group, VCF_DATATYPE,
                VCF_DATATYPE, TEST_GENOME_SNPS)
        Chromosome.objects.create(
                reference_genome=self.reference_genome,
                label='Chromosome',
                num_bases=9001)
        # Create experiment sample objects having UIDs that correspond to those
        # in the vcf file. This is a bit "fake" in that the actual pipeline we
        # will be generating the vcf file from the samples (see add_groups()
        # stage of pipeline.
        with open(TEST_GENOME_SNPS) as fh:
            reader = vcf.Reader(fh)
            experiment_sample_uids = reader.samples
        num_experiment_samples = len(experiment_sample_uids)
        for sample_uid in experiment_sample_uids:
            ExperimentSample.objects.create(
                    uid=sample_uid,
                    project=self.project,
                    label='fakename:' + sample_uid
            )
        # Count the number of records in the vcf file for testing.
        # Only records with at least one hom-alt call (gt_type == 2) should
        # survive het-only skipping.
        record_count = 0
        with open(TEST_GENOME_SNPS) as fh:
            for record in vcf.Reader(fh):
                if sum([s.gt_type == 2 for s in record.samples]) > 0:
                    record_count += 1
        # Turn on het-only skipping
        alignment_group.alignment_options['skip_het_only'] = True
        alignment_group.save()
        # Parse the vcf
        parse_alignment_group_vcf(alignment_group, VCF_DATATYPE)
        variant_list = Variant.objects.filter(
                reference_genome=self.reference_genome)
        # There should be one Variant object for each record that is not het.
        self.assertEqual(record_count, len(variant_list))

    def test_parser_haploid(self):
        """Basic tests for the parser in haploid mode.
        """
        VCF_DATATYPE = Dataset.TYPE.VCF_FREEBAYES
        alignment_group = AlignmentGroup.objects.create(
                label='test alignment', reference_genome=self.reference_genome)
        alignment_group.alignment_options['call_as_haploid'] = True
        copy_and_add_dataset_source(alignment_group, VCF_DATATYPE,
                VCF_DATATYPE, TEST_GENOME_HAPLOID_SNPS)
        Chromosome.objects.create(
                reference_genome=self.reference_genome,
                label='Chromosome',
                num_bases=9001)
        # Create experiment sample objects having UIDs that correspond to those
        # in the vcf file. This is a bit "fake" in that the actual pipeline we
        # will be generating the vcf file from the samples (see add_groups()
        # stage of pipeline.
        # NOTE(review): samples/record counts are read from TEST_GENOME_SNPS
        # (diploid) while the parsed Dataset is TEST_GENOME_HAPLOID_SNPS —
        # presumably the two fixtures share samples/positions; confirm.
        with open(TEST_GENOME_SNPS) as fh:
            reader = vcf.Reader(fh)
            experiment_sample_uids = reader.samples
        num_experiment_samples = len(experiment_sample_uids)
        for sample_uid in experiment_sample_uids:
            ExperimentSample.objects.create(
                    uid=sample_uid,
                    project=self.project,
                    label='fakename:' + sample_uid
            )
        # Count the number of records in the vcf file for testing.
        record_count = 0
        with open(TEST_GENOME_SNPS) as fh:
            for record in vcf.Reader(fh):
                record_count += 1
        # Parse the vcf
        parse_alignment_group_vcf(alignment_group, VCF_DATATYPE)
        variant_list = Variant.objects.filter(
                reference_genome=self.reference_genome)
        # There should be one Variant object for each record.
        self.assertEqual(record_count, len(variant_list))
        # Spot-check a few variants.
        self.assertEqual(1, len(Variant.objects.filter(
                reference_genome=self.reference_genome,
                position=376)))
        v_453 = Variant.objects.get(reference_genome=self.reference_genome,
                position=453)
        self.assertEqual(['G'], v_453.get_alternates())
        # Check false negatives.
        self.assertEqual(0, len(Variant.objects.filter(
                reference_genome=self.reference_genome,
                position=454)))
        # There should be one VariantCallerCommonData object for each record.
        self.assertEqual(record_count,
                len(VariantCallerCommonData.objects.filter(
                        variant__reference_genome=self.reference_genome)))
        # There should also be one VariantEvidence object per Variant x Sample.
        for variant in variant_list:
            vccd = variant.variantcallercommondata_set.all()[0]
            self.assertEqual(num_experiment_samples,
                    len(vccd.variantevidence_set.all()))
        # Check that alternate data is populated.
        # The raw vcf record under test (multi-allelic del/mnp/snp at 1330):
        #Chromosome	1330	.	CG	C,GC,AG	126.036	.	AB=0.5,0.5,1;ABP=3.0103,3.0103,7.35324;AC=1,1,1;AF=0.0833333,0.0833333,0.0833333;AN=12;AO=1,1,2;CIGAR=1M1D,2X,1X1M;DP=10;DPRA=1.33333,1.33333,1.33333;EPP=5.18177,5.18177,3.0103;EPPR=4.45795;HWE=-16.5861;LEN=1,2,1;MEANALT=2,2,1;MQM=60,37,48.5;MQMR=40.8333;NS=6;NUMALT=3;ODDS=1.50408;PAIRED=1,0,0.5;PAIREDR=0.166667;RO=6;RPP=5.18177,5.18177,7.35324;RPPR=16.0391;RUN=1,1,1;SAP=5.18177,5.18177,3.0103;SRP=4.45795;TYPE=del,mnp,snp;XAI=0,0.0102041,0.00515464;XAM=0,0.0102041,0.0253649;XAS=0,0,0.0202103;XRI=0.0016835;XRM=0.00835084;XRS=0.00666733;technology.illumina=1,1,1;BVAR	GT:DP:RO:QR:AO:QA:GL	.	0/0:1:1:36:0,0,0:0,0,0:0,-0.30103,-3.6,-0.30103,-3.6,-3.6,-0.30103,-3.6,-3.6,-3.6	0/0:2:2:76:0,0,0:0,0,0:0,-0.60206,-7.03,-0.60206,-7.03,-7.03,-0.60206,-7.03,-7.03,-7.03	1/2:2:0:0:1,1,0:108,31,0:-8.645,-3.40103,-3.1,-6.30103,-0.30103,-6,-8.645,-3.40103,-6.30103,-8.645	.	0/3:2:0:0:0,0,2:0,0,73:-6.935,-6.935,-6.935,-6.935,-6.935,-6.935,-0.60206,-0.60206,-0.60206,0	0/0:2:2:72:0,0,0:0,0,0:0,-0.60206,-6.84,-0.60206,-6.84,-6.84,-0.60206,-6.84,-6.84,-6.84	.	0/0:1:1:34:0,0,0:0,0,0:0,-0.30103,-3.4,-0.30103,-3.4,-3.4,-0.30103,-3.4,-3.4,-3.4	.
        v_1330 = Variant.objects.get(reference_genome=self.reference_genome,
                position=1330)
        self.assertEqual(set(v_1330.get_alternates()), set(['C', 'GC', 'AG']))
        v_1330_c = VariantAlternate.objects.get(variant=v_1330, alt_value='C')
        self.assertTrue(len(v_1330_c.variantevidence_set.all()))
        v_1330_gc = VariantAlternate.objects.get(variant=v_1330, alt_value='GC')
        # In haploid mode the GC alternate gets no evidence (unlike the
        # diploid test above, which asserts True here).
        self.assertFalse(len(v_1330_gc.variantevidence_set.all()))

    def test_parser__sv_lumpy(self):
        """Tests parsing lumpy output which contains SV data.
        """
        DELETION_TEST_DATA_DIR = os.path.join(TEST_DATA_DIR,
                'sv_testing', 'deletion_bd5a1123')
        DELETION_REF_FASTA = os.path.join(
                DELETION_TEST_DATA_DIR, 'small_ref.fa')
        DELETION_SAMPLE_1_UID = 'ds1'
        DELETION_SAMPLE_2_UID = 'ds2'
        DELETION_SAMPLE_3_UID = 'ds3'
        DELETION_SAMPLE_4_UID = 'f8346a99'
        reference_genome = import_reference_genome_from_local_file(
                self.project, 'ref_genome', DELETION_REF_FASTA, 'fasta')
        alignment_group = AlignmentGroup.objects.create(
                label='Alignment 1', reference_genome=reference_genome,
                aligner=AlignmentGroup.ALIGNER.BWA)
        # Connect lumpy vcf as Dataset.
        lumpy_vcf_dataset = copy_and_add_dataset_source(
                alignment_group, Dataset.TYPE.VCF_LUMPY, Dataset.TYPE.VCF_LUMPY,
                LUMPY_4_SAMPLES_2_DELETIONS_VCF)
        # Create samples corresponding to sample ids in vcf.
        create_sample_and_alignment(
                self.project, alignment_group, DELETION_SAMPLE_1_UID)
        create_sample_and_alignment(
                self.project, alignment_group, DELETION_SAMPLE_2_UID)
        create_sample_and_alignment(
                self.project, alignment_group, DELETION_SAMPLE_3_UID)
        create_sample_and_alignment(
                self.project, alignment_group, DELETION_SAMPLE_4_UID)
        # Now we have everything we need to parse the vcf.
        parse_vcf(lumpy_vcf_dataset, alignment_group)
        # Check expected variants: both deletions are flagged as SVs.
        v_4998 = Variant.objects.get(
                reference_genome=reference_genome, position=4998)
        v_4998_vccd = v_4998.variantcallercommondata_set.all()[0]
        self.assertTrue(v_4998_vccd.data['IS_SV'])
        v_9999 = Variant.objects.get(
                reference_genome=reference_genome, position=9999)
        v_9999_vccd = v_9999.variantcallercommondata_set.all()[0]
        self.assertTrue(v_9999_vccd.data['IS_SV'])

    def test_parser__sv_lumpy__and_check_output(self):
        """Populates db from lumpy-generated vcf and makes various checks
        on the output.
        """
        # All assertions live inside this shared testing_util helper.
        create_recoli_sv_data_from_vcf(self.project)
| 48.198738
| 1,206
| 0.662347
| 2,262
| 15,279
| 4.276304
| 0.127321
| 0.017781
| 0.017368
| 0.017368
| 0.828595
| 0.810297
| 0.767807
| 0.744133
| 0.732761
| 0.726145
| 0
| 0.11474
| 0.222528
| 15,279
| 316
| 1,207
| 48.351266
| 0.699554
| 0.287715
| 0
| 0.614634
| 0
| 0
| 0.044276
| 0.0113
| 0
| 0
| 0
| 0
| 0.107317
| 1
| 0.029268
| false
| 0
| 0.102439
| 0
| 0.136585
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13868f879246c68859ec6c33940fb1a1f22bad69
| 3,395
|
py
|
Python
|
Fastir_Collector/fs/windowsVistaFiles.py
|
Unam3dd/Train-2018-2020
|
afb6ae70fe338cbe55a21b74648d91996b818fa2
|
[
"MIT"
] | 4
|
2021-04-23T15:39:17.000Z
|
2021-12-27T22:53:24.000Z
|
Fastir_Collector/fs/windowsVistaFiles.py
|
Unam3dd/Train-2018-2020
|
afb6ae70fe338cbe55a21b74648d91996b818fa2
|
[
"MIT"
] | null | null | null |
Fastir_Collector/fs/windowsVistaFiles.py
|
Unam3dd/Train-2018-2020
|
afb6ae70fe338cbe55a21b74648d91996b818fa2
|
[
"MIT"
] | 2
|
2021-04-19T08:28:54.000Z
|
2022-01-19T13:23:29.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from fs import _FS
class WindowsVistaFiles(_FS):
    """Filesystem-artifact collector for Windows Vista+ profile layouts.

    Thin adapter over the shared _FS base: each method delegates to the
    base implementation, supplying the Vista-era glob paths (per-user
    ``\\Users\\...\\AppData`` locations) for browser/Skype history,
    prefetch, named pipes, and startup items. Results are emitted either
    as CSV or JSON depending on which wrapper is called.
    Explicit ``super(WindowsVistaFiles, self)`` form is kept for
    Python 2 compatibility (see the ``__future__`` import at file top).
    """

    def __init__(self, params):
        super(WindowsVistaFiles, self).__init__(params)
        # Root of the user profile directory, e.g. C:\Users.
        self.userprofile = params['USERPROFILE']

    # NOTE(review): double-underscore name-mangles this to
    # _WindowsVistaFiles__list_named_pipes; the csv/json wrappers below
    # call the base class's _list_named_pipes directly, so this wrapper
    # appears unused — confirm before removing.
    def __list_named_pipes(self):
        return super(WindowsVistaFiles, self)._list_named_pipes()

    def _list_windows_prefetch(self):
        return super(WindowsVistaFiles, self)._list_windows_prefetch()

    def _chrome_history(self):
        # Per-user, per-profile Chrome History sqlite databases.
        return super(WindowsVistaFiles, self)._chrome_history(
                '\\Users\\*\\AppData\\Local\\Google\\Chrome\\User Data\\*\\History')

    def _firefox_history(self):
        # Per-user default-profile places.sqlite (history + bookmarks).
        return super(WindowsVistaFiles, self)._firefox_history(
                '\\Users\\*\\AppData\\Roaming\\Mozilla\\Firefox\\Profiles\\*.default\\places.sqlite')

    # ---- CSV output wrappers ----

    def csv_print_list_named_pipes(self):
        super(WindowsVistaFiles, self)._csv_list_named_pipes(self._list_named_pipes())

    def csv_print_list_windows_prefetch(self):
        super(WindowsVistaFiles, self)._csv_windows_prefetch(self._list_windows_prefetch())

    def csv_skype_history(self):
        super(WindowsVistaFiles, self)._skype_history(['AppData\\Roaming\\Skype'])

    def csv_ie_history(self):
        # Both the normal and Protected Mode ("Low") IE history stores.
        super(WindowsVistaFiles, self)._ie_history(['AppData\\Local\\Microsoft\\Windows\\*\\History.IE5',
                                                    'AppData\\Local\\Microsoft\\Windows\\*\\Low\\History.IE5'])

    def csv_firefox_history(self):
        super(WindowsVistaFiles, self)._csv_firefox_history(self._firefox_history())

    def csv_chrome_history(self):
        super(WindowsVistaFiles, self)._csv_chrome_history(self._chrome_history())

    def csv_firefox_downloads(self):
        super(WindowsVistaFiles, self)._firefox_downloads(
                ['AppData\\Roaming\\Mozilla\\Firefox\\Profiles\\*.default\\downloads.sqlite'])

    def csv_get_startup_files(self):
        super(WindowsVistaFiles, self)._csv_get_startup_files(
                self.userprofile + '\\*\\AppData\\Roaming\\Microsoft\\Windows\\Start Menu\\Programs\\Startup\\*')

    # ---- JSON output wrappers (same sources as the CSV ones above) ----

    def json_print_list_named_pipes(self):
        super(WindowsVistaFiles, self)._json_list_named_pipes(self._list_named_pipes())

    def json_print_list_windows_prefetch(self):
        super(WindowsVistaFiles, self)._json_windows_prefetch(self._list_windows_prefetch())

    def json_skype_history(self):
        super(WindowsVistaFiles, self)._skype_history(['AppData\\Roaming\\Skype'])

    def json_ie_history(self):
        super(WindowsVistaFiles, self)._ie_history(['AppData\\Local\\Microsoft\\Windows\\*\\History.IE5',
                                                    'AppData\\Local\\Microsoft\\Windows\\*\\Low\\History.IE5'])

    def json_firefox_history(self):
        super(WindowsVistaFiles, self)._json_firefox_history(self._firefox_history())

    def json_chrome_history(self):
        super(WindowsVistaFiles, self)._json_chrome_history(self._chrome_history())

    def json_firefox_downloads(self):
        super(WindowsVistaFiles, self)._firefox_downloads(
                ['AppData\\Roaming\\Mozilla\\Firefox\\Profiles\\*.default\\downloads.sqlite'])

    def json_get_startup_files(self):
        super(WindowsVistaFiles, self)._json_get_startup_files(
                self.userprofile + '\\*\\AppData\\Roaming\\Microsoft\\Windows\\Start Menu\\Programs\\Startup\\*')
| 43.525641
| 111
| 0.697791
| 369
| 3,395
| 6.02981
| 0.143631
| 0.20764
| 0.245393
| 0.21573
| 0.838652
| 0.812135
| 0.572584
| 0.495281
| 0.371236
| 0.371236
| 0
| 0.001776
| 0.170839
| 3,395
| 77
| 112
| 44.090909
| 0.788632
| 0.006186
| 0
| 0.222222
| 0
| 0
| 0.210558
| 0.201661
| 0
| 0
| 0
| 0
| 0
| 1
| 0.388889
| false
| 0
| 0.037037
| 0.074074
| 0.518519
| 0.074074
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
13a06e5d37cfa811e586aa7684292c546e854691
| 11,050
|
py
|
Python
|
tests/integration/test_io.py
|
t-b/pynwb
|
b58e7b003247485120380360bb112bc6b22c7e60
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2021-04-13T20:47:36.000Z
|
2021-04-13T20:47:36.000Z
|
tests/integration/test_io.py
|
t-b/pynwb
|
b58e7b003247485120380360bb112bc6b22c7e60
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
tests/integration/test_io.py
|
t-b/pynwb
|
b58e7b003247485120380360bb112bc6b22c7e60
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
import unittest2 as unittest
import six
from datetime import datetime
from dateutil.tz import tzlocal, tzutc
import os
from h5py import File
from pynwb import NWBFile, TimeSeries, get_manager
from pynwb.form.backends.hdf5 import HDF5IO
from pynwb.form.build import GroupBuilder, DatasetBuilder
from pynwb.form.spec import NamespaceCatalog
from pynwb.spec import NWBGroupSpec, NWBDatasetSpec, NWBNamespace
class TestHDF5Writer(unittest.TestCase):
    """Round-trip tests for HDF5IO: write an NWBFile, then verify the
    on-disk HDF5 layout, clobber protection, and cached-spec readback.
    """

    _required_tests = ('test_nwbio', 'test_write_clobber', 'test_write_cache_spec')

    @property
    def required_tests(self):
        return self._required_tests

    def setUp(self):
        # Build an NWBFile with one acquisition TimeSeries, plus the
        # equivalent hand-built GroupBuilder tree (self.builder) that the
        # write path is expected to produce.
        self.manager = get_manager()
        self.path = "test_pynwb_io_hdf5.h5"
        self.start_time = datetime(1970, 1, 1, 12, tzinfo=tzutc())
        self.create_date = datetime(2017, 4, 15, 12, tzinfo=tzlocal())
        self.container = NWBFile('a test NWB File', 'TEST123',
                                 self.start_time, file_create_date=self.create_date)
        ts = TimeSeries('test_timeseries',
                        list(range(100, 200, 10)), 'SIunit', timestamps=list(range(10)), resolution=0.1)
        self.container.add_acquisition(ts)
        ts_builder = GroupBuilder('test_timeseries',
                                  attributes={'neurodata_type': 'TimeSeries',
                                              'help': 'General purpose TimeSeries'},
                                  datasets={'data': DatasetBuilder('data', list(range(100, 200, 10)),
                                                                   attributes={'unit': 'SIunit',
                                                                               'conversion': 1.0,
                                                                               'resolution': 0.1}),
                                            'timestamps': DatasetBuilder('timestamps', list(range(10)),
                                                                         attributes={'unit': 'Seconds',
                                                                                     'interval': 1})})
        self.builder = GroupBuilder(
            'root', groups={'acquisition': GroupBuilder('acquisition', groups={'test_timeseries': ts_builder}),
                            'analysis': GroupBuilder('analysis'),
                            'general': GroupBuilder('general'),
                            'processing': GroupBuilder('processing'),
                            'stimulus': GroupBuilder(
                                'stimulus',
                                groups={'presentation': GroupBuilder('presentation'),
                                        'templates': GroupBuilder('templates')})},
            datasets={'file_create_date': DatasetBuilder('file_create_date', [self.create_date.isoformat()]),
                      'identifier': DatasetBuilder('identifier', 'TEST123'),
                      'session_description': DatasetBuilder('session_description', 'a test NWB File'),
                      'nwb_version': DatasetBuilder('nwb_version', '1.0.6'),
                      'session_start_time': DatasetBuilder('session_start_time', self.start_time.isoformat())},
            attributes={'neurodata_type': 'NWBFile'})

    def tearDown(self):
        # Remove the HDF5 file created by each test.
        os.remove(self.path)

    def test_nwbio(self):
        # Write the container, then inspect the raw HDF5 file with h5py to
        # verify the expected top-level groups/datasets exist.
        io = HDF5IO(self.path, self.manager)
        io.write(self.container)
        io.close()
        f = File(self.path)
        self.assertIn('acquisition', f)
        self.assertIn('analysis', f)
        self.assertIn('general', f)
        self.assertIn('processing', f)
        self.assertIn('file_create_date', f)
        self.assertIn('identifier', f)
        self.assertIn('session_description', f)
        self.assertIn('session_start_time', f)
        acq = f.get('acquisition')
        self.assertIn('test_timeseries', acq)

    def test_write_clobber(self):
        # Writing once, holding the file open via h5py, then re-opening in
        # 'w-' (fail-if-exists) mode must raise the file-exists error.
        io = HDF5IO(self.path, self.manager)
        io.write(self.container)
        io.close()
        f = File(self.path)  # noqa: F841
        # h5py raises IOError on py2 and OSError on py3 for existing files.
        if six.PY2:
            assert_file_exists = IOError
        elif six.PY3:
            assert_file_exists = OSError
        with self.assertRaises(assert_file_exists):
            io = HDF5IO(self.path, self.manager, mode='w-')
            io.write(self.container)
            io.close()

    def test_write_cache_spec(self):
        '''
        Round-trip test for writing spec and reading it back in
        '''
        io = HDF5IO(self.path, self.manager)
        io.write(self.container, cache_spec=True)
        io.close()
        f = File(self.path)
        self.assertIn('specifications', f)
        # Load the cached 'core' namespace back out of the file and compare
        # it field-by-field and type-by-type against the live namespace.
        ns_catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
        HDF5IO.load_namespaces(ns_catalog, self.path, namespaces=['core'])
        original_ns = self.manager.namespace_catalog.get_namespace('core')
        cached_ns = ns_catalog.get_namespace('core')
        self.maxDiff = None
        for key in ('author', 'contact', 'doc', 'full_name', 'name'):
            with self.subTest(namespace_field=key):
                self.assertEqual(original_ns[key], cached_ns[key])
        for dt in original_ns.get_registered_types():
            with self.subTest(neurodata_type=dt):
                original_spec = original_ns.get_spec(dt)
                cached_spec = cached_ns.get_spec(dt)
                with self.subTest(test='data_type spec read back in'):
                    self.assertIsNotNone(cached_spec)
                with self.subTest(test='cached spec preserved original spec'):
                    self.assertDictEqual(original_spec, cached_spec)
class TestHDF5WriterWithInjectedFile(unittest.TestCase):
_required_tests = ('test_nwbio', 'test_write_clobber', 'test_write_cache_spec')
@property
def required_tests(self):
return self._required_tests
def setUp(self):
self.manager = get_manager()
self.path = "test_pynwb_io_hdf5.h5"
self.start_time = datetime(1970, 1, 1, 12, tzinfo=tzutc())
self.create_date = datetime(2017, 4, 15, 12, tzinfo=tzlocal())
self.container = NWBFile('a test NWB File', 'TEST123',
self.start_time, file_create_date=self.create_date)
ts = TimeSeries('test_timeseries',
list(range(100, 200, 10)), 'SIunit', timestamps=list(range(10)), resolution=0.1)
self.container.add_acquisition(ts)
ts_builder = GroupBuilder('test_timeseries',
attributes={'neurodata_type': 'TimeSeries',
'help': 'General purpose TimeSeries'},
datasets={'data': DatasetBuilder('data', list(range(100, 200, 10)),
attributes={'unit': 'SIunit',
'conversion': 1.0,
'resolution': 0.1}),
'timestamps': DatasetBuilder('timestamps', list(range(10)),
attributes={'unit': 'Seconds',
'interval': 1})})
self.builder = GroupBuilder(
'root', groups={'acquisition': GroupBuilder('acquisition', groups={'test_timeseries': ts_builder}),
'analysis': GroupBuilder('analysis'),
'general': GroupBuilder('general'),
'processing': GroupBuilder('processing'),
'stimulus': GroupBuilder(
'stimulus',
groups={'presentation': GroupBuilder('presentation'),
'templates': GroupBuilder('templates')})},
datasets={'file_create_date': DatasetBuilder('file_create_date', [self.create_date.isoformat()]),
'identifier': DatasetBuilder('identifier', 'TEST123'),
'session_description': DatasetBuilder('session_description', 'a test NWB File'),
'nwb_version': DatasetBuilder('nwb_version', '1.0.6'),
'session_start_time': DatasetBuilder('session_start_time', self.start_time.isoformat())},
attributes={'neurodata_type': 'NWBFile'})
def tearDown(self):
os.remove(self.path)
def test_nwbio(self):
fil = File(self.path)
io = HDF5IO(self.path, self.manager, file=fil)
io.write(self.container)
io.close()
f = File(self.path)
self.assertIn('acquisition', f)
self.assertIn('analysis', f)
self.assertIn('general', f)
self.assertIn('processing', f)
self.assertIn('file_create_date', f)
self.assertIn('identifier', f)
self.assertIn('session_description', f)
self.assertIn('session_start_time', f)
acq = f.get('acquisition')
self.assertIn('test_timeseries', acq)
def test_write_clobber(self):
fil = File(self.path)
io = HDF5IO(self.path, self.manager, file=fil)
io.write(self.container)
io.close()
f = File(self.path) # noqa: F841
if six.PY2:
assert_file_exists = IOError
elif six.PY3:
assert_file_exists = OSError
with self.assertRaises(assert_file_exists):
io = HDF5IO(self.path, self.manager, mode='w-')
io.write(self.container)
io.close()
def test_write_cache_spec(self):
    '''
    Round-trip test for writing spec and reading it back in
    '''
    fil = File(self.path)
    writer = HDF5IO(self.path, self.manager, file=fil)
    writer.write(self.container, cache_spec=True)
    writer.close()
    readback = File(self.path)
    self.assertIn('specifications', readback)
    # Load the cached namespace back out of the file and compare it
    # field-by-field and type-by-type with the original.
    catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
    HDF5IO.load_namespaces(catalog, self.path, namespaces=['core'])
    original_ns = self.manager.namespace_catalog.get_namespace('core')
    cached_ns = catalog.get_namespace('core')
    self.maxDiff = None
    for field in ('author', 'contact', 'doc', 'full_name', 'name'):
        with self.subTest(namespace_field=field):
            self.assertEqual(original_ns[field], cached_ns[field])
    for data_type in original_ns.get_registered_types():
        with self.subTest(neurodata_type=data_type):
            spec_before = original_ns.get_spec(data_type)
            spec_after = cached_ns.get_spec(data_type)
            with self.subTest(test='data_type spec read back in'):
                self.assertIsNotNone(spec_after)
            with self.subTest(test='cached spec preserved original spec'):
                self.assertDictEqual(spec_before, spec_after)
| 47.62931
| 111
| 0.548597
| 1,080
| 11,050
| 5.447222
| 0.149074
| 0.031277
| 0.030937
| 0.021758
| 0.932517
| 0.932517
| 0.932517
| 0.932517
| 0.932517
| 0.932517
| 0
| 0.018638
| 0.339638
| 11,050
| 231
| 112
| 47.835498
| 0.787584
| 0.012127
| 0
| 0.934343
| 0
| 0
| 0.156054
| 0.007729
| 0
| 0
| 0
| 0
| 0.161616
| 1
| 0.060606
| false
| 0
| 0.055556
| 0.010101
| 0.146465
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13f2aff61986beff23ed48dc8621cb957b84ee68
| 2,087
|
py
|
Python
|
apps/users/migrations/0030_auto_20201003_0952.py
|
lucasjaroszewski/incremental-game
|
bae8823f986be0fd046bd50195d43fbc548fad90
|
[
"MIT"
] | null | null | null |
apps/users/migrations/0030_auto_20201003_0952.py
|
lucasjaroszewski/incremental-game
|
bae8823f986be0fd046bd50195d43fbc548fad90
|
[
"MIT"
] | 5
|
2021-06-09T17:54:51.000Z
|
2022-03-12T00:46:49.000Z
|
apps/users/migrations/0030_auto_20201003_0952.py
|
lucasjaroszewski/incremental-game
|
bae8823f986be0fd046bd50195d43fbc548fad90
|
[
"MIT"
] | 1
|
2020-09-27T18:26:15.000Z
|
2020-09-27T18:26:15.000Z
|
# Generated by Django 3.0.6 on 2020-10-03 12:52
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add badge_pwr/badge_spd to Character and default the
    Character_weapon stat fields to 0.

    NOTE(review): the generated migration used ``default='0'`` (a str) on
    integer fields; integer fields should default to the int ``0``.  The
    model definitions should be updated to ``default=0`` as well so the
    autodetector does not emit a spurious follow-up migration.
    """

    dependencies = [
        ('users', '0029_auto_20200930_2241'),
    ]

    operations = [
        migrations.AddField(
            model_name='character',
            name=badge,
            field=models.PositiveIntegerField(
                default=0,
                validators=[
                    django.core.validators.MinValueValidator(0),
                    django.core.validators.MaxValueValidator(35),
                ],
            ),
        )
        for badge in ('badge_pwr', 'badge_spd')
    ] + [
        # The eight weapon stat fields all get the same integer default.
        migrations.AlterField(
            model_name='character_weapon',
            name=stat,
            field=models.IntegerField(default=0),
        )
        for stat in ('end', 'hp', 'int', 'lck', 'mp', 'pwr', 'spd', 'spr')
    ]
| 32.107692
| 163
| 0.574509
| 185
| 2,087
| 6.356757
| 0.27027
| 0.076531
| 0.153061
| 0.197279
| 0.828231
| 0.801871
| 0.801871
| 0.701531
| 0.701531
| 0.701531
| 0
| 0.032258
| 0.301869
| 2,087
| 64
| 164
| 32.609375
| 0.77488
| 0.021562
| 0
| 0.689655
| 1
| 0
| 0.109804
| 0.011275
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.034483
| 0
| 0.086207
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b936c1bbe34b4b5785d82f35bc91fc71a5fa32d4
| 51,711
|
py
|
Python
|
grafana/common/dashboards/aggregated/rssac_reporting.py
|
MikeAT/visualizer
|
946b98d82eaf7ec508861115585afd683fc49e5c
|
[
"MIT"
] | 6
|
2021-03-03T17:52:24.000Z
|
2022-02-10T11:45:22.000Z
|
grafana/common/dashboards/aggregated/rssac_reporting.py
|
Acidburn0zzz/visualizer
|
20fba91f0d26b98531f97f643c8329640d1c0d11
|
[
"MIT"
] | 1
|
2021-04-29T12:34:04.000Z
|
2021-04-29T14:50:17.000Z
|
grafana/common/dashboards/aggregated/rssac_reporting.py
|
Acidburn0zzz/visualizer
|
20fba91f0d26b98531f97f643c8329640d1c0d11
|
[
"MIT"
] | 2
|
2021-04-27T14:02:03.000Z
|
2021-11-12T10:34:32.000Z
|
# Copyright 2021 Internet Corporation for Assigned Names and Numbers.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at https://mozilla.org/MPL/2.0/.
#
# Developed by Sinodun IT (sinodun.com)
#
# Aggregation RSSAC plots
import textwrap
import grafanalib.core as GCore
import grafanacommon as GCommon
# Bucket width (bytes) shared by every message-size histogram below.
_SIZE_BUCKETSIZE = 16


def _rcode_volume_row(agginfo, nodesel):
    """Dashboard row plotting per-minute response volume broken down by RCODE."""
    return GCore.Row(
        panels=[
            GCommon.QPMGraph(
                title='RCODE volume',
                targets=[
                    GCommon.ClickHouseTarget(
                        database=agginfo['database'],
                        table='Responses' + agginfo['table_suffix'],
                        round=agginfo['round'],
                        query=textwrap.dedent("""\
                            SELECT t, groupArray((ResponseRcode, qc))
                            FROM
                            (
                              SELECT
                                t,ResponseRcode,60*cnt/{interval_divisor} AS qc
                              FROM
                              (
                                SELECT
                                  $timeSeries AS t,
                                  ResponseRcodeMap.ResponseRcode AS ResponseRcode,
                                  sum(toUInt64(ResponseRcodeMap.Count)) AS cnt
                                FROM $table
                                ARRAY JOIN ResponseRcodeMap
                                WHERE $timeFilter AND NodeID IN {nodesel}
                                GROUP BY t, ResponseRcode
                                ORDER BY t, ResponseRcode
                              )
                              GROUP BY t, ResponseRcode, cnt
                              ORDER BY t, ResponseRcode
                            )
                            GROUP BY t
                            ORDER BY t""".format(
                                interval_divisor=agginfo['interval_divisor'],
                                nodesel=nodesel)),
                        refId='A'
                    ),
                ],
            ),
        ],
    )


def _size_row(agginfo, nodesel, transport, direction, above):
    """Dashboard row with one DNS message-size histogram.

    Args:
        transport: 'UDP' or 'TCP' (selects TransportTCP = 0/1).
        direction: 'Query' or 'Response' (selects the length-map column).
        above: False plots sizes below 1000 bytes; True plots
            1000-3000 bytes with coarser tick spacing and zero-filled
            buckets starting at the 1000-byte boundary.
    """
    bucketsize = _SIZE_BUCKETSIZE
    prefix = transport + direction            # e.g. 'UDPQuery'
    map_col = direction + 'LengthMap'         # QueryLengthMap / ResponseLengthMap
    tcp = 1 if transport == 'TCP' else 0
    if above:
        rel, len_filter = 'above', 'Len >= 1000'
        color = '#FFB357'
        xaxis_args = dict(axrange=[1000, 3000], tick0=1008, dtick=32,
                          tickangle=-90, tickmargin=45)
        # Zero-filler buckets start at the 1000-byte boundary.
        filler_len = ('CAST((number + intDiv(1000, {bs}))*{bs} AS UInt16)'
                      ' AS Len').format(bs=bucketsize)
        bucketlen = 2000 // bucketsize
    else:
        rel, len_filter = 'below', 'Len < 1000'
        color = '#1F60C4'
        xaxis_args = dict(axrange=[0, 1000], tick0=16, dtick=16,
                          tickangle=-45, tickmargin=40)
        filler_len = 'CAST(number*{bs} AS UInt16) AS Len'.format(bs=bucketsize)
        bucketlen = 1000 // bucketsize
    # The UNION ALL over system.numbers pads out empty buckets so the bar
    # chart shows a contiguous x-axis.
    query = textwrap.dedent("""\
        SELECT
          Len + intDiv({bucketsize}, 2) AS {prefix}Len,
          60 * sum(Cnt)/($to - $from) AS {prefix}Cnt
        FROM
        (
          SELECT
            intDiv({map_col}.Length, {bucketsize})*{bucketsize} AS Len,
            sum(toUInt64({map_col}.Count)) AS Cnt
          FROM $table
          ARRAY JOIN {map_col}
          WHERE $timeFilter
          AND TransportTCP={tcp}
          AND NodeID IN {nodesel}
          AND {len_filter}
          GROUP BY Len
          UNION ALL
          (
            SELECT
              {filler_len},
              CAST(0 AS UInt64) AS Cnt
            FROM system.numbers
            WHERE number > 0 LIMIT {bucketlen}
          )
        )
        GROUP BY Len""".format(
            prefix=prefix, map_col=map_col, tcp=tcp, nodesel=nodesel,
            len_filter=len_filter, filler_len=filler_len,
            bucketsize=bucketsize, bucketlen=bucketlen))
    return GCore.Row(
        panels=[
            GCommon.BarChart(
                title='DNS {} {} size for sizes {} 1000 bytes'.format(
                    transport, direction, rel),
                layout=GCommon.BarChartLayout(
                    barmode=GCommon.BAR_CHART_LAYOUT_MODE_STACK,
                    showlegend=False,
                    xaxis=GCommon.BarChartAxis(
                        autotick=False,
                        axtype=GCommon.BAR_CHART_AXIS_TYPE_LINEAR,
                        title='Message size (bytes)',
                        **xaxis_args
                    ),
                    yaxis=GCommon.BarChartAxis(
                        axtype=GCommon.BAR_CHART_AXIS_TYPE_LINEAR,
                        dtick=16,
                        rangemode=GCommon.BAR_CHART_AXIS_RANGEMODE_TOZERO,
                        title='Messages per minute',
                    ),
                ),
                traces=[
                    GCommon.BarChartTrace(
                        name='{} {}'.format(transport, direction),
                        color=color,
                        x=prefix + 'Len',
                        y=prefix + 'Cnt',
                        text=prefix + 'Cnt',
                    ),
                ],
                targets=[
                    GCommon.ClickHouseTableTarget(
                        database=agginfo['database'],
                        table='QueryResponseLength' + agginfo['table_suffix'],
                        round=agginfo['round'],
                        query=query,
                        refId='A'
                    ),
                ],
            ),
        ],
    )


def _transport_qpm_panel(agginfo, nodesel, tcp, ipv6):
    """Queries/Responses-per-minute graph for one transport/IP version."""
    query = textwrap.dedent("""\
        SELECT
          $timeSeries AS t,
          60 * sum(toUInt64(QueryCount))/{interval_divisor} AS Queries,
          60 * sum(toUInt64(ResponseCount))/{interval_divisor} AS Responses
        FROM $table
        WHERE
          $timeFilter
          AND TransportTCP = {tcp}
          AND TransportIPv6 {ip6_cmp} 0
          AND NodeID IN {nodesel}
        GROUP BY t
        ORDER BY t""".format(
            interval_divisor=agginfo['interval_divisor'],
            tcp=tcp,
            ip6_cmp='<>' if ipv6 else '=',
            nodesel=nodesel))
    return GCommon.QPMGraph(
        title='Queries and Responses, {}/IPv{}'.format(
            'TCP' if tcp else 'UDP', 6 if ipv6 else 4),
        targets=[
            GCommon.ClickHouseTarget(
                database=agginfo['database'],
                table='QueryResponseTransport' + agginfo['table_suffix'],
                round=agginfo['round'],
                query=query,
                refId='A'
            ),
        ],
    )


def _difference_qpm_panel(agginfo, nodesel, tcp, ipv6):
    """Per-minute difference between query and response counts.

    Uses toInt64 so the difference can go negative.  (The original
    TCP/IPv6 panel alone used toUInt64, which wraps to a huge unsigned
    value whenever responses outnumber queries in an interval; all four
    panels now use the signed conversion consistently.)
    """
    query = textwrap.dedent("""\
        SELECT
          $timeSeries AS t,
          60 * sum(toInt64(QueryCount) - ResponseCount)/{interval_divisor} AS Difference
        FROM $table
        WHERE
          $timeFilter
          AND TransportTCP = {tcp}
          AND TransportIPv6 {ip6_cmp} 0
          AND NodeID IN {nodesel}
        GROUP BY t
        ORDER BY t""".format(
            interval_divisor=agginfo['interval_divisor'],
            tcp=tcp,
            ip6_cmp='<>' if ipv6 else '=',
            nodesel=nodesel))
    return GCommon.QPMGraph(
        title='Difference in query and response volume, {}/IPv{}'.format(
            'TCP' if tcp else 'UDP', 6 if ipv6 else 4),
        targets=[
            GCommon.ClickHouseTarget(
                database=agginfo['database'],
                table='QueryResponseTransport' + agginfo['table_suffix'],
                round=agginfo['round'],
                query=query,
                refId='A'
            ),
        ],
    )


def _transport_row(agginfo, nodesel, panel_fn, tcp):
    """Row pairing the IPv4 and IPv6 variants of one transport panel."""
    return GCore.Row(
        panels=[
            panel_fn(agginfo, nodesel, tcp, False),
            panel_fn(agginfo, nodesel, tcp, True),
        ],
    )


def _unique_sources_row(agginfo, nodesel):
    """Horizontal bar chart of unique client addresses per IP aggregation."""
    # (refId, source table prefix, display label, result-column prefix)
    specs = [
        ('A', 'UniqueIPv6Addr', 'IPv6/64', 'IPv664'),
        ('B', 'UniqueIPv6Addr', 'IPv6', 'IPv6'),
        ('C', 'UniqueIPv4Addr', 'IPv4', 'IPv4'),
    ]
    colors = {'IPv664': '#33B5E5', 'IPv6': '#1F60C4', 'IPv4': '#8877D9'}
    return GCore.Row(
        panels=[
            GCommon.BarChart(
                title='Unique source addresses',
                orientation=GCommon.BAR_CHART_ORIENTATION_HORIZONTAL,
                layout=GCommon.BarChartLayout(
                    xaxis=GCommon.BarChartAxis(
                        title='Number of unique sources',
                    ),
                    yaxis=GCommon.BarChartAxis(
                        tickmargin=55,
                        title='IP Version/Aggregation',
                    ),
                ),
                traces=[
                    GCommon.BarChartTrace(
                        name=label,
                        color=colors[col],
                        x=col + 'Cnt',
                        y=col + 'Proto',
                        text=col + 'Cnt',
                    )
                    for _refid, _table, label, col in specs
                ],
                targets=[
                    GCommon.ClickHouseTableTarget(
                        database=agginfo['database'],
                        table=table + agginfo['table_suffix'],
                        round=agginfo['round'],
                        query=textwrap.dedent("""\
                            SELECT
                              '{label}' AS {col}Proto,
                              uniqMerge({col}Addr) AS {col}Cnt
                            FROM $table
                            WHERE $timeFilter
                            AND NodeID IN {nodesel}""".format(
                                label=label, col=col, nodesel=nodesel)),
                        refId=refid
                    )
                    for refid, table, label, col in specs
                ],
            ),
        ],
    )


def _zone_load_row(agginfo, nodesel, name_col):
    """Scatter plot of zone load latency per zone serial number."""
    return GCore.Row(
        panels=[
            GCommon.BarChart(
                title='Zone Load Time',
                plottype=GCommon.PLOTLY_CHART_TYPE_SCATTER,
                layout=GCommon.BarChartLayout(
                    xaxis=GCommon.BarChartAxis(
                        axtype=GCommon.BAR_CHART_AXIS_TYPE_CATEGORY,
                        title='Serial number',
                    ),
                    yaxis=GCommon.BarChartAxis(
                        axtype=GCommon.BAR_CHART_AXIS_TYPE_LINEAR,
                        title='Load time (s)',
                    ),
                ),
                traces=[
                    GCommon.BarChartTrace(
                        name='Latencies',
                        color='#33B5E5',
                        x='Serial',
                        y='Latency',
                        text='Name',
                        showmarkers=True,
                    ),
                ],
                targets=[
                    GCommon.ClickHouseTableTarget(
                        database=agginfo['raw_database'],
                        table='ZoneLatency',
                        round=agginfo['round'],
                        query=textwrap.dedent("""\
                            SELECT
                              Serial, Latency, Name
                            FROM $table
                            ALL INNER JOIN
                            (
                              SELECT
                                {name_col} AS Name,
                                node_id AS NodeID
                              FROM {nodeinfo_database}.node_text
                            ) AS NodeName USING NodeID
                            WHERE $timeFilter
                            AND NodeID IN {nodesel}
                            ORDER BY Serial ASC""".format(
                                nodesel=nodesel,
                                name_col=name_col,
                                nodeinfo_database=agginfo['nodeinfo_database'])),
                        refId='A'
                    ),
                ],
            ),
        ],
    )


def dash(myuid, agginfo, nodesel, **kwargs):
    """Build the aggregated RSSAC reporting dashboard.

    Args:
        myuid: Grafana dashboard UID.
        agginfo: aggregation settings dict; keys used here are
            'graph_tag', 'database', 'raw_database', 'nodeinfo_database',
            'table_suffix', 'round' and 'interval_divisor'.
        nodesel: SQL fragment selecting the node IDs to include.
        **kwargs: must contain 'zone_load_name_col', the node_text column
            used as the node display name in the zone-load panel.

    Returns:
        A GCommon.Dashboard with one row per RSSAC plot.
    """
    # Eight size histograms: below-1000 first, then above-1000, each in
    # UDP Query / TCP Query / UDP Response / TCP Response order (this
    # matches the original hand-written row ordering).
    size_rows = [
        _size_row(agginfo, nodesel, transport, direction, above)
        for above in (False, True)
        for direction in ('Query', 'Response')
        for transport in ('UDP', 'TCP')
    ]
    return GCommon.Dashboard(
        title="RSSAC Reporting",
        tags=[
            agginfo['graph_tag']
        ],
        uid=myuid,
        rows=(
            [_rcode_volume_row(agginfo, nodesel)]
            + size_rows
            + [
                _transport_row(agginfo, nodesel, _transport_qpm_panel, 1),
                _transport_row(agginfo, nodesel, _transport_qpm_panel, 0),
                _transport_row(agginfo, nodesel, _difference_qpm_panel, 1),
                _transport_row(agginfo, nodesel, _difference_qpm_panel, 0),
                _unique_sources_row(agginfo, nodesel),
                _zone_load_row(agginfo, nodesel,
                               kwargs['zone_load_name_col']),
            ]
        )
    )
| 51.711
| 120
| 0.301464
| 2,662
| 51,711
| 5.779113
| 0.09429
| 0.022751
| 0.034126
| 0.032111
| 0.892551
| 0.889236
| 0.87266
| 0.858944
| 0.855824
| 0.855824
| 0
| 0.030625
| 0.640077
| 51,711
| 999
| 121
| 51.762763
| 0.795938
| 0.006246
| 0
| 0.874239
| 0
| 0
| 0.446105
| 0.036922
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001014
| false
| 0
| 0.003043
| 0.001014
| 0.005071
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b97fb5b288c10cbff036fa63cb51d4149b87a792
| 98
|
py
|
Python
|
spotifyClient/config.py
|
sebcampos/music
|
b508654f9524127bd2681628a9ec4b88dc607c8d
|
[
"MIT"
] | null | null | null |
spotifyClient/config.py
|
sebcampos/music
|
b508654f9524127bd2681628a9ec4b88dc607c8d
|
[
"MIT"
] | null | null | null |
spotifyClient/config.py
|
sebcampos/music
|
b508654f9524127bd2681628a9ec4b88dc607c8d
|
[
"MIT"
] | null | null | null |
import os

# SECURITY(review): these Spotify API credentials were hard-coded and are
# now in the repository history; they should be revoked and rotated.
# Environment variables take precedence; the literals remain only as a
# backward-compatible fallback so existing setups keep working.
CLIENT_ID = os.environ.get("SPOTIFY_CLIENT_ID",
                           "9f9c1dd9992f4c1aa1b51059f3ec9805")
CLIENT_SECRET = os.environ.get("SPOTIFY_CLIENT_SECRET",
                               "34e88d7df570483d9df03f27c62a80ff")
| 32.666667
| 50
| 0.877551
| 6
| 98
| 14
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.434783
| 0.061224
| 98
| 2
| 51
| 49
| 0.478261
| 0
| 0
| 0
| 0
| 0
| 0.653061
| 0.653061
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.